method | clean_method | doc | comment | method_name | extra | imports | imports_info | cluster_imports_info | libraries | libraries_info | id |
---|---|---|---|---|---|---|---|---|---|---|---|
public TypeCode _type()
{
return t_any;
}
/**
* Write the {@link #value } field to the given stream.
* For {@link Principal} the functionality
* is delegated to
* {@link org.omg.CORBA.portable.OutputStream#write_Principal(Principal)} | TypeCode function() { return t_any; } /** * Write the {@link #value } field to the given stream. * For {@link Principal} the functionality * is delegated to * {@link org.omg.CORBA.portable.OutputStream#write_Principal(Principal)} | /**
* Returns the TypeCode, corresponding the CORBA type that is stored
* using this holder.
*/ | Returns the TypeCode, corresponding the CORBA type that is stored using this holder | _type | {
"repo_name": "SanDisk-Open-Source/SSD_Dashboard",
"path": "uefi/gcc/gcc-4.6.3/libjava/classpath/org/omg/CORBA/PrincipalHolder.java",
"license": "gpl-2.0",
"size": 3752
} | [
"org.omg.CORBA"
] | import org.omg.CORBA; | import org.omg.*; | [
"org.omg"
] | org.omg; | 1,768,834 |
public int getNumberOfObsUsingThisConcept() {
List<Concept> searchConcepts = Arrays.asList(concept);
return Context.getObsService().getObservationCount(null, null, searchConcepts, null, null, null, null, null,
null, true);
}
| int function() { List<Concept> searchConcepts = Arrays.asList(concept); return Context.getObsService().getObservationCount(null, null, searchConcepts, null, null, null, null, null, null, true); } | /**
* Get the number of observations that use this concept.
*
* @return number of obs using this concept
*/ | Get the number of observations that use this concept | getNumberOfObsUsingThisConcept | {
"repo_name": "sintjuri/openmrs-core",
"path": "web/src/main/java/org/openmrs/web/controller/ConceptFormController.java",
"license": "mpl-2.0",
"size": 39194
} | [
"java.util.Arrays",
"java.util.List",
"org.openmrs.Concept",
"org.openmrs.api.context.Context"
] | import java.util.Arrays; import java.util.List; import org.openmrs.Concept; import org.openmrs.api.context.Context; | import java.util.*; import org.openmrs.*; import org.openmrs.api.context.*; | [
"java.util",
"org.openmrs",
"org.openmrs.api"
] | java.util; org.openmrs; org.openmrs.api; | 2,153,219 |
public long stop() {
final long elapsed = clock.getTick() - startTime;
timer.update(elapsed, TimeUnit.NANOSECONDS);
return elapsed;
} | long function() { final long elapsed = clock.getTick() - startTime; timer.update(elapsed, TimeUnit.NANOSECONDS); return elapsed; } | /**
* Updates the timer with the difference between current and start time. Call to this method will
* not reset the start time. Multiple calls result in multiple updates.
*
* @return the elapsed time in nanoseconds
*/ | Updates the timer with the difference between current and start time. Call to this method will not reset the start time. Multiple calls result in multiple updates | stop | {
"repo_name": "javadelight/delight-metrics",
"path": "src/main/java/com/codahale/metrics/Timer.java",
"license": "apache-2.0",
"size": 5347
} | [
"java.util.concurrent.TimeUnit"
] | import java.util.concurrent.TimeUnit; | import java.util.concurrent.*; | [
"java.util"
] | java.util; | 926,192 |
public void createView(int tag, String className, int rootViewTag, ReadableMap props) {
ReactShadowNode cssNode = createShadowNode(className);
ReactShadowNode rootNode = mShadowNodeRegistry.getNode(rootViewTag);
cssNode.setReactTag(tag);
cssNode.setViewClassName(className);
cssNode.setRootNode(rootNode);
cssNode.setThemedContext(rootNode.getThemedContext());
mShadowNodeRegistry.addNode(cssNode);
ReactStylesDiffMap styles = null;
if (props != null) {
styles = new ReactStylesDiffMap(props);
cssNode.updateProperties(styles);
}
handleCreateView(cssNode, rootViewTag, styles);
} | void function(int tag, String className, int rootViewTag, ReadableMap props) { ReactShadowNode cssNode = createShadowNode(className); ReactShadowNode rootNode = mShadowNodeRegistry.getNode(rootViewTag); cssNode.setReactTag(tag); cssNode.setViewClassName(className); cssNode.setRootNode(rootNode); cssNode.setThemedContext(rootNode.getThemedContext()); mShadowNodeRegistry.addNode(cssNode); ReactStylesDiffMap styles = null; if (props != null) { styles = new ReactStylesDiffMap(props); cssNode.updateProperties(styles); } handleCreateView(cssNode, rootViewTag, styles); } | /**
* Invoked by React to create a new node with a given tag, class name and properties.
*/ | Invoked by React to create a new node with a given tag, class name and properties | createView | {
"repo_name": "htc2u/react-native",
"path": "ReactAndroid/src/main/java/com/facebook/react/uimanager/UIImplementation.java",
"license": "bsd-3-clause",
"size": 31555
} | [
"com.facebook.react.bridge.ReadableMap"
] | import com.facebook.react.bridge.ReadableMap; | import com.facebook.react.bridge.*; | [
"com.facebook.react"
] | com.facebook.react; | 1,974,913 |
public void commit(AsyncResult<Void> request) throws Exception {
if (!this.info.isTransacted()) {
throw new IllegalStateException("Non-transacted Session cannot start a TX.");
}
getTransactionContext().commit(request);
} | void function(AsyncResult<Void> request) throws Exception { if (!this.info.isTransacted()) { throw new IllegalStateException(STR); } getTransactionContext().commit(request); } | /**
* Commit the currently running Transaction.
*
* @param request
* The request that will be signaled on completion of this operation.
*
* @throws Exception if an error occurs while performing the operation.
*/ | Commit the currently running Transaction | commit | {
"repo_name": "delkyd/hawtjms",
"path": "hawtjms-amqp/src/main/java/io/hawtjms/provider/amqp/AmqpSession.java",
"license": "apache-2.0",
"size": 9261
} | [
"io.hawtjms.provider.AsyncResult",
"javax.jms.IllegalStateException"
] | import io.hawtjms.provider.AsyncResult; import javax.jms.IllegalStateException; | import io.hawtjms.provider.*; import javax.jms.*; | [
"io.hawtjms.provider",
"javax.jms"
] | io.hawtjms.provider; javax.jms; | 2,901,491 |
protected void putAdditionalInstanceState(Bundle saveState) {
} | void function(Bundle saveState) { } | /**
* Give the opportunity to child classes to save additional state variables
*
* @param saveState The state of the floating label widget
*/ | Give the opportunity to child classes to save additional state variables | putAdditionalInstanceState | {
"repo_name": "rundroid/android-floatinglabel-widgets",
"path": "library/src/main/java/com/marvinlabs/widget/floatinglabel/FloatingLabelWidgetBase.java",
"license": "apache-2.0",
"size": 20460
} | [
"android.os.Bundle"
] | import android.os.Bundle; | import android.os.*; | [
"android.os"
] | android.os; | 1,519,860 |
public static Stream<String> getLineStream(String filePath) {
return getLineStream(getPath(filePath));
} | static Stream<String> function(String filePath) { return getLineStream(getPath(filePath)); } | /**
* Gets line stream.
*
* @param filePath the file path
* @return the line stream
*/ | Gets line stream | getLineStream | {
"repo_name": "JM-Lab/utils-java8",
"path": "src/main/java/kr/jm/utils/helper/JMFiles.java",
"license": "apache-2.0",
"size": 10033
} | [
"java.util.stream.Stream"
] | import java.util.stream.Stream; | import java.util.stream.*; | [
"java.util"
] | java.util; | 239,563 |
ConfigurableConversionService getConversionService();
/**
* Set the {@link ConfigurableConversionService} to be used when performing type
* conversions on properties.
* <p><strong>Note:</strong> as an alternative to fully replacing the
* {@code ConversionService}, consider adding or removing individual
* {@code Converter} instances by drilling into {@link #getConversionService()} | ConfigurableConversionService getConversionService(); /** * Set the {@link ConfigurableConversionService} to be used when performing type * conversions on properties. * <p><strong>Note:</strong> as an alternative to fully replacing the * {@code ConversionService}, consider adding or removing individual * {@code Converter} instances by drilling into {@link #getConversionService()} | /**
* Return the {@link ConfigurableConversionService} used when performing type
* conversions on properties.
* <p>The configurable nature of the returned conversion service allows for
* the convenient addition and removal of individual {@code Converter} instances:
* <pre class="code">
* ConfigurableConversionService cs = env.getConversionService();
* cs.addConverter(new FooConverter());
* </pre>
* @see PropertyResolver#getProperty(String, Class)
* @see org.springframework.core.convert.converter.ConverterRegistry#addConverter
*/ | Return the <code>ConfigurableConversionService</code> used when performing type conversions on properties. The configurable nature of the returned conversion service allows for the convenient addition and removal of individual Converter instances: ConfigurableConversionService cs = env.getConversionService(); cs.addConverter(new FooConverter()); </code> | getConversionService | {
"repo_name": "spring-projects/spring-framework",
"path": "spring-core/src/main/java/org/springframework/core/env/ConfigurablePropertyResolver.java",
"license": "apache-2.0",
"size": 4240
} | [
"org.springframework.core.convert.support.ConfigurableConversionService"
] | import org.springframework.core.convert.support.ConfigurableConversionService; | import org.springframework.core.convert.support.*; | [
"org.springframework.core"
] | org.springframework.core; | 370,207 |
private void finishActivity(boolean cancelled) {
if (isFinishing())
return;
Intent intent = new Intent();
if (cancelled) {
setResult(Activity.RESULT_CANCELED, intent);
finish();
return;
}
// check if comment is added or not and set it properly on the new habitevent
String commentString;
if (isCommentChanged)
commentString = comment.getDynamicText();
else
commentString = "";
HabitEvent event = new HabitEvent(commentString);
event.setDate(eventDate);
event.setLocation(location);
if (image != null){
event.setBase64EncodedPhoto(ImageUtilities.imageToBase64(image));
}
intent.putExtra(RETURNED_HABIT, event);
intent.putExtra(ID_HABIT_HASH, habitId);
setResult(Activity.RESULT_OK, intent);
finish();
} | void function(boolean cancelled) { if (isFinishing()) return; Intent intent = new Intent(); if (cancelled) { setResult(Activity.RESULT_CANCELED, intent); finish(); return; } String commentString; if (isCommentChanged) commentString = comment.getDynamicText(); else commentString = ""; HabitEvent event = new HabitEvent(commentString); event.setDate(eventDate); event.setLocation(location); if (image != null){ event.setBase64EncodedPhoto(ImageUtilities.imageToBase64(image)); } intent.putExtra(RETURNED_HABIT, event); intent.putExtra(ID_HABIT_HASH, habitId); setResult(Activity.RESULT_OK, intent); finish(); } | /**
* End this activity after the user clicks on either "Save" or "Cancel"
* Adds data to the intent object if the user selects "Save"
* @param cancelled whether the cancel button was clicked or not
*/ | End this activity after the user clicks on either "Save" or "Cancel" Adds data to the intent object if the user selects "Save" | finishActivity | {
"repo_name": "CMPUT301F17T15/CIA",
"path": "app/src/main/java/com/cmput301/cia/activities/events/CreateHabitEventActivity.java",
"license": "mit",
"size": 11123
} | [
"android.app.Activity",
"android.content.Intent",
"com.cmput301.cia.models.HabitEvent",
"com.cmput301.cia.utilities.ImageUtilities"
] | import android.app.Activity; import android.content.Intent; import com.cmput301.cia.models.HabitEvent; import com.cmput301.cia.utilities.ImageUtilities; | import android.app.*; import android.content.*; import com.cmput301.cia.models.*; import com.cmput301.cia.utilities.*; | [
"android.app",
"android.content",
"com.cmput301.cia"
] | android.app; android.content; com.cmput301.cia; | 558,738 |
@Nullable
DBSObjectFilter getObjectFilter(Class<?> type, @Nullable DBSObject parentObject, boolean firstMatch); | DBSObjectFilter getObjectFilter(Class<?> type, @Nullable DBSObject parentObject, boolean firstMatch); | /**
* Search for object filter which corresponds specified object type and parent object.
* Search filter which match any super class or interface implemented by specified type.
* @param type object type
* @param parentObject parent object (in DBS objects hierarchy)
* @return object filter or null if not filter was set for specified type
*/ | Search for object filter which corresponds specified object type and parent object. Search filter which match any super class or interface implemented by specified type | getObjectFilter | {
"repo_name": "ruspl-afed/dbeaver",
"path": "plugins/org.jkiss.dbeaver.model/src/org/jkiss/dbeaver/model/DBPDataSourceContainer.java",
"license": "apache-2.0",
"size": 6215
} | [
"org.jkiss.code.Nullable",
"org.jkiss.dbeaver.model.struct.DBSObject",
"org.jkiss.dbeaver.model.struct.DBSObjectFilter"
] | import org.jkiss.code.Nullable; import org.jkiss.dbeaver.model.struct.DBSObject; import org.jkiss.dbeaver.model.struct.DBSObjectFilter; | import org.jkiss.code.*; import org.jkiss.dbeaver.model.struct.*; | [
"org.jkiss.code",
"org.jkiss.dbeaver"
] | org.jkiss.code; org.jkiss.dbeaver; | 1,618,836 |
public List<DataSourceConfig> getDataSources()
{
if (dataSources == null)
{
dataSources = new ArrayList<>();
}
return dataSources;
} | List<DataSourceConfig> function() { if (dataSources == null) { dataSources = new ArrayList<>(); } return dataSources; } | /**
* Gets all the DataSourceConfig objects specified by the user in the
* jdbc.xml configuration file.
*
* @return A list of DataSourcesConfig objects specified by the user.
*/ | Gets all the DataSourceConfig objects specified by the user in the jdbc.xml configuration file | getDataSources | {
"repo_name": "bridje/bridje-framework",
"path": "bridje-jdbc/src/main/java/org/bridje/jdbc/config/JdbcConfig.java",
"license": "apache-2.0",
"size": 4352
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 26,515 |
String getSource(); | String getSource(); | /**
* Returns the value of the '<em><b>Source</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Source</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Source</em>' attribute.
* @see #setSource(String)
* @see org.casa.dsltesting.Qt48Xmlschema.Qt48XmlschemaPackage#getScript_Source()
* @model dataType="org.eclipse.emf.ecore.xml.type.String" required="true"
* extendedMetaData="kind='attribute' name='source' namespace='##targetNamespace'"
* @generated
*/ | Returns the value of the 'Source' attribute. If the meaning of the 'Source' attribute isn't clear, there really should be more of a description here... | getSource | {
"repo_name": "pedromateo/tug_qt_unit_testing_fw",
"path": "qt48_model/src/org/casa/dsltesting/Qt48Xmlschema/Script.java",
"license": "gpl-3.0",
"size": 2641
} | [
"java.lang.String"
] | import java.lang.String; | import java.lang.*; | [
"java.lang"
] | java.lang; | 1,642,946 |
public TestCaseParameterType getCompareType() {
boolean setMode = false;
boolean reset = false;
for (Step s : steps) {
if (s == Step.SET) {
setMode = true;
reset = false;
}
if (s == Step.RESET)
reset = true;
if (s == Step.GET) {
if (setMode && reset)
return TestCaseParameterType.FAC;
if (setMode)
return TestCaseParameterType.GET;
}
}
return null;
} | TestCaseParameterType function() { boolean setMode = false; boolean reset = false; for (Step s : steps) { if (s == Step.SET) { setMode = true; reset = false; } if (s == Step.RESET) reset = true; if (s == Step.GET) { if (setMode && reset) return TestCaseParameterType.FAC; if (setMode) return TestCaseParameterType.GET; } } return null; } | /**
* Will return GET or FAC:
* SET: Will reset any state about RESET
* GET: MUST come after SET, but no RESET can come between SET and GET
* FAC: MUST come after SET AND a RESET
*
* Will return null if GET is first in step-sequence --> no comparison with TestCaseParameter
* @return
* @throws TestException
*/ | Will return null if GET is first in step-sequence --> no comparison with TestCaseParameter | getCompareType | {
"repo_name": "freeacs/dbi",
"path": "src/com/owera/xaps/dbi/tr069/Steps.java",
"license": "mit",
"size": 3086
} | [
"com.owera.xaps.dbi.tr069.TestCaseParameter"
] | import com.owera.xaps.dbi.tr069.TestCaseParameter; | import com.owera.xaps.dbi.tr069.*; | [
"com.owera.xaps"
] | com.owera.xaps; | 2,475,005 |
public void testBug13601() throws Exception {
if (versionMeetsMinimum(5, 0)) {
createTable("testBug13601", "(field1 BIGINT NOT NULL, field2 BIT default 0 NOT NULL) ENGINE=MyISAM");
this.rs = this.stmt.executeQuery("SELECT field1, field2 FROM testBug13601 WHERE 1=-1");
ResultSetMetaData rsmd = this.rs.getMetaData();
assertEquals(Types.BIT, rsmd.getColumnType(2));
assertEquals(Boolean.class.getName(), rsmd.getColumnClassName(2));
this.rs = this.conn.prepareStatement("SELECT field1, field2 FROM testBug13601 WHERE 1=-1").executeQuery();
rsmd = this.rs.getMetaData();
assertEquals(Types.BIT, rsmd.getColumnType(2));
assertEquals(Boolean.class.getName(), rsmd.getColumnClassName(2));
}
} | void function() throws Exception { if (versionMeetsMinimum(5, 0)) { createTable(STR, STR); this.rs = this.stmt.executeQuery(STR); ResultSetMetaData rsmd = this.rs.getMetaData(); assertEquals(Types.BIT, rsmd.getColumnType(2)); assertEquals(Boolean.class.getName(), rsmd.getColumnClassName(2)); this.rs = this.conn.prepareStatement(STR).executeQuery(); rsmd = this.rs.getMetaData(); assertEquals(Types.BIT, rsmd.getColumnType(2)); assertEquals(Boolean.class.getName(), rsmd.getColumnClassName(2)); } } | /**
* Tests BUG13601 (which doesn't seem to be present in 3.1.11, but we'll
* leave it in here for regression's-sake).
*
* @throws Exception
* if the test fails.
*/ | Tests BUG13601 (which doesn't seem to be present in 3.1.11, but we'll leave it in here for regression's-sake) | testBug13601 | {
"repo_name": "1fechner/FeatureExtractor",
"path": "sources/FeatureExtractor/lib/mysql-connector-java-5.1.38/src/testsuite/regression/MetaDataRegressionTest.java",
"license": "lgpl-2.1",
"size": 189851
} | [
"java.sql.ResultSetMetaData",
"java.sql.Types"
] | import java.sql.ResultSetMetaData; import java.sql.Types; | import java.sql.*; | [
"java.sql"
] | java.sql; | 2,574,350 |
public void setMessage(StringProperty message) {
this.message = message;
}
| void function(StringProperty message) { this.message = message; } | /**
* The log message.
*
* @param message the StringProperty.
*/ | The log message | setMessage | {
"repo_name": "NABUCCO/org.nabucco.framework.base",
"path": "org.nabucco.framework.base.facade.datatype/src/main/gen/org/nabucco/framework/base/facade/datatype/extension/schema/workflow/effect/LogEffectExtension.java",
"license": "epl-1.0",
"size": 6209
} | [
"org.nabucco.framework.base.facade.datatype.extension.property.StringProperty"
] | import org.nabucco.framework.base.facade.datatype.extension.property.StringProperty; | import org.nabucco.framework.base.facade.datatype.extension.property.*; | [
"org.nabucco.framework"
] | org.nabucco.framework; | 1,474,722 |
protected void jump()
{
this.motionY = (double)this.func_175134_bD();
if (this.isPotionActive(Potion.jump))
{
this.motionY += (double)((float)(this.getActivePotionEffect(Potion.jump).getAmplifier() + 1) * 0.1F);
}
if (this.isSprinting())
{
float var1 = this.rotationYaw * 0.017453292F;
this.motionX -= (double)(MathHelper.sin(var1) * 0.2F);
this.motionZ += (double)(MathHelper.cos(var1) * 0.2F);
}
this.isAirBorne = true;
} | void function() { this.motionY = (double)this.func_175134_bD(); if (this.isPotionActive(Potion.jump)) { this.motionY += (double)((float)(this.getActivePotionEffect(Potion.jump).getAmplifier() + 1) * 0.1F); } if (this.isSprinting()) { float var1 = this.rotationYaw * 0.017453292F; this.motionX -= (double)(MathHelper.sin(var1) * 0.2F); this.motionZ += (double)(MathHelper.cos(var1) * 0.2F); } this.isAirBorne = true; } | /**
* Causes this entity to do an upwards motion (jumping).
*/ | Causes this entity to do an upwards motion (jumping) | jump | {
"repo_name": "Hexeption/Youtube-Hacked-Client-1.8",
"path": "minecraft/net/minecraft/entity/EntityLivingBase.java",
"license": "mit",
"size": 71852
} | [
"net.minecraft.potion.Potion",
"net.minecraft.util.MathHelper"
] | import net.minecraft.potion.Potion; import net.minecraft.util.MathHelper; | import net.minecraft.potion.*; import net.minecraft.util.*; | [
"net.minecraft.potion",
"net.minecraft.util"
] | net.minecraft.potion; net.minecraft.util; | 1,989,501 |
public static AudioInputStream getAudioInputStream(URL url)
throws UnsupportedAudioFileException, IOException
{
Iterator i = lookupAudioFileReaderProviders();
while (i.hasNext())
{
AudioFileReader reader = (AudioFileReader) i.next();
try
{
return reader.getAudioInputStream(url);
}
catch (UnsupportedAudioFileException _)
{
// Try the next provider.
}
}
throw new UnsupportedAudioFileException("URL type not recognized");
} | static AudioInputStream function(URL url) throws UnsupportedAudioFileException, IOException { Iterator i = lookupAudioFileReaderProviders(); while (i.hasNext()) { AudioFileReader reader = (AudioFileReader) i.next(); try { return reader.getAudioInputStream(url); } catch (UnsupportedAudioFileException _) { } } throw new UnsupportedAudioFileException(STR); } | /**
* Return an audio input stream for the given URL.
* @param url the URL
* @return an audio input stream
* @throws UnsupportedAudioFileException if the URL's audio format is not
* supported by any of the installed providers
* @throws IOException if there is an error while reading the URL
*/ | Return an audio input stream for the given URL | getAudioInputStream | {
"repo_name": "ivmai/JCGO",
"path": "goclsp/clsp_fix/javax/sound/sampled/AudioSystem.java",
"license": "gpl-2.0",
"size": 26363
} | [
"java.io.IOException",
"java.util.Iterator",
"javax.sound.sampled.spi.AudioFileReader"
] | import java.io.IOException; import java.util.Iterator; import javax.sound.sampled.spi.AudioFileReader; | import java.io.*; import java.util.*; import javax.sound.sampled.spi.*; | [
"java.io",
"java.util",
"javax.sound"
] | java.io; java.util; javax.sound; | 301,785 |
public static synchronized void closeDatabasePoolConnection() {
if (dataSource != null && dataSource instanceof org.apache.tomcat.jdbc.pool.DataSource) {
((org.apache.tomcat.jdbc.pool.DataSource) dataSource).close();
dataSource = null;
}
}
| static synchronized void function() { if (dataSource != null && dataSource instanceof org.apache.tomcat.jdbc.pool.DataSource) { ((org.apache.tomcat.jdbc.pool.DataSource) dataSource).close(); dataSource = null; } } | /**
* Close all database connections in the pool.
*/ | Close all database connections in the pool | closeDatabasePoolConnection | {
"repo_name": "maheshika/carbon4-kernel",
"path": "core/org.wso2.carbon.user.core/src/main/java/org/wso2/carbon/user/core/util/DatabaseUtil.java",
"license": "apache-2.0",
"size": 25411
} | [
"javax.sql.DataSource"
] | import javax.sql.DataSource; | import javax.sql.*; | [
"javax.sql"
] | javax.sql; | 564,622 |
protected void adaptLogicalClause(BooleanQuery activeBooleanQuery,
LogicalClause logicalClause)
throws DiscoveryException, ParseException {
// loop the the sub clauses, recurse any logical clauses
for (DiscoveryClause clause: logicalClause.getClauses()) {
if (clause == null) {
throw new DiscoveryException("A null clause was encountered.");
} else if (clause instanceof LogicalClause) {
BooleanQuery subQuery = new BooleanQuery();
appendQuery(activeBooleanQuery,logicalClause,subQuery);
adaptLogicalClause(subQuery,(LogicalClause)clause);
} else if (clause instanceof PropertyClause) {
PropertyClauseAdapter adapter = new PropertyClauseAdapter(getQueryAdapter());
PropertyClause subClause = (PropertyClause)clause;
if ((subClause.getTarget() != null) && (subClause.getTarget().getMeaning() != null)) {
PropertyValueType pvt = subClause.getTarget().getMeaning().getValueType();
if ((pvt != null) && pvt.equals(PropertyValueType.TIMEPERIOD)) {
adapter = new TimeperiodClauseAdapter(getQueryAdapter());
}
}
adapter.adaptPropertyClause(activeBooleanQuery,logicalClause,subClause);
} else if (clause instanceof SpatialClause) {
SpatialClauseAdapter adapter = new SpatialClauseAdapter(getQueryAdapter());
SpatialClause subClause = (SpatialClause)clause;
adapter.adaptSpatialClause(activeBooleanQuery,logicalClause,subClause);
} else {
String sErr = "Unrecognized clause type:"+clause.getClass().getName();
throw new DiscoveryException(sErr);
}
}
// MUST_NOT causes a problem when there is only one MUST_NOT clause within
// a BooleanQuery, to get round it we add all documents as a SHOULD
BooleanClause[] clauses = activeBooleanQuery.getClauses();
if ((clauses == null) || (clauses.length == 0)) {
// TODO this will result in no records being returned,
// possible this should be fixed
} else if (clauses.length == 1) {
if (clauses[0].getOccur().equals(BooleanClause.Occur.MUST_NOT)) {
LOGGER.finer("Fixing single MUST_NOT clause within a BooleanQuery...");
appendSelectAll(activeBooleanQuery);
}
}
} | void function(BooleanQuery activeBooleanQuery, LogicalClause logicalClause) throws DiscoveryException, ParseException { for (DiscoveryClause clause: logicalClause.getClauses()) { if (clause == null) { throw new DiscoveryException(STR); } else if (clause instanceof LogicalClause) { BooleanQuery subQuery = new BooleanQuery(); appendQuery(activeBooleanQuery,logicalClause,subQuery); adaptLogicalClause(subQuery,(LogicalClause)clause); } else if (clause instanceof PropertyClause) { PropertyClauseAdapter adapter = new PropertyClauseAdapter(getQueryAdapter()); PropertyClause subClause = (PropertyClause)clause; if ((subClause.getTarget() != null) && (subClause.getTarget().getMeaning() != null)) { PropertyValueType pvt = subClause.getTarget().getMeaning().getValueType(); if ((pvt != null) && pvt.equals(PropertyValueType.TIMEPERIOD)) { adapter = new TimeperiodClauseAdapter(getQueryAdapter()); } } adapter.adaptPropertyClause(activeBooleanQuery,logicalClause,subClause); } else if (clause instanceof SpatialClause) { SpatialClauseAdapter adapter = new SpatialClauseAdapter(getQueryAdapter()); SpatialClause subClause = (SpatialClause)clause; adapter.adaptSpatialClause(activeBooleanQuery,logicalClause,subClause); } else { String sErr = STR+clause.getClass().getName(); throw new DiscoveryException(sErr); } } BooleanClause[] clauses = activeBooleanQuery.getClauses(); if ((clauses == null) (clauses.length == 0)) { } else if (clauses.length == 1) { if (clauses[0].getOccur().equals(BooleanClause.Occur.MUST_NOT)) { LOGGER.finer(STR); appendSelectAll(activeBooleanQuery); } } } | /**
* Builds a Lucene BooleanQuery by recursively traversing a
* catalog discovery LogicalClause.
* @param activeBooleanQuery the active Lucene boolean query
* @param logicalClause the logical clause to adapt
* @throws DiscoveryException if an invalid clause is encountered
* @throws ParseException if a Lucene query parsing exception occurs
*/ | Builds a Lucene BooleanQuery by recursively traversing a catalog discovery LogicalClause | adaptLogicalClause | {
"repo_name": "GeoinformationSystems/GeoprocessingAppstore",
"path": "src/com/esri/gpt/catalog/lucene/LogicalClauseAdapter.java",
"license": "apache-2.0",
"size": 4706
} | [
"com.esri.gpt.catalog.discovery.DiscoveryClause",
"com.esri.gpt.catalog.discovery.DiscoveryException",
"com.esri.gpt.catalog.discovery.LogicalClause",
"com.esri.gpt.catalog.discovery.PropertyClause",
"com.esri.gpt.catalog.discovery.PropertyValueType",
"com.esri.gpt.catalog.discovery.SpatialClause",
"org.apache.lucene.queryParser.ParseException",
"org.apache.lucene.search.BooleanClause",
"org.apache.lucene.search.BooleanQuery"
] | import com.esri.gpt.catalog.discovery.DiscoveryClause; import com.esri.gpt.catalog.discovery.DiscoveryException; import com.esri.gpt.catalog.discovery.LogicalClause; import com.esri.gpt.catalog.discovery.PropertyClause; import com.esri.gpt.catalog.discovery.PropertyValueType; import com.esri.gpt.catalog.discovery.SpatialClause; import org.apache.lucene.queryParser.ParseException; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; | import com.esri.gpt.catalog.discovery.*; import org.apache.lucene.*; import org.apache.lucene.search.*; | [
"com.esri.gpt",
"org.apache.lucene"
] | com.esri.gpt; org.apache.lucene; | 1,628,460 |
private boolean shouldShowRequestPermissionRationale(String[] permissions) {
for (String permission : permissions) {
if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) {
return true;
}
}
return false;
} | boolean function(String[] permissions) { for (String permission : permissions) { if (FragmentCompat.shouldShowRequestPermissionRationale(this, permission)) { return true; } } return false; } | /**
* Gets whether you should show UI with rationale for requesting permissions.
*
* @param permissions The permissions your app wants to request.
* @return Whether you can show permission rationale UI.
*/ | Gets whether you should show UI with rationale for requesting permissions | shouldShowRequestPermissionRationale | {
"repo_name": "liuyanjun513/Camera2Video",
"path": "Application/src/main/java/com/example/android/camera2video/Camera2VideoFragment.java",
"license": "apache-2.0",
"size": 30544
} | [
"android.support.v13.app.FragmentCompat"
] | import android.support.v13.app.FragmentCompat; | import android.support.v13.app.*; | [
"android.support"
] | android.support; | 757,357 |
@Test
public void shouldMarshalWithDefaultConfiguration() throws Exception {
template.sendBody("direct:default", Arrays.asList(
asMap("A", "1", "B", "2", "C", "3"),
asMap("A", "one", "B", "two", "C", "three")
));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("1\t2\t3", "one\ttwo\tthree"), body);
}
| void function() throws Exception { template.sendBody(STR, Arrays.asList( asMap("A", "1", "B", "2", "C", "3"), asMap("A", "one", "B", "two", "C", "three") )); result.expectedMessageCount(1); result.assertIsSatisfied(); String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody()); assertEquals(join(STR, STR), body); } | /**
* Tests that we can marshal TSV with the default configuration.
*/ | Tests that we can marshal TSV with the default configuration | shouldMarshalWithDefaultConfiguration | {
"repo_name": "snadakuduru/camel",
"path": "components/camel-univocity-parsers/src/test/java/org/apache/camel/dataformat/univocity/UniVocityTsvDataFormatMarshalTest.java",
"license": "apache-2.0",
"size": 5635
} | [
"java.util.Arrays",
"org.apache.camel.dataformat.univocity.UniVocityTestHelper"
] | import java.util.Arrays; import org.apache.camel.dataformat.univocity.UniVocityTestHelper; | import java.util.*; import org.apache.camel.dataformat.univocity.*; | [
"java.util",
"org.apache.camel"
] | java.util; org.apache.camel; | 1,346,072 |
@Override
public void setFormat(int formatElementIndex, Format newFormat) {
throw new UnsupportedOperationException();
} | void function(int formatElementIndex, Format newFormat) { throw new UnsupportedOperationException(); } | /**
* Throws UnsupportedOperationException - see class Javadoc for details.
*
* @param formatElementIndex format element index
* @param newFormat the new format
* @throws UnsupportedOperationException
*/ | Throws UnsupportedOperationException - see class Javadoc for details | setFormat | {
"repo_name": "dpisarewski/gka_wise12",
"path": "src/org/apache/commons/lang3/text/ExtendedMessageFormat.java",
"license": "lgpl-2.1",
"size": 19145
} | [
"java.text.Format"
] | import java.text.Format; | import java.text.*; | [
"java.text"
] | java.text; | 811,260 |
protected static void clearView() {
new InstanceScope().getNode(Activator.getPluginId()).remove(
RepositoryUtil.PREFS_DIRECTORIES);
} | static void function() { new InstanceScope().getNode(Activator.getPluginId()).remove( RepositoryUtil.PREFS_DIRECTORIES); } | /**
* remove all configured repositories from the view
*/ | remove all configured repositories from the view | clearView | {
"repo_name": "rickard-von-essen/egit",
"path": "org.eclipse.egit.ui.test/src/org/eclipse/egit/ui/view/repositories/GitRepositoriesViewTestBase.java",
"license": "epl-1.0",
"size": 10374
} | [
"org.eclipse.core.runtime.preferences.InstanceScope",
"org.eclipse.egit.core.Activator",
"org.eclipse.egit.core.RepositoryUtil"
] | import org.eclipse.core.runtime.preferences.InstanceScope; import org.eclipse.egit.core.Activator; import org.eclipse.egit.core.RepositoryUtil; | import org.eclipse.core.runtime.preferences.*; import org.eclipse.egit.core.*; | [
"org.eclipse.core",
"org.eclipse.egit"
] | org.eclipse.core; org.eclipse.egit; | 2,050,041 |
configure();
SpringApplication.run(Bootstrap.class, args);
} | configure(); SpringApplication.run(Bootstrap.class, args); } | /**
* Alien 4 cloud standalone entry point.
*
* @param args Arguments that will be delegated to spring.
*/ | Alien 4 cloud standalone entry point | main | {
"repo_name": "broly-git/alien4cloud",
"path": "alien4cloud-rest-api/src/main/java/org/alien4cloud/bootstrap/Bootstrap.java",
"license": "apache-2.0",
"size": 4123
} | [
"org.springframework.boot.SpringApplication"
] | import org.springframework.boot.SpringApplication; | import org.springframework.boot.*; | [
"org.springframework.boot"
] | org.springframework.boot; | 2,367,327 |
public YangString getRegisterNameValue() throws JNCException {
return (YangString)getValue("register-name");
} | YangString function() throws JNCException { return (YangString)getValue(STR); } | /**
* Gets the value for child leaf "register-name".
* @return The value of the leaf.
*/ | Gets the value for child leaf "register-name" | getRegisterNameValue | {
"repo_name": "jnpr-shinma/yangfile",
"path": "hitel/src/hctaEpc/mmeSgsn/statistics/gprsSm/PrimaryActFail.java",
"license": "apache-2.0",
"size": 11425
} | [
"com.tailf.jnc.YangString"
] | import com.tailf.jnc.YangString; | import com.tailf.jnc.*; | [
"com.tailf.jnc"
] | com.tailf.jnc; | 2,141,165 |
String xml = "<!DOCTYPE foo ["
+ " <!ENTITY % a \"android\">"
+ "]><foo></foo>";
XmlPullParser parser = newPullParser(xml);
while (parser.next() != XmlPullParser.END_DOCUMENT) {
}
} | String xml = STR + STRandroid\">" + STR; XmlPullParser parser = newPullParser(xml); while (parser.next() != XmlPullParser.END_DOCUMENT) { } } | /**
* Android's Expat pull parser permits parameter entities to be declared,
* but it doesn't permit such entities to be used.
*/ | Android's Expat pull parser permits parameter entities to be declared, but it doesn't permit such entities to be used | testDeclaringParameterEntities | {
"repo_name": "AdmireTheDistance/android_libcore",
"path": "luni/src/test/java/libcore/xml/PullParserDtdTest.java",
"license": "gpl-2.0",
"size": 23713
} | [
"org.xmlpull.v1.XmlPullParser"
] | import org.xmlpull.v1.XmlPullParser; | import org.xmlpull.v1.*; | [
"org.xmlpull.v1"
] | org.xmlpull.v1; | 1,903,183 |
Map<String, String> getInvalid() throws ServiceException; | Map<String, String> getInvalid() throws ServiceException; | /**
* Get invalid Dictionary value
*
* @return the Map<String, String> object if successful.
* @throws ServiceException the exception wrapped in ServiceException if failed.
*/ | Get invalid Dictionary value | getInvalid | {
"repo_name": "BretJohnson/autorest",
"path": "AutoRest/Generators/Java/Java.Tests/src/main/java/fixtures/bodydictionary/Dictionary.java",
"license": "mit",
"size": 53782
} | [
"com.microsoft.rest.ServiceException",
"java.util.Map"
] | import com.microsoft.rest.ServiceException; import java.util.Map; | import com.microsoft.rest.*; import java.util.*; | [
"com.microsoft.rest",
"java.util"
] | com.microsoft.rest; java.util; | 549,178 |
if (WorldManager.getInstance() == null
|| WorldManager.getInstance().getWorld() == null
|| WorldManager.getInstance().getWorld()
.getModifierManager() == null) {
return;
}
World world = WorldManager.getInstance().getWorld();
ModifierManager modifierManager = world.getModifierManager();
modifierManager.purge("Seasonal");
HashMap<String, String> modifierSettings = new HashMap<>();
modifierSettings.put("target", "all");
modifierSettings.put("tag", "Seasonal");
modifierSettings.put("position", (new Point(0, 0)).toString());
modifierSettings.put("end",
(new Point(world.getWidth(), world.getHeight())).toString());
modifierSettings.put("shape", "square");
modifierSettings.put("attribute", "season");
modifierManager.addNewAttributeModifier(modifierSettings);
} | if (WorldManager.getInstance() == null WorldManager.getInstance().getWorld() == null WorldManager.getInstance().getWorld() .getModifierManager() == null) { return; } World world = WorldManager.getInstance().getWorld(); ModifierManager modifierManager = world.getModifierManager(); modifierManager.purge(STR); HashMap<String, String> modifierSettings = new HashMap<>(); modifierSettings.put(STR, "all"); modifierSettings.put("tag", STR); modifierSettings.put(STR, (new Point(0, 0)).toString()); modifierSettings.put("end", (new Point(world.getWidth(), world.getHeight())).toString()); modifierSettings.put("shape", STR); modifierSettings.put(STR, STR); modifierManager.addNewAttributeModifier(modifierSettings); } | /**
* Applies a season modifier to the world.
*/ | Applies a season modifier to the world | setModifier | {
"repo_name": "UQdeco2800/farmsim",
"path": "farmsim/src/main/java/farmsim/world/weather/Season.java",
"license": "mit",
"size": 2918
} | [
"java.util.HashMap"
] | import java.util.HashMap; | import java.util.*; | [
"java.util"
] | java.util; | 773,359 |
void cancel(int loaderID)
{
ImagesImporter loader = loaders.get(loaderID);
if (loader != null) {
//loader.cancel();
loaders.remove(loaderID);
}
}
| void cancel(int loaderID) { ImagesImporter loader = loaders.get(loaderID); if (loader != null) { loaders.remove(loaderID); } } | /**
* Cancels the specified on-going import.
*
* @param loaderID The identifier of the loader.
*/ | Cancels the specified on-going import | cancel | {
"repo_name": "tp81/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/agents/fsimporter/view/ImporterModel.java",
"license": "gpl-2.0",
"size": 21525
} | [
"org.openmicroscopy.shoola.agents.fsimporter.ImagesImporter"
] | import org.openmicroscopy.shoola.agents.fsimporter.ImagesImporter; | import org.openmicroscopy.shoola.agents.fsimporter.*; | [
"org.openmicroscopy.shoola"
] | org.openmicroscopy.shoola; | 760,230 |
public static OcdMetricType lookupType(Class<? extends OcdMetric> metricClass) {
for (OcdMetricType type : OcdMetricType.values()) {
if (metricClass == type.getMetricClass()) {
return type;
}
}
throw new InvalidParameterException();
}
| static OcdMetricType function(Class<? extends OcdMetric> metricClass) { for (OcdMetricType type : OcdMetricType.values()) { if (metricClass == type.getMetricClass()) { return type; } } throw new InvalidParameterException(); } | /**
* Returns the type corresponding to an OcdMetric class.
* @param metricClass The class.
* @return The corresponding type.
*/ | Returns the type corresponding to an OcdMetric class | lookupType | {
"repo_name": "rwth-acis/REST-OCD-Services",
"path": "rest_ocd_services/src/main/java/i5/las2peer/services/ocd/metrics/OcdMetricType.java",
"license": "apache-2.0",
"size": 4966
} | [
"java.security.InvalidParameterException"
] | import java.security.InvalidParameterException; | import java.security.*; | [
"java.security"
] | java.security; | 474,383 |
public Camera next (); | Camera function (); | /** Switch light
* @return Current camera */ | Switch light | next | {
"repo_name": "saltares/libgdx",
"path": "tests/gdx-tests/src/com/badlogic/gdx/tests/g3d/shadows/system/ShadowSystem.java",
"license": "apache-2.0",
"size": 4720
} | [
"com.badlogic.gdx.graphics.Camera"
] | import com.badlogic.gdx.graphics.Camera; | import com.badlogic.gdx.graphics.*; | [
"com.badlogic.gdx"
] | com.badlogic.gdx; | 1,504,149 |
@Deprecated
void addTestDir(File dir) {
throw modificationNotPermitted();
} | void addTestDir(File dir) { throw modificationNotPermitted(); } | /**
* Should not be used - only for old plugins
*
* @deprecated since 4.0
*/ | Should not be used - only for old plugins | addTestDir | {
"repo_name": "jblievremont/sonarqube",
"path": "sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/DefaultModuleFileSystem.java",
"license": "lgpl-3.0",
"size": 8464
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 229,420 |
public void shutdownServer(boolean immediate) {
// Unregister the NFS server with the portmapper
try {
PortMapping[] mappings = new PortMapping[2];
mappings[0] = new PortMapping(NFS.ProgramId, NFS.VersionId, Rpc.UDP, m_udpHandler.getPort());
mappings[1] = new PortMapping(NFS.ProgramId, NFS.VersionId, Rpc.TCP, m_tcpHandler.getPort());
unregisterRPCServer(mappings);
}
catch ( IOException ex) {
// DEBUG
if ( hasDebugFlag(DBG_ERROR))
Debug.println( ex);
}
// Stop the RPC handlers
if (m_udpHandler != null) {
m_udpHandler.closeSessionHandler(this);
m_udpHandler = null;
}
if (m_tcpHandler != null) {
m_tcpHandler.closeSessionHandler(this);
m_tcpHandler = null;
}
// Stop the thread pool
m_threadPool.shutdownThreadPool();
// Fire a shutdown notification event
fireServerEvent(ServerListener.ServerShutdown);
// Indicate the server has been shutdown
setActive( false);
} | void function(boolean immediate) { try { PortMapping[] mappings = new PortMapping[2]; mappings[0] = new PortMapping(NFS.ProgramId, NFS.VersionId, Rpc.UDP, m_udpHandler.getPort()); mappings[1] = new PortMapping(NFS.ProgramId, NFS.VersionId, Rpc.TCP, m_tcpHandler.getPort()); unregisterRPCServer(mappings); } catch ( IOException ex) { if ( hasDebugFlag(DBG_ERROR)) Debug.println( ex); } if (m_udpHandler != null) { m_udpHandler.closeSessionHandler(this); m_udpHandler = null; } if (m_tcpHandler != null) { m_tcpHandler.closeSessionHandler(this); m_tcpHandler = null; } m_threadPool.shutdownThreadPool(); fireServerEvent(ServerListener.ServerShutdown); setActive( false); } | /**
* Shutdown the NFS server
*
* @param immediate boolean
*/ | Shutdown the NFS server | shutdownServer | {
"repo_name": "loftuxab/community-edition-old",
"path": "projects/alfresco-jlan/source/java/org/alfresco/jlan/oncrpc/nfs/NFSServer.java",
"license": "lgpl-3.0",
"size": 140029
} | [
"java.io.IOException",
"org.alfresco.jlan.debug.Debug",
"org.alfresco.jlan.oncrpc.PortMapping",
"org.alfresco.jlan.oncrpc.Rpc",
"org.alfresco.jlan.server.ServerListener"
] | import java.io.IOException; import org.alfresco.jlan.debug.Debug; import org.alfresco.jlan.oncrpc.PortMapping; import org.alfresco.jlan.oncrpc.Rpc; import org.alfresco.jlan.server.ServerListener; | import java.io.*; import org.alfresco.jlan.debug.*; import org.alfresco.jlan.oncrpc.*; import org.alfresco.jlan.server.*; | [
"java.io",
"org.alfresco.jlan"
] | java.io; org.alfresco.jlan; | 1,629,295 |
protected BufferedImage create_POINTER_Image(final int WIDTH, final PointerType POINTER_TYPE, final ColorDef POINTER_COLOR) {
return POINTER_FACTORY.createStandardPointer(WIDTH, POINTER_TYPE, POINTER_COLOR, getBackgroundColor());
} | BufferedImage function(final int WIDTH, final PointerType POINTER_TYPE, final ColorDef POINTER_COLOR) { return POINTER_FACTORY.createStandardPointer(WIDTH, POINTER_TYPE, POINTER_COLOR, getBackgroundColor()); } | /**
* Returns the image of the pointer. This pointer is centered in the gauge.
* @param WIDTH
* @param POINTER_TYPE
* @param POINTER_COLOR
* @return the pointer image that is used in all gauges that have a centered pointer
*/ | Returns the image of the pointer. This pointer is centered in the gauge | create_POINTER_Image | {
"repo_name": "hervegirod/j6dof-flight-sim",
"path": "src/steelseries/eu/hansolo/steelseries/gauges/AbstractRadial.java",
"license": "gpl-3.0",
"size": 156025
} | [
"eu.hansolo.steelseries.tools.ColorDef",
"eu.hansolo.steelseries.tools.PointerType",
"java.awt.image.BufferedImage"
] | import eu.hansolo.steelseries.tools.ColorDef; import eu.hansolo.steelseries.tools.PointerType; import java.awt.image.BufferedImage; | import eu.hansolo.steelseries.tools.*; import java.awt.image.*; | [
"eu.hansolo.steelseries",
"java.awt"
] | eu.hansolo.steelseries; java.awt; | 321,422 |
public ServiceProcessorAdapter service(); | ServiceProcessorAdapter function(); | /**
* Gets service processor.
*
* @return Service processor.
*/ | Gets service processor | service | {
"repo_name": "ptupitsyn/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/GridKernalContext.java",
"license": "apache-2.0",
"size": 20940
} | [
"org.apache.ignite.internal.processors.service.ServiceProcessorAdapter"
] | import org.apache.ignite.internal.processors.service.ServiceProcessorAdapter; | import org.apache.ignite.internal.processors.service.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 1,045,622 |
public void addToQueue(FECJob job, FECQueue queue, ObjectContainer container) {
queue.addToQueue(job, this, container);
}
| void function(FECJob job, FECQueue queue, ObjectContainer container) { queue.addToQueue(job, this, container); } | /**
* The method used to submit {@link FECJob}s to the pool
*
* @author Florent Daignière <[email protected]>
*
* @param FECJob
*/ | The method used to submit <code>FECJob</code>s to the pool | addToQueue | {
"repo_name": "vwoodzell/fred-staging",
"path": "src/freenet/client/FECCodec.java",
"license": "gpl-2.0",
"size": 16576
} | [
"com.db4o.ObjectContainer"
] | import com.db4o.ObjectContainer; | import com.db4o.*; | [
"com.db4o"
] | com.db4o; | 267,090 |
BatchConfiguration refresh(Context context); | BatchConfiguration refresh(Context context); | /**
* Refreshes the resource to sync with Azure.
*
* @param context The context to associate with this operation.
* @return the refreshed resource.
*/ | Refreshes the resource to sync with Azure | refresh | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/logic/azure-resourcemanager-logic/src/main/java/com/azure/resourcemanager/logic/models/BatchConfiguration.java",
"license": "mit",
"size": 7273
} | [
"com.azure.core.util.Context"
] | import com.azure.core.util.Context; | import com.azure.core.util.*; | [
"com.azure.core"
] | com.azure.core; | 423,095 |
public IPropertyDescriptor getSourceDescriptor() {
return sourceDescriptor;
} | IPropertyDescriptor function() { return sourceDescriptor; } | /**
* Gets the sourceDescriptor.
*
* @return the sourceDescriptor.
*/ | Gets the sourceDescriptor | getSourceDescriptor | {
"repo_name": "jspresso/jspresso-ce",
"path": "model/src/main/java/org/jspresso/framework/model/component/query/ComparableQueryStructure.java",
"license": "lgpl-3.0",
"size": 13243
} | [
"org.jspresso.framework.model.descriptor.IPropertyDescriptor"
] | import org.jspresso.framework.model.descriptor.IPropertyDescriptor; | import org.jspresso.framework.model.descriptor.*; | [
"org.jspresso.framework"
] | org.jspresso.framework; | 1,162,788 |
public BlockPos up(int n)
{
return this.offset(EnumFacing.UP, n);
} | BlockPos function(int n) { return this.offset(EnumFacing.UP, n); } | /**
* Offset this BlockPos n blocks up
*/ | Offset this BlockPos n blocks up | up | {
"repo_name": "TheValarProject/AwakenDreamsClient",
"path": "mcp/src/minecraft/net/minecraft/util/math/BlockPos.java",
"license": "gpl-3.0",
"size": 16631
} | [
"net.minecraft.util.EnumFacing"
] | import net.minecraft.util.EnumFacing; | import net.minecraft.util.*; | [
"net.minecraft.util"
] | net.minecraft.util; | 1,210,316 |
@SuppressWarnings("unchecked")
public final Request masterNodeTimeout(TimeValue timeout) {
this.masterNodeTimeout = timeout;
return (Request) this;
} | @SuppressWarnings(STR) final Request function(TimeValue timeout) { this.masterNodeTimeout = timeout; return (Request) this; } | /**
* A timeout value in case the master has not been discovered yet or disconnected.
*/ | A timeout value in case the master has not been discovered yet or disconnected | masterNodeTimeout | {
"repo_name": "s1monw/elasticsearch",
"path": "server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java",
"license": "apache-2.0",
"size": 2725
} | [
"org.elasticsearch.common.unit.TimeValue"
] | import org.elasticsearch.common.unit.TimeValue; | import org.elasticsearch.common.unit.*; | [
"org.elasticsearch.common"
] | org.elasticsearch.common; | 118,012 |
@ApiModelProperty(value = "The employees current balance for the corresponding leave type.")
public Double getBalance() {
return balance;
} | @ApiModelProperty(value = STR) Double function() { return balance; } | /**
* The employees current balance for the corresponding leave type.
*
* @return balance
*/ | The employees current balance for the corresponding leave type | getBalance | {
"repo_name": "SidneyAllen/Xero-Java",
"path": "src/main/java/com/xero/models/payrolluk/EmployeeLeaveBalance.java",
"license": "mit",
"size": 3971
} | [
"io.swagger.annotations.ApiModelProperty"
] | import io.swagger.annotations.ApiModelProperty; | import io.swagger.annotations.*; | [
"io.swagger.annotations"
] | io.swagger.annotations; | 2,360,892 |
CloseableReference<Bitmap> decodeJPEGFromEncodedImage(EncodedImage encodedImage, int length) {
boolean isJpegComplete = encodedImage.isCompleteAt(length);
final BitmapFactory.Options options = getDecodeOptionsForStream(encodedImage);
InputStream jpegDataStream = encodedImage.getInputStream();
// At this point the InputStream from the encoded image should not be null since in the
// pipeline,this comes from a call stack where this was checked before. Also this method needs
// the InputStream to decode the image so this can't be null.
Preconditions.checkNotNull(jpegDataStream);
if (encodedImage.getSize() > length) {
jpegDataStream = new LimitedInputStream(jpegDataStream, length);
}
if (!isJpegComplete) {
jpegDataStream = new TailAppendingInputStream(jpegDataStream, EOI_TAIL);
}
return decodeStaticImageFromStream(jpegDataStream, options);
} | CloseableReference<Bitmap> decodeJPEGFromEncodedImage(EncodedImage encodedImage, int length) { boolean isJpegComplete = encodedImage.isCompleteAt(length); final BitmapFactory.Options options = getDecodeOptionsForStream(encodedImage); InputStream jpegDataStream = encodedImage.getInputStream(); Preconditions.checkNotNull(jpegDataStream); if (encodedImage.getSize() > length) { jpegDataStream = new LimitedInputStream(jpegDataStream, length); } if (!isJpegComplete) { jpegDataStream = new TailAppendingInputStream(jpegDataStream, EOI_TAIL); } return decodeStaticImageFromStream(jpegDataStream, options); } | /**
* Creates a bitmap from encoded JPEG bytes. Supports a partial JPEG image.
*
* @param encodedImage the encoded image with reference to the encoded bytes
* @param length the number of encoded bytes in the buffer
* @return the bitmap
* @throws java.lang.OutOfMemoryError if the Bitmap cannot be allocated
*/ | Creates a bitmap from encoded JPEG bytes. Supports a partial JPEG image | decodeJPEGFromEncodedImage | {
"repo_name": "nickhargreaves/fresco",
"path": "imagepipeline/src/main/java/com/facebook/imagepipeline/bitmaps/ArtBitmapFactory.java",
"license": "bsd-3-clause",
"size": 6499
} | [
"android.graphics.Bitmap",
"android.graphics.BitmapFactory",
"com.facebook.common.internal.Preconditions",
"com.facebook.common.references.CloseableReference",
"com.facebook.common.streams.LimitedInputStream",
"com.facebook.common.streams.TailAppendingInputStream",
"com.facebook.imagepipeline.image.EncodedImage",
"java.io.InputStream"
] | import android.graphics.Bitmap; import android.graphics.BitmapFactory; import com.facebook.common.internal.Preconditions; import com.facebook.common.references.CloseableReference; import com.facebook.common.streams.LimitedInputStream; import com.facebook.common.streams.TailAppendingInputStream; import com.facebook.imagepipeline.image.EncodedImage; import java.io.InputStream; | import android.graphics.*; import com.facebook.common.internal.*; import com.facebook.common.references.*; import com.facebook.common.streams.*; import com.facebook.imagepipeline.image.*; import java.io.*; | [
"android.graphics",
"com.facebook.common",
"com.facebook.imagepipeline",
"java.io"
] | android.graphics; com.facebook.common; com.facebook.imagepipeline; java.io; | 1,474,380 |
EReference getAnonymous_traceDirective_1__TracePeriod_1(); | EReference getAnonymous_traceDirective_1__TracePeriod_1(); | /**
* Returns the meta object for the containment reference list '{@link cruise.umple.umple.Anonymous_traceDirective_1_#getTracePeriod_1 <em>Trace Period 1</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference list '<em>Trace Period 1</em>'.
* @see cruise.umple.umple.Anonymous_traceDirective_1_#getTracePeriod_1()
* @see #getAnonymous_traceDirective_1_()
* @generated
*/ | Returns the meta object for the containment reference list '<code>cruise.umple.umple.Anonymous_traceDirective_1_#getTracePeriod_1 Trace Period 1</code>'. | getAnonymous_traceDirective_1__TracePeriod_1 | {
"repo_name": "ahmedvc/umple",
"path": "cruise.umple.xtext/src-gen/cruise/umple/umple/UmplePackage.java",
"license": "mit",
"size": 485842
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 392,369 |
long writeGlobalState(String reason, MetaData metaData) throws WriteStateException {
logger.trace("[_global] writing state, reason [{}]", reason);
try {
long generation = META_DATA_FORMAT.write(metaData, nodeEnv.nodeDataPaths());
logger.trace("[_global] state written");
return generation;
} catch (WriteStateException ex) {
throw new WriteStateException(false, "[_global]: failed to write global state", ex);
}
} | long writeGlobalState(String reason, MetaData metaData) throws WriteStateException { logger.trace(STR, reason); try { long generation = META_DATA_FORMAT.write(metaData, nodeEnv.nodeDataPaths()); logger.trace(STR); return generation; } catch (WriteStateException ex) { throw new WriteStateException(false, STR, ex); } } | /**
* Writes the global state, *without* the indices states.
*
* @throws WriteStateException if exception when writing state occurs. {@link WriteStateException#isDirty()} will always return
* false, because new global state file is not yet referenced by manifest file.
*/ | Writes the global state, *without* the indices states | writeGlobalState | {
"repo_name": "coding0011/elasticsearch",
"path": "server/src/main/java/org/elasticsearch/gateway/MetaStateService.java",
"license": "apache-2.0",
"size": 14194
} | [
"org.elasticsearch.cluster.metadata.MetaData"
] | import org.elasticsearch.cluster.metadata.MetaData; | import org.elasticsearch.cluster.metadata.*; | [
"org.elasticsearch.cluster"
] | org.elasticsearch.cluster; | 2,322,211 |
public Map getSubmitData(String publishedId, String agentId,Integer scoringoption)
{
try
{
GradingService service = new GradingService();
return service.getSubmitData(publishedId, agentId,scoringoption, null);
}
catch (Exception ex)
{
throw new GradingServiceException(ex);
}
} | Map function(String publishedId, String agentId,Integer scoringoption) { try { GradingService service = new GradingService(); return service.getSubmitData(publishedId, agentId,scoringoption, null); } catch (Exception ex) { throw new GradingServiceException(ex); } } | /**
* Get the last submission for a student per assessment
*/ | Get the last submission for a student per assessment | getSubmitData | {
"repo_name": "OpenCollabZA/sakai",
"path": "samigo/samigo-services/src/java/org/sakaiproject/tool/assessment/shared/impl/grading/GradingServiceImpl.java",
"license": "apache-2.0",
"size": 10365
} | [
"java.util.Map",
"org.sakaiproject.tool.assessment.services.GradingService",
"org.sakaiproject.tool.assessment.services.GradingServiceException"
] | import java.util.Map; import org.sakaiproject.tool.assessment.services.GradingService; import org.sakaiproject.tool.assessment.services.GradingServiceException; | import java.util.*; import org.sakaiproject.tool.assessment.services.*; | [
"java.util",
"org.sakaiproject.tool"
] | java.util; org.sakaiproject.tool; | 2,053,974 |
void delete(SQLiteDatabase db) {
db.delete(name, null, null);
} | void delete(SQLiteDatabase db) { db.delete(name, null, null); } | /**
* Delete all the rows of the table.
*
* @param db
* the SQLite database.
*/ | Delete all the rows of the table | delete | {
"repo_name": "ybonnel/AndroidYboDb",
"path": "AndroidYboDb/src/fr/ybo/database/modele/Table.java",
"license": "lgpl-3.0",
"size": 11890
} | [
"android.database.sqlite.SQLiteDatabase"
] | import android.database.sqlite.SQLiteDatabase; | import android.database.sqlite.*; | [
"android.database"
] | android.database; | 2,356,853 |
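As a usage illustration for the record above, here is a minimal sketch of what Table.delete(db) wraps: SQLiteDatabase.delete with a null WHERE clause removes every row while keeping the schema. The table name is a placeholder, not one from the fr.ybo.database project.

import android.database.sqlite.SQLiteDatabase;

// Minimal sketch, not part of the original library.
public final class DeleteAllRowsSketch {
    private DeleteAllRowsSketch() {
    }

    // A null whereClause matches every row; pass "1" instead if the affected-row count is needed.
    public static void deleteAllRows(SQLiteDatabase db, String tableName) {
        db.delete(tableName, null, null);
    }
}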
protected void reloadSwipeStateInView(View frontView, int position) {
if (!opened.get(position)) {
ViewCompat.setTranslationX(frontView, 0.0f);
} else {
if (openedRight.get(position)) {
ViewCompat.setTranslationX(frontView, swipeListView.getWidth());
} else {
ViewCompat.setTranslationX(frontView, -swipeListView.getWidth());
}
}
} | void function(View frontView, int position) { if (!opened.get(position)) { ViewCompat.setTranslationX(frontView, 0.0f); } else { if (openedRight.get(position)) { ViewCompat.setTranslationX(frontView, swipeListView.getWidth()); } else { ViewCompat.setTranslationX(frontView, -swipeListView.getWidth()); } } } | /**
* Reset the state of front view when the it's recycled by ListView
*
* @param frontView view to re-draw
* @param position na
 */ | Reset the state of the front view when it's recycled by the ListView | reloadSwipeStateInView | {
"repo_name": "HKMOpen/UltimateRecyclerView",
"path": "UltimateRecyclerView/ultimaterecyclerview/src/main/java/com/marshalchen/ultimaterecyclerview/swipelistview/SwipeListViewTouchListener.java",
"license": "apache-2.0",
"size": 43091
} | [
"android.support.v4.view.ViewCompat",
"android.view.View"
] | import android.support.v4.view.ViewCompat; import android.view.View; | import android.support.v4.view.*; import android.view.*; | [
"android.support",
"android.view"
] | android.support; android.view; | 1,026,521 |
public static SymlinkTree createSharedLibrarySymlinkTree(
BuildTarget baseBuildTarget,
ProjectFilesystem filesystem,
CxxPlatform cxxPlatform,
Iterable<? extends BuildRule> deps,
Predicate<Object> traverse,
Predicate<Object> skip) {
BuildTarget symlinkTreeTarget =
createSharedLibrarySymlinkTreeTarget(baseBuildTarget, cxxPlatform.getFlavor());
Path symlinkTreeRoot =
getSharedLibrarySymlinkTreePath(filesystem, baseBuildTarget, cxxPlatform.getFlavor());
ImmutableSortedMap<String, SourcePath> libraries =
NativeLinkables.getTransitiveSharedLibraries(cxxPlatform, deps, traverse, skip);
ImmutableMap.Builder<Path, SourcePath> links = ImmutableMap.builder();
for (Map.Entry<String, SourcePath> ent : libraries.entrySet()) {
links.put(Paths.get(ent.getKey()), ent.getValue());
}
return new SymlinkTree(symlinkTreeTarget, filesystem, symlinkTreeRoot, links.build());
} | static SymlinkTree function( BuildTarget baseBuildTarget, ProjectFilesystem filesystem, CxxPlatform cxxPlatform, Iterable<? extends BuildRule> deps, Predicate<Object> traverse, Predicate<Object> skip) { BuildTarget symlinkTreeTarget = createSharedLibrarySymlinkTreeTarget(baseBuildTarget, cxxPlatform.getFlavor()); Path symlinkTreeRoot = getSharedLibrarySymlinkTreePath(filesystem, baseBuildTarget, cxxPlatform.getFlavor()); ImmutableSortedMap<String, SourcePath> libraries = NativeLinkables.getTransitiveSharedLibraries(cxxPlatform, deps, traverse, skip); ImmutableMap.Builder<Path, SourcePath> links = ImmutableMap.builder(); for (Map.Entry<String, SourcePath> ent : libraries.entrySet()) { links.put(Paths.get(ent.getKey()), ent.getValue()); } return new SymlinkTree(symlinkTreeTarget, filesystem, symlinkTreeRoot, links.build()); } | /**
* Build a {@link HeaderSymlinkTree} of all the shared libraries found via the top-level rule's
* transitive dependencies.
*/ | Build a <code>HeaderSymlinkTree</code> of all the shared libraries found via the top-level rule's transitive dependencies | createSharedLibrarySymlinkTree | {
"repo_name": "shybovycha/buck",
"path": "src/com/facebook/buck/cxx/CxxDescriptionEnhancer.java",
"license": "apache-2.0",
"size": 52897
} | [
"com.facebook.buck.cxx.toolchain.CxxPlatform",
"com.facebook.buck.cxx.toolchain.nativelink.NativeLinkables",
"com.facebook.buck.io.filesystem.ProjectFilesystem",
"com.facebook.buck.model.BuildTarget",
"com.facebook.buck.rules.BuildRule",
"com.facebook.buck.rules.SourcePath",
"com.facebook.buck.rules.SymlinkTree",
"com.google.common.base.Predicate",
"com.google.common.collect.ImmutableMap",
"com.google.common.collect.ImmutableSortedMap",
"java.nio.file.Path",
"java.nio.file.Paths",
"java.util.Map"
] | import com.facebook.buck.cxx.toolchain.CxxPlatform; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkables; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SymlinkTree; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedMap; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Map; | import com.facebook.buck.cxx.toolchain.*; import com.facebook.buck.cxx.toolchain.nativelink.*; import com.facebook.buck.io.filesystem.*; import com.facebook.buck.model.*; import com.facebook.buck.rules.*; import com.google.common.base.*; import com.google.common.collect.*; import java.nio.file.*; import java.util.*; | [
"com.facebook.buck",
"com.google.common",
"java.nio",
"java.util"
] | com.facebook.buck; com.google.common; java.nio; java.util; | 2,660,301 |
private ArrayList<Instruction> getCell2BlockInstructions(
ArrayList<DataIdentifier> outputParams,
HashMap<String, String> blockedFileNames) {
ArrayList<Instruction> c2binst = null;
//list of matrices that need to be reblocked
ArrayList<DataIdentifier> matrices = new ArrayList<DataIdentifier>();
ArrayList<DataIdentifier> matricesNoReblock = new ArrayList<DataIdentifier>();
// identify outputs that are matrices
for (int i = 0; i < outputParams.size(); i++) {
if (outputParams.get(i).getDataType() == DataType.MATRIX) {
if( _skipOutReblock.contains(outputParams.get(i).getName()) )
matricesNoReblock.add(outputParams.get(i));
else
matrices.add(outputParams.get(i));
}
}
if( !matrices.isEmpty() )
{
c2binst = new ArrayList<Instruction>();
MRJobInstruction reblkInst = new MRJobInstruction(JobType.REBLOCK);
TreeMap<Integer, ArrayList<String>> MRJobLineNumbers = null;
if(DMLScript.ENABLE_DEBUG_MODE) {
MRJobLineNumbers = new TreeMap<Integer, ArrayList<String>>();
}
ArrayList<String> inLabels = new ArrayList<String>();
ArrayList<String> outLabels = new ArrayList<String>();
String[] outputs = new String[matrices.size()];
byte[] resultIndex = new byte[matrices.size()];
String reblock = "";
String reblockStr = ""; //Keep a copy of a single MR reblock instruction
String scratchSpaceLoc = ConfigurationManager.getConfig().getTextValue(DMLConfig.SCRATCH_SPACE);
try {
// create a RBLK job that transforms each output matrix from cell to block
for (int i = 0; i < matrices.size(); i++) {
inLabels.add(matrices.get(i).getName());
outLabels.add(matrices.get(i).getName() + "_extFnOutput");
outputs[i] = scratchSpaceLoc +
Lop.FILE_SEPARATOR + Lop.PROCESS_PREFIX + DMLScript.getUUID() + Lop.FILE_SEPARATOR +
_otherParams.get(ExternalFunctionStatement.CLASS_NAME) + _runID + "_" + i + "Output";
blockedFileNames.put(matrices.get(i).getName(), outputs[i]);
resultIndex[i] = (byte) i; // (matrices.size()+i);
if (i > 0)
reblock += Lop.INSTRUCTION_DELIMITOR;
reblock += "MR" + ReBlock.OPERAND_DELIMITOR + "rblk" + ReBlock.OPERAND_DELIMITOR +
i + ReBlock.DATATYPE_PREFIX + matrices.get(i).getDataType() + ReBlock.VALUETYPE_PREFIX + matrices.get(i).getValueType() + ReBlock.OPERAND_DELIMITOR +
i + ReBlock.DATATYPE_PREFIX + matrices.get(i).getDataType() + ReBlock.VALUETYPE_PREFIX + matrices.get(i).getValueType() + ReBlock.OPERAND_DELIMITOR +
DMLTranslator.DMLBlockSize + ReBlock.OPERAND_DELIMITOR + DMLTranslator.DMLBlockSize + ReBlock.OPERAND_DELIMITOR + "true";
if(DMLScript.ENABLE_DEBUG_MODE) {
//Create a copy of reblock instruction but as a single instruction (FOR DEBUGGER)
reblockStr = "MR" + ReBlock.OPERAND_DELIMITOR + "rblk" + ReBlock.OPERAND_DELIMITOR +
i + ReBlock.DATATYPE_PREFIX + matrices.get(i).getDataType() + ReBlock.VALUETYPE_PREFIX + matrices.get(i).getValueType() + ReBlock.OPERAND_DELIMITOR +
i + ReBlock.DATATYPE_PREFIX + matrices.get(i).getDataType() + ReBlock.VALUETYPE_PREFIX + matrices.get(i).getValueType() + ReBlock.OPERAND_DELIMITOR +
DMLTranslator.DMLBlockSize + ReBlock.OPERAND_DELIMITOR + DMLTranslator.DMLBlockSize + ReBlock.OPERAND_DELIMITOR + "true";
//Set MR reblock instruction line number (FOR DEBUGGER)
if (!MRJobLineNumbers.containsKey(matrices.get(i).getBeginLine())) {
MRJobLineNumbers.put(matrices.get(i).getBeginLine(), new ArrayList<String>());
}
MRJobLineNumbers.get(matrices.get(i).getBeginLine()).add(reblockStr);
}
// create metadata instructions to populate symbol table
// with variables that hold blocked matrices
Instruction createInst = VariableCPInstruction.prepareCreateVariableInstruction(outLabels.get(i), outputs[i], false, OutputInfo.outputInfoToString(OutputInfo.BinaryBlockOutputInfo));
createInst.setLocation(matrices.get(i));
c2binst.add(createInst);
}
reblkInst.setReBlockInstructions(inLabels.toArray(new String[inLabels.size()]), "", reblock, "",
outLabels.toArray(new String[inLabels.size()]), resultIndex, 1, 1);
c2binst.add(reblkInst);
// generate instructions that rename the output variables of REBLOCK job
Instruction cpInst = null, rmInst = null;
for (int i = 0; i < matrices.size(); i++) {
cpInst = VariableCPInstruction.prepareCopyInstruction(outLabels.get(i), matrices.get(i).getName());
rmInst = VariableCPInstruction.prepareRemoveInstruction(outLabels.get(i));
cpInst.setLocation(matrices.get(i));
rmInst.setLocation(matrices.get(i));
c2binst.add(cpInst);
c2binst.add(rmInst);
//c2binst.add(CPInstructionParser.parseSingleInstruction("CP" + Lops.OPERAND_DELIMITOR + "cpvar"+Lops.OPERAND_DELIMITOR+ outLabels.get(i) + Lops.OPERAND_DELIMITOR + matrices.get(i).getName()));
}
} catch (Exception e) {
throw new PackageRuntimeException(this.printBlockErrorLocation() + "error generating instructions", e);
}
//LOGGING instructions
if (LOG.isTraceEnabled()){
LOG.trace("\n--- Cell-2-Block Instructions ---");
for(Instruction i : c2binst) {
LOG.trace(i.toString());
}
LOG.trace("----------------------------------");
}
}
return c2binst; //null if no output matrices
} | ArrayList<Instruction> function( ArrayList<DataIdentifier> outputParams, HashMap<String, String> blockedFileNames) { ArrayList<Instruction> c2binst = null; ArrayList<DataIdentifier> matrices = new ArrayList<DataIdentifier>(); ArrayList<DataIdentifier> matricesNoReblock = new ArrayList<DataIdentifier>(); for (int i = 0; i < outputParams.size(); i++) { if (outputParams.get(i).getDataType() == DataType.MATRIX) { if( _skipOutReblock.contains(outputParams.get(i).getName()) ) matricesNoReblock.add(outputParams.get(i)); else matrices.add(outputParams.get(i)); } } if( !matrices.isEmpty() ) { c2binst = new ArrayList<Instruction>(); MRJobInstruction reblkInst = new MRJobInstruction(JobType.REBLOCK); TreeMap<Integer, ArrayList<String>> MRJobLineNumbers = null; if(DMLScript.ENABLE_DEBUG_MODE) { MRJobLineNumbers = new TreeMap<Integer, ArrayList<String>>(); } ArrayList<String> inLabels = new ArrayList<String>(); ArrayList<String> outLabels = new ArrayList<String>(); String[] outputs = new String[matrices.size()]; byte[] resultIndex = new byte[matrices.size()]; String reblock = STRSTR_extFnOutputSTR_STROutputSTRMRSTRrblkSTRtrueSTRMRSTRrblkSTRtrueSTRSTRSTRerror generating instructionsSTR\n--- Cell-2-Block Instructions ---STR----------------------------------"); } } return c2binst; } | /**
* Method to generate a reblock job to convert the cell representation into block representation
* @param outputParams
* @param blockedFileNames
* @return
*/ | Method to generate a reblock job to convert the cell representation into block representation | getCell2BlockInstructions | {
"repo_name": "Myasuka/systemml",
"path": "src/main/java/org/apache/sysml/runtime/controlprogram/ExternalFunctionProgramBlock.java",
"license": "apache-2.0",
"size": 36240
} | [
"java.util.ArrayList",
"java.util.HashMap",
"java.util.TreeMap",
"org.apache.sysml.api.DMLScript",
"org.apache.sysml.lops.compile.JobType",
"org.apache.sysml.parser.DataIdentifier",
"org.apache.sysml.parser.Expression",
"org.apache.sysml.runtime.instructions.Instruction",
"org.apache.sysml.runtime.instructions.MRJobInstruction"
] | import java.util.ArrayList; import java.util.HashMap; import java.util.TreeMap; import org.apache.sysml.api.DMLScript; import org.apache.sysml.lops.compile.JobType; import org.apache.sysml.parser.DataIdentifier; import org.apache.sysml.parser.Expression; import org.apache.sysml.runtime.instructions.Instruction; import org.apache.sysml.runtime.instructions.MRJobInstruction; | import java.util.*; import org.apache.sysml.api.*; import org.apache.sysml.lops.compile.*; import org.apache.sysml.parser.*; import org.apache.sysml.runtime.instructions.*; | [
"java.util",
"org.apache.sysml"
] | java.util; org.apache.sysml; | 1,037,769 |
@Test
public void testBgpRoutePreference() throws InterruptedException {
ChannelBuffer message;
BgpRouteEntry bgpRouteEntry;
Collection<BgpRouteEntry> bgpRibIn1;
Collection<BgpRouteEntry> bgpRibIn2;
Collection<BgpRouteEntry> bgpRibIn3;
Collection<BgpRouteEntry> bgpRoutes;
Collection<Ip4Prefix> addedRoutes = new LinkedList<>();
Collection<Ip4Prefix> withdrawnRoutes = new LinkedList<>();
// Initiate the connections
peer1.connect(connectToSocket);
peer2.connect(connectToSocket);
peer3.connect(connectToSocket);
//
// Setup the initial set of routes to Peer1
//
addedRoutes.add(Ip4Prefix.valueOf("20.0.0.0/8"));
addedRoutes.add(Ip4Prefix.valueOf("30.0.0.0/16"));
// Write the routes
message = peer1.peerChannelHandler.prepareBgpUpdate(
NEXT_HOP1_ROUTER,
DEFAULT_LOCAL_PREF,
DEFAULT_MULTI_EXIT_DISC,
asPathLong,
addedRoutes,
withdrawnRoutes);
peer1.peerChannelHandler.savedCtx.getChannel().write(message);
bgpRoutes = waitForBgpRoutes(2);
assertThat(bgpRoutes, hasSize(2));
//
// Add a route entry to Peer2 with a better LOCAL_PREF
//
addedRoutes = new LinkedList<>();
withdrawnRoutes = new LinkedList<>();
addedRoutes.add(Ip4Prefix.valueOf("20.0.0.0/8"));
// Write the routes
message = peer2.peerChannelHandler.prepareBgpUpdate(
NEXT_HOP2_ROUTER,
BETTER_LOCAL_PREF,
DEFAULT_MULTI_EXIT_DISC,
asPathLong,
addedRoutes,
withdrawnRoutes);
peer2.peerChannelHandler.savedCtx.getChannel().write(message);
//
// Check that the routes have been received, processed and stored
//
bgpRibIn2 = waitForBgpRibIn(bgpSession2, 1);
assertThat(bgpRibIn2, hasSize(1));
bgpRoutes = waitForBgpRoutes(2);
assertThat(bgpRoutes, hasSize(2));
//
bgpRouteEntry =
new BgpRouteEntry(bgpSession2,
Ip4Prefix.valueOf("20.0.0.0/8"),
NEXT_HOP2_ROUTER,
(byte) BgpConstants.Update.Origin.IGP,
asPathLong,
BETTER_LOCAL_PREF);
bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC);
assertThat(bgpRibIn2, hasBgpRouteEntry(bgpRouteEntry));
assertThat(waitForBgpRoute(bgpRouteEntry), notNullValue());
//
// Add a route entry to Peer3 with a shorter AS path
//
addedRoutes = new LinkedList<>();
withdrawnRoutes = new LinkedList<>();
addedRoutes.add(Ip4Prefix.valueOf("20.0.0.0/8"));
// Write the routes
message = peer3.peerChannelHandler.prepareBgpUpdate(
NEXT_HOP3_ROUTER,
BETTER_LOCAL_PREF,
DEFAULT_MULTI_EXIT_DISC,
asPathShort,
addedRoutes,
withdrawnRoutes);
peer3.peerChannelHandler.savedCtx.getChannel().write(message);
//
// Check that the routes have been received, processed and stored
//
bgpRibIn3 = waitForBgpRibIn(bgpSession3, 1);
assertThat(bgpRibIn3, hasSize(1));
bgpRoutes = waitForBgpRoutes(2);
assertThat(bgpRoutes, hasSize(2));
//
bgpRouteEntry =
new BgpRouteEntry(bgpSession3,
Ip4Prefix.valueOf("20.0.0.0/8"),
NEXT_HOP3_ROUTER,
(byte) BgpConstants.Update.Origin.IGP,
asPathShort,
BETTER_LOCAL_PREF);
bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC);
assertThat(bgpRibIn3, hasBgpRouteEntry(bgpRouteEntry));
assertThat(waitForBgpRoute(bgpRouteEntry), notNullValue());
//
// Cleanup in preparation for next test: delete old route entry from
// Peer2
//
addedRoutes = new LinkedList<>();
withdrawnRoutes = new LinkedList<>();
withdrawnRoutes.add(Ip4Prefix.valueOf("20.0.0.0/8"));
// Write the routes
message = peer2.peerChannelHandler.prepareBgpUpdate(
NEXT_HOP2_ROUTER,
BETTER_LOCAL_PREF,
BETTER_MULTI_EXIT_DISC,
asPathShort,
addedRoutes,
withdrawnRoutes);
peer2.peerChannelHandler.savedCtx.getChannel().write(message);
//
// Check that the routes have been received, processed and stored
//
bgpRibIn2 = waitForBgpRibIn(bgpSession2, 0);
assertThat(bgpRibIn2, hasSize(0));
//
// Add a route entry to Peer2 with a better MED
//
addedRoutes = new LinkedList<>();
withdrawnRoutes = new LinkedList<>();
addedRoutes.add(Ip4Prefix.valueOf("20.0.0.0/8"));
// Write the routes
message = peer2.peerChannelHandler.prepareBgpUpdate(
NEXT_HOP2_ROUTER,
BETTER_LOCAL_PREF,
BETTER_MULTI_EXIT_DISC,
asPathShort,
addedRoutes,
withdrawnRoutes);
peer2.peerChannelHandler.savedCtx.getChannel().write(message);
//
// Check that the routes have been received, processed and stored
//
bgpRibIn2 = waitForBgpRibIn(bgpSession2, 1);
assertThat(bgpRibIn2, hasSize(1));
bgpRoutes = waitForBgpRoutes(2);
assertThat(bgpRoutes, hasSize(2));
//
bgpRouteEntry =
new BgpRouteEntry(bgpSession2,
Ip4Prefix.valueOf("20.0.0.0/8"),
NEXT_HOP2_ROUTER,
(byte) BgpConstants.Update.Origin.IGP,
asPathShort,
BETTER_LOCAL_PREF);
bgpRouteEntry.setMultiExitDisc(BETTER_MULTI_EXIT_DISC);
assertThat(bgpRibIn2, hasBgpRouteEntry(bgpRouteEntry));
assertThat(waitForBgpRoute(bgpRouteEntry), notNullValue());
//
// Add a route entry to Peer1 with a better (lower) BGP ID
//
addedRoutes = new LinkedList<>();
withdrawnRoutes = new LinkedList<>();
addedRoutes.add(Ip4Prefix.valueOf("20.0.0.0/8"));
withdrawnRoutes.add(Ip4Prefix.valueOf("30.0.0.0/16"));
// Write the routes
message = peer1.peerChannelHandler.prepareBgpUpdate(
NEXT_HOP1_ROUTER,
BETTER_LOCAL_PREF,
BETTER_MULTI_EXIT_DISC,
asPathShort,
addedRoutes,
withdrawnRoutes);
peer1.peerChannelHandler.savedCtx.getChannel().write(message);
//
// Check that the routes have been received, processed and stored
//
bgpRibIn1 = waitForBgpRibIn(bgpSession1, 1);
assertThat(bgpRibIn1, hasSize(1));
bgpRoutes = waitForBgpRoutes(1);
assertThat(bgpRoutes, hasSize(1));
//
bgpRouteEntry =
new BgpRouteEntry(bgpSession1,
Ip4Prefix.valueOf("20.0.0.0/8"),
NEXT_HOP1_ROUTER,
(byte) BgpConstants.Update.Origin.IGP,
asPathShort,
BETTER_LOCAL_PREF);
bgpRouteEntry.setMultiExitDisc(BETTER_MULTI_EXIT_DISC);
assertThat(bgpRibIn1, hasBgpRouteEntry(bgpRouteEntry));
assertThat(waitForBgpRoute(bgpRouteEntry), notNullValue());
// Close the channels and test there are no routes
peer1.peerChannelHandler.closeChannel();
peer2.peerChannelHandler.closeChannel();
peer3.peerChannelHandler.closeChannel();
bgpRoutes = waitForBgpRoutes(0);
assertThat(bgpRoutes, hasSize(0));
} | void function() throws InterruptedException { ChannelBuffer message; BgpRouteEntry bgpRouteEntry; Collection<BgpRouteEntry> bgpRibIn1; Collection<BgpRouteEntry> bgpRibIn2; Collection<BgpRouteEntry> bgpRibIn3; Collection<BgpRouteEntry> bgpRoutes; Collection<Ip4Prefix> addedRoutes = new LinkedList<>(); Collection<Ip4Prefix> withdrawnRoutes = new LinkedList<>(); peer1.connect(connectToSocket); peer2.connect(connectToSocket); peer3.connect(connectToSocket); addedRoutes.add(Ip4Prefix.valueOf(STR)); message = peer1.peerChannelHandler.prepareBgpUpdate( NEXT_HOP1_ROUTER, DEFAULT_LOCAL_PREF, DEFAULT_MULTI_EXIT_DISC, asPathLong, addedRoutes, withdrawnRoutes); peer1.peerChannelHandler.savedCtx.getChannel().write(message); bgpRoutes = waitForBgpRoutes(2); assertThat(bgpRoutes, hasSize(2)); withdrawnRoutes = new LinkedList<>(); addedRoutes.add(Ip4Prefix.valueOf(STR)); message = peer2.peerChannelHandler.prepareBgpUpdate( NEXT_HOP2_ROUTER, BETTER_LOCAL_PREF, DEFAULT_MULTI_EXIT_DISC, asPathLong, addedRoutes, withdrawnRoutes); peer2.peerChannelHandler.savedCtx.getChannel().write(message); assertThat(bgpRibIn2, hasSize(1)); bgpRoutes = waitForBgpRoutes(2); assertThat(bgpRoutes, hasSize(2)); new BgpRouteEntry(bgpSession2, Ip4Prefix.valueOf(STR), NEXT_HOP2_ROUTER, (byte) BgpConstants.Update.Origin.IGP, asPathLong, BETTER_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn2, hasBgpRouteEntry(bgpRouteEntry)); assertThat(waitForBgpRoute(bgpRouteEntry), notNullValue()); withdrawnRoutes = new LinkedList<>(); addedRoutes.add(Ip4Prefix.valueOf(STR)); message = peer3.peerChannelHandler.prepareBgpUpdate( NEXT_HOP3_ROUTER, BETTER_LOCAL_PREF, DEFAULT_MULTI_EXIT_DISC, asPathShort, addedRoutes, withdrawnRoutes); peer3.peerChannelHandler.savedCtx.getChannel().write(message); assertThat(bgpRibIn3, hasSize(1)); bgpRoutes = waitForBgpRoutes(2); assertThat(bgpRoutes, hasSize(2)); new BgpRouteEntry(bgpSession3, Ip4Prefix.valueOf(STR), NEXT_HOP3_ROUTER, (byte) BgpConstants.Update.Origin.IGP, asPathShort, BETTER_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn3, hasBgpRouteEntry(bgpRouteEntry)); assertThat(waitForBgpRoute(bgpRouteEntry), notNullValue()); withdrawnRoutes = new LinkedList<>(); withdrawnRoutes.add(Ip4Prefix.valueOf(STR)); message = peer2.peerChannelHandler.prepareBgpUpdate( NEXT_HOP2_ROUTER, BETTER_LOCAL_PREF, BETTER_MULTI_EXIT_DISC, asPathShort, addedRoutes, withdrawnRoutes); peer2.peerChannelHandler.savedCtx.getChannel().write(message); assertThat(bgpRibIn2, hasSize(0)); withdrawnRoutes = new LinkedList<>(); addedRoutes.add(Ip4Prefix.valueOf(STR)); message = peer2.peerChannelHandler.prepareBgpUpdate( NEXT_HOP2_ROUTER, BETTER_LOCAL_PREF, BETTER_MULTI_EXIT_DISC, asPathShort, addedRoutes, withdrawnRoutes); peer2.peerChannelHandler.savedCtx.getChannel().write(message); assertThat(bgpRibIn2, hasSize(1)); bgpRoutes = waitForBgpRoutes(2); assertThat(bgpRoutes, hasSize(2)); new BgpRouteEntry(bgpSession2, Ip4Prefix.valueOf(STR), NEXT_HOP2_ROUTER, (byte) BgpConstants.Update.Origin.IGP, asPathShort, BETTER_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(BETTER_MULTI_EXIT_DISC); assertThat(bgpRibIn2, hasBgpRouteEntry(bgpRouteEntry)); assertThat(waitForBgpRoute(bgpRouteEntry), notNullValue()); withdrawnRoutes = new LinkedList<>(); addedRoutes.add(Ip4Prefix.valueOf(STR)); withdrawnRoutes.add(Ip4Prefix.valueOf(STR)); message = peer1.peerChannelHandler.prepareBgpUpdate( NEXT_HOP1_ROUTER, BETTER_LOCAL_PREF, BETTER_MULTI_EXIT_DISC, asPathShort, 
addedRoutes, withdrawnRoutes); peer1.peerChannelHandler.savedCtx.getChannel().write(message); assertThat(bgpRibIn1, hasSize(1)); bgpRoutes = waitForBgpRoutes(1); assertThat(bgpRoutes, hasSize(1)); new BgpRouteEntry(bgpSession1, Ip4Prefix.valueOf(STR), NEXT_HOP1_ROUTER, (byte) BgpConstants.Update.Origin.IGP, asPathShort, BETTER_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(BETTER_MULTI_EXIT_DISC); assertThat(bgpRibIn1, hasBgpRouteEntry(bgpRouteEntry)); assertThat(waitForBgpRoute(bgpRouteEntry), notNullValue()); peer1.peerChannelHandler.closeChannel(); peer2.peerChannelHandler.closeChannel(); peer3.peerChannelHandler.closeChannel(); bgpRoutes = waitForBgpRoutes(0); assertThat(bgpRoutes, hasSize(0)); } | /**
* Tests the BGP route preference.
*/ | Tests the BGP route preference | testBgpRoutePreference | {
"repo_name": "packet-tracker/onos",
"path": "apps/routing/src/test/java/org/onosproject/routing/bgp/BgpSessionManagerTest.java",
"license": "apache-2.0",
"size": 35721
} | [
"java.util.Collection",
"java.util.LinkedList",
"org.hamcrest.Matchers",
"org.jboss.netty.buffer.ChannelBuffer",
"org.junit.Assert",
"org.onlab.packet.Ip4Prefix"
] | import java.util.Collection; import java.util.LinkedList; import org.hamcrest.Matchers; import org.jboss.netty.buffer.ChannelBuffer; import org.junit.Assert; import org.onlab.packet.Ip4Prefix; | import java.util.*; import org.hamcrest.*; import org.jboss.netty.buffer.*; import org.junit.*; import org.onlab.packet.*; | [
"java.util",
"org.hamcrest",
"org.jboss.netty",
"org.junit",
"org.onlab.packet"
] | java.util; org.hamcrest; org.jboss.netty; org.junit; org.onlab.packet; | 690,810 |
// ===================================================================================
// Generate Key
// ============
public String generateKey(String dbName, String tableName, List<Object> searchkeyList) {
final StringBuilder sb = new StringBuilder();
final String keyDelimiter = KEY_DELIMITER;
sb.append(dbName);
sb.append(keyDelimiter).append(tableName);
searchkeyList.forEach(searchKey -> {
sb.append(keyDelimiter);
sb.append(Objects.toString(searchKey, ""));
});
return sb.toString();
} | String function(String dbName, String tableName, List<Object> searchkeyList) { final StringBuilder sb = new StringBuilder(); final String keyDelimiter = KEY_DELIMITER; sb.append(dbName); sb.append(keyDelimiter).append(tableName); searchkeyList.forEach(searchKey -> { sb.append(keyDelimiter); sb.append(Objects.toString(searchKey, "")); }); return sb.toString(); } | /**
* Generate a key-string for KVS to store a value by concatenating information of the value.
 * e.g.) "DB name|table name|searchKeyList(0)|searchKeyList(1)|..."
* @param dbName The name of DB to connect to
* @param tableName The name of table in RDB that contains the rows stored into KVS (NotNull)
* @param searchkeyList A list of keys for search (NotNull)
* @return A key-string used to store data into KVS (NotNull)
 */ | Generate a key-string for KVS to store a value by concatenating information of the value. e.g.) "DB name|table name|searchKeyList(0)|searchKeyList(1)|..." | generateKey | {
"repo_name": "dbflute-example/dbflute-example-with-non-rdb",
"path": "src/main/java/org/dbflute/kvs/store/facade/AbstractKvsStoreHashFacade.java",
"license": "apache-2.0",
"size": 8658
} | [
"java.util.List",
"java.util.Objects"
] | import java.util.List; import java.util.Objects; | import java.util.*; | [
"java.util"
] | java.util; | 1,177,189 |
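A standalone sketch of the key format documented above ("DB name|table name|key0|key1|..."). The literal "|" delimiter follows the Javadoc example, since the real KEY_DELIMITER constant is not shown in the record.

import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

// Illustrative only; mirrors the documented behaviour with an assumed "|" delimiter.
public final class KvsKeySketch {
    private static final String KEY_DELIMITER = "|";

    public static String generateKey(String dbName, String tableName, List<Object> searchKeyList) {
        // Null search keys are rendered as empty strings, matching Objects.toString(key, "") above.
        String keys = searchKeyList.stream()
                .map(key -> Objects.toString(key, ""))
                .collect(Collectors.joining(KEY_DELIMITER));
        StringBuilder sb = new StringBuilder(dbName).append(KEY_DELIMITER).append(tableName);
        if (!searchKeyList.isEmpty()) {
            sb.append(KEY_DELIMITER).append(keys);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        // Prints: memberdb|MEMBER|1|ACTIVE
        System.out.println(generateKey("memberdb", "MEMBER", Arrays.asList(1, "ACTIVE")));
    }
}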
public void desactivar(Producto producto) throws ControllerException {
try {
PersistenceContext.getInstance().beginTransaction();
ServiceFactory.getProductoService().desactivar(producto);
PersistenceContext.getInstance().commit();
AppContext.getInstance().getProductoObserver().objectUpdated(producto);
} catch (ServiceException e) {
try {
PersistenceContext.getInstance().rollback();
} catch (PersistenceContextException e1) {
throw new ControllerException( e1.getMessage() );
}
throw new ControllerException( e.getMessage() );
} catch (PersistenceContextException e) {
throw new ControllerException( e.getMessage() );
}
}
| void function(Producto producto) throws ControllerException { try { PersistenceContext.getInstance().beginTransaction(); ServiceFactory.getProductoService().desactivar(producto); PersistenceContext.getInstance().commit(); AppContext.getInstance().getProductoObserver().objectUpdated(producto); } catch (ServiceException e) { try { PersistenceContext.getInstance().rollback(); } catch (PersistenceContextException e1) { throw new ControllerException( e1.getMessage() ); } throw new ControllerException( e.getMessage() ); } catch (PersistenceContextException e) { throw new ControllerException( e.getMessage() ); } } | /**
 * Deactivates a product.
 */ | Deactivates a product | desactivar | {
"repo_name": "iriber/miGestionUI",
"path": "src/main/java/com/migestion/ui/service/UIProductoService.java",
"license": "gpl-2.0",
"size": 7616
} | [
"com.migestion.dao.PersistenceContext",
"com.migestion.dao.exception.PersistenceContextException",
"com.migestion.model.Producto",
"com.migestion.services.ServiceFactory",
"com.migestion.services.exception.ServiceException",
"com.migestion.swing.controller.exception.ControllerException",
"com.migestion.ui.context.AppContext"
] | import com.migestion.dao.PersistenceContext; import com.migestion.dao.exception.PersistenceContextException; import com.migestion.model.Producto; import com.migestion.services.ServiceFactory; import com.migestion.services.exception.ServiceException; import com.migestion.swing.controller.exception.ControllerException; import com.migestion.ui.context.AppContext; | import com.migestion.dao.*; import com.migestion.dao.exception.*; import com.migestion.model.*; import com.migestion.services.*; import com.migestion.services.exception.*; import com.migestion.swing.controller.exception.*; import com.migestion.ui.context.*; | [
"com.migestion.dao",
"com.migestion.model",
"com.migestion.services",
"com.migestion.swing",
"com.migestion.ui"
] | com.migestion.dao; com.migestion.model; com.migestion.services; com.migestion.swing; com.migestion.ui; | 1,117,336 |
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value="RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
@VisibleForTesting
static void configureCompression(
HTable table, Configuration conf) throws IOException {
StringBuilder compressionConfigValue = new StringBuilder();
HTableDescriptor tableDescriptor = table.getTableDescriptor();
if(tableDescriptor == null){
// could happen with mock table instance
return;
}
Collection<HColumnDescriptor> families = tableDescriptor.getFamilies();
int i = 0;
for (HColumnDescriptor familyDescriptor : families) {
if (i++ > 0) {
compressionConfigValue.append('&');
}
compressionConfigValue.append(URLEncoder.encode(
familyDescriptor.getNameAsString(), "UTF-8"));
compressionConfigValue.append('=');
compressionConfigValue.append(URLEncoder.encode(
familyDescriptor.getCompression().getName(), "UTF-8"));
}
// Get rid of the last ampersand
conf.set(COMPRESSION_FAMILIES_CONF_KEY, compressionConfigValue.toString());
} | @edu.umd.cs.findbugs.annotations.SuppressWarnings( value=STR) static void configureCompression( HTable table, Configuration conf) throws IOException { StringBuilder compressionConfigValue = new StringBuilder(); HTableDescriptor tableDescriptor = table.getTableDescriptor(); if(tableDescriptor == null){ return; } Collection<HColumnDescriptor> families = tableDescriptor.getFamilies(); int i = 0; for (HColumnDescriptor familyDescriptor : families) { if (i++ > 0) { compressionConfigValue.append('&'); } compressionConfigValue.append(URLEncoder.encode( familyDescriptor.getNameAsString(), "UTF-8")); compressionConfigValue.append('='); compressionConfigValue.append(URLEncoder.encode( familyDescriptor.getCompression().getName(), "UTF-8")); } conf.set(COMPRESSION_FAMILIES_CONF_KEY, compressionConfigValue.toString()); } | /**
* Serialize column family to compression algorithm map to configuration.
* Invoked while configuring the MR job for incremental load.
*
* @param table to read the properties from
* @param conf to persist serialized values into
* @throws IOException
* on failure to read column family descriptors
*/ | Serialize column family to compression algorithm map to configuration. Invoked while configuring the MR job for incremental load | configureCompression | {
"repo_name": "throughsky/lywebank",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java",
"license": "apache-2.0",
"size": 27700
} | [
"java.io.IOException",
"java.net.URLEncoder",
"java.util.Collection",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hbase.HColumnDescriptor",
"org.apache.hadoop.hbase.HTableDescriptor",
"org.apache.hadoop.hbase.client.HTable"
] | import java.io.IOException; import java.net.URLEncoder; import java.util.Collection; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.client.HTable; | import java.io.*; import java.net.*; import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.*; | [
"java.io",
"java.net",
"java.util",
"org.apache.hadoop"
] | java.io; java.net; java.util; org.apache.hadoop; | 942,888 |
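The value stored under COMPRESSION_FAMILIES_CONF_KEY is simply "family=ALGO&family=ALGO" with both tokens URL-encoded. A hedged, standalone sketch of that string-building step, using made-up family and algorithm names:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.LinkedHashMap;
import java.util.Map;

// Sketch of the serialization format only; it does not touch HBase descriptors or the Configuration.
public final class CompressionConfSketch {
    public static String serialize(Map<String, String> familyToCompression) throws UnsupportedEncodingException {
        StringBuilder value = new StringBuilder();
        for (Map.Entry<String, String> entry : familyToCompression.entrySet()) {
            if (value.length() > 0) {
                value.append('&');
            }
            value.append(URLEncoder.encode(entry.getKey(), "UTF-8"))
                 .append('=')
                 .append(URLEncoder.encode(entry.getValue(), "UTF-8"));
        }
        return value.toString();
    }

    public static void main(String[] args) throws UnsupportedEncodingException {
        Map<String, String> families = new LinkedHashMap<>();
        families.put("cf1", "GZ");
        families.put("cf2", "NONE");
        System.out.println(serialize(families)); // cf1=GZ&cf2=NONE
    }
}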
default void addListener(AtomicMapEventListener<K, V> listener) {
addListener(listener, MoreExecutors.directExecutor());
} | default void addListener(AtomicMapEventListener<K, V> listener) { addListener(listener, MoreExecutors.directExecutor()); } | /**
* Registers the specified listener to be notified whenever the map is updated.
*
* @param listener listener to notify about map events
*/ | Registers the specified listener to be notified whenever the map is updated | addListener | {
"repo_name": "atomix/atomix",
"path": "core/src/main/java/io/atomix/core/map/AtomicMap.java",
"license": "apache-2.0",
"size": 16612
} | [
"com.google.common.util.concurrent.MoreExecutors"
] | import com.google.common.util.concurrent.MoreExecutors; | import com.google.common.util.concurrent.*; | [
"com.google.common"
] | com.google.common; | 219,484 |
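A hedged usage sketch for the default addListener above. It assumes AtomicMapEventListener is a single-method (lambda-friendly) interface living in the same package as AtomicMap, and that the map was obtained elsewhere from an Atomix instance; verify both against the Atomix version in use.

import io.atomix.core.map.AtomicMap;
import io.atomix.core.map.AtomicMapEventListener;

// Illustration only; obtaining the AtomicMap is out of scope here.
public final class MapListenerSketch {
    public static void watch(AtomicMap<String, String> map) {
        AtomicMapEventListener<String, String> listener =
                event -> System.out.println("map updated: " + event);
        // Uses the default overload shown above, i.e. the direct executor.
        map.addListener(listener);
    }
}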
public void setCacheManager(CacheManager cacheManager) {
this._cacheManager = cacheManager;
} | void function(CacheManager cacheManager) { this._cacheManager = cacheManager; } | /**
* Sets the cache manager.
* @param cacheManager the new value of the property
*/ | Sets the cache manager | setCacheManager | {
"repo_name": "codeaudit/OG-Platform",
"path": "projects/OG-Component/src/main/java/com/opengamma/component/factory/source/UserFinancialPositionSourceComponentFactory.java",
"license": "apache-2.0",
"size": 28227
} | [
"net.sf.ehcache.CacheManager"
] | import net.sf.ehcache.CacheManager; | import net.sf.ehcache.*; | [
"net.sf.ehcache"
] | net.sf.ehcache; | 531,695 |
java.lang.Object readAny(InputStream in); | java.lang.Object readAny(InputStream in); | /**
* Delegation call for {@link Util#readAny}.
*/ | Delegation call for <code>Util#readAny</code> | readAny | {
"repo_name": "TheTypoMaster/Scaper",
"path": "openjdk/corba/src/share/classes/javax/rmi/CORBA/UtilDelegate.java",
"license": "gpl-2.0",
"size": 4060
} | [
"org.omg.CORBA"
] | import org.omg.CORBA; | import org.omg.*; | [
"org.omg"
] | org.omg; | 112,012 |
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedFlux<DeploymentExtendedInner> listAsync(String filter, Integer top); | @ServiceMethod(returns = ReturnType.COLLECTION) PagedFlux<DeploymentExtendedInner> listAsync(String filter, Integer top); | /**
* Get all the deployments for a subscription.
*
* @param filter The filter to apply on the operation. For example, you can use $filter=provisioningState eq
* '{state}'.
* @param top The number of results to get. If null is passed, returns all deployments.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all the deployments for a subscription as paginated response with {@link PagedFlux}.
*/ | Get all the deployments for a subscription | listAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-resources/src/main/java/com/azure/resourcemanager/resources/fluent/DeploymentsClient.java",
"license": "mit",
"size": 218889
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedFlux",
"com.azure.resourcemanager.resources.fluent.models.DeploymentExtendedInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedFlux; import com.azure.resourcemanager.resources.fluent.models.DeploymentExtendedInner; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.resources.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 217,552 |
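A hedged consumption sketch for the reactive overload above: PagedFlux is a Flux, so results arrive by subscribing. Building the DeploymentsClient itself is out of scope, and the getter used on DeploymentExtendedInner follows the Azure SDK's fluent name() convention, so check it against the model class before relying on it.

import com.azure.core.http.rest.PagedFlux;
import com.azure.resourcemanager.resources.fluent.DeploymentsClient;
import com.azure.resourcemanager.resources.fluent.models.DeploymentExtendedInner;

// Illustration only; "client" is assumed to be already authenticated and constructed.
public final class ListDeploymentsSketch {
    public static void printNames(DeploymentsClient client) {
        // null filter and null top: return every deployment of the subscription.
        PagedFlux<DeploymentExtendedInner> deployments = client.listAsync(null, null);
        deployments.subscribe(deployment -> System.out.println(deployment.name()));
    }
}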
@POST
@Path("bulk")
@Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
@Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
BulkActionResult bulk(@NotNull BulkAction bulkAction); | @Path("bulk") @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON }) @Consumes({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON }) BulkActionResult bulk(@NotNull BulkAction bulkAction); | /**
* Executes the provided bulk action.
*
* @param bulkAction list of connector instance ids against which the bulk action will be performed.
* @return Bulk action result
*/ | Executes the provided bulk action | bulk | {
"repo_name": "massx1/syncope",
"path": "common/rest-api/src/main/java/org/apache/syncope/common/rest/api/service/ConnectorService.java",
"license": "apache-2.0",
"size": 8193
} | [
"javax.validation.constraints.NotNull",
"javax.ws.rs.Consumes",
"javax.ws.rs.Path",
"javax.ws.rs.Produces",
"javax.ws.rs.core.MediaType",
"org.apache.syncope.common.lib.to.BulkAction",
"org.apache.syncope.common.lib.to.BulkActionResult"
] | import javax.validation.constraints.NotNull; import javax.ws.rs.Consumes; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import org.apache.syncope.common.lib.to.BulkAction; import org.apache.syncope.common.lib.to.BulkActionResult; | import javax.validation.constraints.*; import javax.ws.rs.*; import javax.ws.rs.core.*; import org.apache.syncope.common.lib.to.*; | [
"javax.validation",
"javax.ws",
"org.apache.syncope"
] | javax.validation; javax.ws; org.apache.syncope; | 1,441,193 |
public void testLoggerMarshalling() throws Exception {
GridMarshallerTestBean inBean = newTestBean(grid().log());
byte[] buf = marshal(inBean);
GridMarshallerTestBean outBean = unmarshal(buf);
assert inBean.getObjectField() != null;
assert outBean.getObjectField() != null;
assert IgniteLogger.class.isAssignableFrom(inBean.getObjectField().getClass());
assert IgniteLogger.class.isAssignableFrom(outBean.getObjectField().getClass());
assert inBean != outBean;
assert inBean.equals(outBean);
outBean.checkNullResources();
} | void function() throws Exception { GridMarshallerTestBean inBean = newTestBean(grid().log()); byte[] buf = marshal(inBean); GridMarshallerTestBean outBean = unmarshal(buf); assert inBean.getObjectField() != null; assert outBean.getObjectField() != null; assert IgniteLogger.class.isAssignableFrom(inBean.getObjectField().getClass()); assert IgniteLogger.class.isAssignableFrom(outBean.getObjectField().getClass()); assert inBean != outBean; assert inBean.equals(outBean); outBean.checkNullResources(); } | /**
* Tests marshal {@link org.apache.ignite.IgniteLogger} instance.
*
* @throws Exception If test failed.
*/ | Tests marshal <code>org.apache.ignite.IgniteLogger</code> instance | testLoggerMarshalling | {
"repo_name": "pperalta/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/marshaller/GridMarshallerAbstractTest.java",
"license": "apache-2.0",
"size": 29986
} | [
"org.apache.ignite.IgniteLogger"
] | import org.apache.ignite.IgniteLogger; | import org.apache.ignite.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 1,943,752 |
public static void storeMessage(Message message) {
Timber.d("Storing persistent message");
sStoredMessage = message;
} | static void function(Message message) { Timber.d(STR); sStoredMessage = message; } | /**
* Store a persistent message to static variable
* @param message Message to store
*/ | Store a persistent message to static variable | storeMessage | {
"repo_name": "John0079/Anki-Android-2.5.4",
"path": "AnkiDroid/src/main/java/com/ichi2/anki/dialogs/DialogHandler.java",
"license": "gpl-3.0",
"size": 6034
} | [
"android.os.Message"
] | import android.os.Message; | import android.os.*; | [
"android.os"
] | android.os; | 658,099 |
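A small usage sketch for the static helper above; the message "what" code is a made-up placeholder rather than one of AnkiDroid's real dialog ids.

import android.os.Message;
import com.ichi2.anki.dialogs.DialogHandler;

// Illustration only.
public final class StoreMessageSketch {
    public static void rememberDialog(int hypotheticalWhatCode) {
        Message message = Message.obtain();
        message.what = hypotheticalWhatCode;
        // Stash the message so it can be replayed later by the dialog handler.
        DialogHandler.storeMessage(message);
    }
}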
public void login(String username, String password) throws InterruptedException
{
loginFrame.setInfo("Please try again.");
try
{
Connection.Response res = Jsoup.connect("https://leho.howest.be/main/ssl/index.php").data(new String[] { "login", username, "password", password, "submitAuth", "OK", "_qf__formLogin", "" }).method(Connection.Method.POST).execute();
if (res.parse().select("#login_fail").size() > 0) {
this.isLoggedIn = false;
loginFrame.setInfo("Incorrect login.");
} else {
this.isLoggedIn = true;
loginFrame.setInfo("Correct login.");
config.setCookies(res.cookies());
loginFrame.dispose();
LehoAnnounceHandler.getInstance(config);
}
} catch (IOException e) {
this.isLoggedIn = false;
}
}
| void function(String username, String password) throws InterruptedException { loginFrame.setInfo(STR); try { Connection.Response res = Jsoup.connect(STR#login_failSTRIncorrect login.STRCorrect login."); config.setCookies(res.cookies()); loginFrame.dispose(); LehoAnnounceHandler.getInstance(config); } } catch (IOException e) { this.isLoggedIn = false; } } | /**
* author: Nathan Desmet & Niels Gunst
* @param username
* @param password
* @throws java.lang.InterruptedException
*/ | author: Nathan Desmet & Niels Gunst | login | {
"repo_name": "ShahNami/LehoAnnounce",
"path": "src/model/LehoLoginHandler.java",
"license": "gpl-2.0",
"size": 2200
} | [
"java.io.IOException",
"org.jsoup.Connection",
"org.jsoup.Jsoup"
] | import java.io.IOException; import org.jsoup.Connection; import org.jsoup.Jsoup; | import java.io.*; import org.jsoup.*; | [
"java.io",
"org.jsoup"
] | java.io; org.jsoup; | 2,373,481 |
private Color computeColor() {
if ( field == null ) {
return defaultColor;
}
final Object o = getDataRow().get( field );
if ( o instanceof Number == false ) {
return defaultColor;
}
final Number n = (Number) o;
final Number value;
if ( useAbsoluteValue ) {
if ( n instanceof BigDecimal ) {
final BigDecimal td = (BigDecimal) n;
value = td.abs();
} else {
final BigDecimal td = new BigDecimal( n.toString() );
value = td.abs();
}
} else {
value = n;
}
if ( lightDefArray == null ) {
lightDefArray = limits.toArray( new LightDefinition[limits.size()] );
Arrays.sort( lightDefArray );
}
if ( useOppositeLogic ) {
// Inverse logic. The first interval ranging from '-INF' to the first limit will use the
// first color. If the value is in the range 'limit[i]' and 'limit[i+1]', the color[i+1]
// will be used. If the value is greater than the last limit, the default color is used.
if ( limits.isEmpty() ) {
return defaultColor;
}
Color returnColor = defaultColor;
for ( int i = lightDefArray.length - 1; i >= 0; i-- ) {
final LightDefinition definition = lightDefArray[i];
if ( definition == null ) {
continue;
}
final Number limit = definition.getLimit();
if ( limit == null ) {
continue;
}
if ( value.doubleValue() < limit.doubleValue() ) {
returnColor = definition.getColor();
}
}
if ( returnColor == null ) {
return defaultColor;
}
return returnColor;
} else {
// Standard logic. The first interval from '-INF' to the first limit uses the default color.
// from there, the color for the first limit that is greater than the given value is used.
// For the interval ranging from the last limit to '+INF', the last color is used.
// If there are no limits defined, the default color is always used.
Color returnColor = defaultColor;
for ( int i = 0; i < lightDefArray.length; i++ ) {
final LightDefinition definition = lightDefArray[i];
if ( definition == null ) {
continue;
}
final Number limit = definition.getLimit();
if ( limit == null ) {
continue;
}
if ( value.doubleValue() >= limit.doubleValue() ) {
returnColor = definition.getColor();
}
}
if ( returnColor == null ) {
return defaultColor;
}
return returnColor;
}
} | Color function() { if ( field == null ) { return defaultColor; } final Object o = getDataRow().get( field ); if ( o instanceof Number == false ) { return defaultColor; } final Number n = (Number) o; final Number value; if ( useAbsoluteValue ) { if ( n instanceof BigDecimal ) { final BigDecimal td = (BigDecimal) n; value = td.abs(); } else { final BigDecimal td = new BigDecimal( n.toString() ); value = td.abs(); } } else { value = n; } if ( lightDefArray == null ) { lightDefArray = limits.toArray( new LightDefinition[limits.size()] ); Arrays.sort( lightDefArray ); } if ( useOppositeLogic ) { if ( limits.isEmpty() ) { return defaultColor; } Color returnColor = defaultColor; for ( int i = lightDefArray.length - 1; i >= 0; i-- ) { final LightDefinition definition = lightDefArray[i]; if ( definition == null ) { continue; } final Number limit = definition.getLimit(); if ( limit == null ) { continue; } if ( value.doubleValue() < limit.doubleValue() ) { returnColor = definition.getColor(); } } if ( returnColor == null ) { return defaultColor; } return returnColor; } else { Color returnColor = defaultColor; for ( int i = 0; i < lightDefArray.length; i++ ) { final LightDefinition definition = lightDefArray[i]; if ( definition == null ) { continue; } final Number limit = definition.getLimit(); if ( limit == null ) { continue; } if ( value.doubleValue() >= limit.doubleValue() ) { returnColor = definition.getColor(); } } if ( returnColor == null ) { return defaultColor; } return returnColor; } } | /**
* Computes the color that corresponds to the LightDefinition entry for which the limits match the value read from
* field.
*
* @return the computed color.
*/ | Computes the color that corresponds to the LightDefinition entry for which the limits match the value read from field | computeColor | {
"repo_name": "EgorZhuk/pentaho-reporting",
"path": "engine/core/src/main/java/org/pentaho/reporting/engine/classic/core/function/ElementTrafficLightFunction.java",
"license": "lgpl-2.1",
"size": 19461
} | [
"java.awt.Color",
"java.math.BigDecimal",
"java.util.Arrays"
] | import java.awt.Color; import java.math.BigDecimal; import java.util.Arrays; | import java.awt.*; import java.math.*; import java.util.*; | [
"java.awt",
"java.math",
"java.util"
] | java.awt; java.math; java.util; | 14,230 |
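A standalone worked example of the "standard logic" branch described above: with limits sorted ascending, the color of the highest limit the value has reached wins, and values below every limit keep the default color. Limits and colors here are invented sample data, not taken from the reporting engine.

import java.awt.Color;

// Illustration of the interval selection only; it ignores fields, absolute values and opposite logic.
public final class TrafficLightSketch {
    public static Color pick(double value, double[] sortedLimits, Color[] colors, Color defaultColor) {
        Color result = defaultColor;
        for (int i = 0; i < sortedLimits.length; i++) {
            if (value >= sortedLimits[i]) {
                result = colors[i];
            }
        }
        return result;
    }

    public static void main(String[] args) {
        double[] limits = {50, 100};
        Color[] colors = {Color.YELLOW, Color.RED};
        System.out.println(pick(30, limits, colors, Color.GREEN));  // default: below every limit
        System.out.println(pick(75, limits, colors, Color.GREEN));  // yellow: reached 50 but not 100
        System.out.println(pick(150, limits, colors, Color.GREEN)); // red: reached the last limit
    }
}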
public LogisticRegressionModel trainWithLBFGS(JavaRDD<LabeledPoint> trainingDataset, String regularizationType,
int noOfClasses) {
LogisticRegressionWithLBFGS lbfgs = new LogisticRegressionWithLBFGS();
if (MLConstants.L1.equals(regularizationType)) {
lbfgs.optimizer().setUpdater(new L1Updater());
} else if (MLConstants.L2.equals(regularizationType)) {
lbfgs.optimizer().setUpdater(new SquaredL2Updater());
}
lbfgs.setIntercept(true);
return lbfgs.setNumClasses(noOfClasses < 2 ? 2 : noOfClasses).run(trainingDataset.rdd());
} | LogisticRegressionModel function(JavaRDD<LabeledPoint> trainingDataset, String regularizationType, int noOfClasses) { LogisticRegressionWithLBFGS lbfgs = new LogisticRegressionWithLBFGS(); if (MLConstants.L1.equals(regularizationType)) { lbfgs.optimizer().setUpdater(new L1Updater()); } else if (MLConstants.L2.equals(regularizationType)) { lbfgs.optimizer().setUpdater(new SquaredL2Updater()); } lbfgs.setIntercept(true); return lbfgs.setNumClasses(noOfClasses < 2 ? 2 : noOfClasses).run(trainingDataset.rdd()); } | /**
 * This method uses the LBFGS optimizer to train a logistic regression model for a given dataset.
*
* @param trainingDataset Training dataset as a JavaRDD of labeled points
* @param noOfClasses No of classes
* @param regularizationType Regularization type
* @return Logistic regression model
 */ | This method uses the LBFGS optimizer to train a logistic regression model for a given dataset | trainWithLBFGS | {
"repo_name": "danula/carbon-ml",
"path": "components/ml/org.wso2.carbon.ml.core/src/main/java/org/wso2/carbon/ml/core/spark/algorithms/LogisticRegression.java",
"license": "apache-2.0",
"size": 4954
} | [
"org.apache.spark.api.java.JavaRDD",
"org.apache.spark.mllib.classification.LogisticRegressionModel",
"org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS",
"org.apache.spark.mllib.optimization.L1Updater",
"org.apache.spark.mllib.optimization.SquaredL2Updater",
"org.apache.spark.mllib.regression.LabeledPoint",
"org.wso2.carbon.ml.commons.constants.MLConstants"
] | import org.apache.spark.api.java.JavaRDD; import org.apache.spark.mllib.classification.LogisticRegressionModel; import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS; import org.apache.spark.mllib.optimization.L1Updater; import org.apache.spark.mllib.optimization.SquaredL2Updater; import org.apache.spark.mllib.regression.LabeledPoint; import org.wso2.carbon.ml.commons.constants.MLConstants; | import org.apache.spark.api.java.*; import org.apache.spark.mllib.classification.*; import org.apache.spark.mllib.optimization.*; import org.apache.spark.mllib.regression.*; import org.wso2.carbon.ml.commons.constants.*; | [
"org.apache.spark",
"org.wso2.carbon"
] | org.apache.spark; org.wso2.carbon; | 1,821,807 |
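A hedged usage sketch with toy data for the wrapper method above. It assumes the surrounding LogisticRegression class has a no-arg constructor and that the string "L2" matches MLConstants.L2; both are assumptions, not confirmed by the record.

import java.util.Arrays;
import java.util.List;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.mllib.classification.LogisticRegressionModel;
import org.apache.spark.mllib.linalg.Vectors;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.wso2.carbon.ml.core.spark.algorithms.LogisticRegression;

// Illustration only; a real caller would load a proper training dataset instead of two toy points.
public final class TrainLbfgsSketch {
    public static LogisticRegressionModel train(JavaSparkContext sc) {
        List<LabeledPoint> points = Arrays.asList(
                new LabeledPoint(0.0, Vectors.dense(0.1, 0.2)),
                new LabeledPoint(1.0, Vectors.dense(0.9, 0.8)));
        JavaRDD<LabeledPoint> training = sc.parallelize(points);
        return new LogisticRegression().trainWithLBFGS(training, "L2", 2);
    }
}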
private PrintStream getStream(int level)
{
switch (level)
{
case LOG_ERROR:
System.err.print("ERROR: ");
return System.err;
case LOG_WARNING:
System.err.print("WARNING: ");
return System.err;
case LOG_INFO:
System.out.print("INFO: ");
return System.out;
case LOG_DEBUG:
System.out.print("DEBUG: ");
return System.out;
default:
System.out.print("UNKNOWN: ");
return System.out;
}
} | PrintStream function(int level) { switch (level) { case LOG_ERROR: System.err.print(STR); return System.err; case LOG_WARNING: System.err.print(STR); return System.err; case LOG_INFO: System.out.print(STR); return System.out; case LOG_DEBUG: System.out.print(STR); return System.out; default: System.out.print(STR); return System.out; } } | /**
* Return the standard print streams to use depending on log level.
*
* @param level log level
* @return print stream corresponding to log level
*/ | Return the standard print streams to use depending on log level | getStream | {
"repo_name": "boneman1231/org.apache.felix",
"path": "trunk/utils/src/main/java/org/apache/felix/utils/log/Logger.java",
"license": "apache-2.0",
"size": 4204
} | [
"java.io.PrintStream"
] | import java.io.PrintStream; | import java.io.*; | [
"java.io"
] | java.io; | 1,557,439 |
protected synchronized boolean isOnline() {
Context context = mApplicationReference.get();
if (context != null) {
ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
if (connectivityManager != null) {
NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo();
return activeNetworkInfo != null && activeNetworkInfo.isConnected();
}
}
return false;
} | synchronized boolean function() { Context context = mApplicationReference.get(); if (context != null) { ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); if (connectivityManager != null) { NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo(); return activeNetworkInfo != null && activeNetworkInfo.isConnected(); } } return false; } | /**
* Returns true if an Internet connection is detected.
* @return whether Internet connection exists
*/ | Returns true if an Internet connection is detected | isOnline | {
"repo_name": "MobileAppTracking/sdk-release",
"path": "sdk-android/TuneMarketingConsoleSDK/src/main/java/com/tune/TuneInternal.java",
"license": "gpl-3.0",
"size": 38933
} | [
"android.content.Context",
"android.net.ConnectivityManager",
"android.net.NetworkInfo"
] | import android.content.Context; import android.net.ConnectivityManager; import android.net.NetworkInfo; | import android.content.*; import android.net.*; | [
"android.content",
"android.net"
] | android.content; android.net; | 2,329,828 |
@Override
public DataByteArray getValue() {
if (accumSketch_ != null) {
return new DataByteArray(accumSketch_.toByteArray());
}
// return empty sketch
return new DataByteArray(new KllFloatsSketch(k_).toByteArray());
} | DataByteArray function() { if (accumSketch_ != null) { return new DataByteArray(accumSketch_.toByteArray()); } return new DataByteArray(new KllFloatsSketch(k_).toByteArray()); } | /**
* Returns the result that has been built up by multiple calls to {@link #accumulate}.
*
* @return serialized sketch
* @see "org.apache.pig.Accumulator.getValue()"
*/ | Returns the result that has been built up by multiple calls to <code>#accumulate</code> | getValue | {
"repo_name": "DataSketches/sketches-pig",
"path": "src/main/java/org/apache/datasketches/pig/kll/DataToSketch.java",
"license": "apache-2.0",
"size": 11535
} | [
"org.apache.datasketches.kll.KllFloatsSketch",
"org.apache.pig.data.DataByteArray"
] | import org.apache.datasketches.kll.KllFloatsSketch; import org.apache.pig.data.DataByteArray; | import org.apache.datasketches.kll.*; import org.apache.pig.data.*; | [
"org.apache.datasketches",
"org.apache.pig"
] | org.apache.datasketches; org.apache.pig; | 2,254,939 |
public static HiveTablePartition ofTable(
HiveConf hiveConf, @Nullable String hiveVersion, String dbName, String tableName) {
HiveShim hiveShim = getHiveShim(hiveVersion);
try (HiveMetastoreClientWrapper client =
new HiveMetastoreClientWrapper(hiveConf, hiveShim)) {
Table hiveTable = client.getTable(dbName, tableName);
return new HiveTablePartition(
hiveTable.getSd(), HiveReflectionUtils.getTableMetadata(hiveShim, hiveTable));
} catch (TException e) {
throw new FlinkHiveException(
String.format(
"Failed to create HiveTablePartition for hive table %s.%s",
dbName, tableName),
e);
}
} | static HiveTablePartition function( HiveConf hiveConf, @Nullable String hiveVersion, String dbName, String tableName) { HiveShim hiveShim = getHiveShim(hiveVersion); try (HiveMetastoreClientWrapper client = new HiveMetastoreClientWrapper(hiveConf, hiveShim)) { Table hiveTable = client.getTable(dbName, tableName); return new HiveTablePartition( hiveTable.getSd(), HiveReflectionUtils.getTableMetadata(hiveShim, hiveTable)); } catch (TException e) { throw new FlinkHiveException( String.format( STR, dbName, tableName), e); } } | /**
* Creates a HiveTablePartition to represent a hive table.
*
* @param hiveConf the HiveConf used to connect to HMS
* @param hiveVersion the version of hive in use, if it's null the version will be automatically
* detected
* @param dbName name of the database
* @param tableName name of the table
*/ | Creates a HiveTablePartition to represent a hive table | ofTable | {
"repo_name": "apache/flink",
"path": "flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/HiveTablePartition.java",
"license": "apache-2.0",
"size": 8266
} | [
"javax.annotation.Nullable",
"org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper",
"org.apache.flink.table.catalog.hive.client.HiveShim",
"org.apache.flink.table.catalog.hive.util.HiveReflectionUtils",
"org.apache.hadoop.hive.conf.HiveConf",
"org.apache.hadoop.hive.metastore.api.Table",
"org.apache.thrift.TException"
] | import javax.annotation.Nullable; import org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper; import org.apache.flink.table.catalog.hive.client.HiveShim; import org.apache.flink.table.catalog.hive.util.HiveReflectionUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.thrift.TException; | import javax.annotation.*; import org.apache.flink.table.catalog.hive.client.*; import org.apache.flink.table.catalog.hive.util.*; import org.apache.hadoop.hive.conf.*; import org.apache.hadoop.hive.metastore.api.*; import org.apache.thrift.*; | [
"javax.annotation",
"org.apache.flink",
"org.apache.hadoop",
"org.apache.thrift"
] | javax.annotation; org.apache.flink; org.apache.hadoop; org.apache.thrift; | 2,258,442 |
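A short usage sketch for the factory above, describing an unpartitioned table. The database and table names are placeholders, new HiveConf() assumes hive-site.xml is available on the classpath, and passing null for hiveVersion relies on the auto-detection mentioned in the Javadoc.

import org.apache.flink.connectors.hive.HiveTablePartition;
import org.apache.hadoop.hive.conf.HiveConf;

// Illustration only.
public final class OfTableSketch {
    public static HiveTablePartition describeOrdersTable() {
        HiveConf hiveConf = new HiveConf();
        return HiveTablePartition.ofTable(hiveConf, null, "default", "orders");
    }
}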
@Override
public IAutoEditStrategy[] getAutoEditStrategies(ISourceViewer sourceViewer, String contentType) {
return new IAutoEditStrategy[] { getPyAutoIndentStrategy(null) };
} | IAutoEditStrategy[] function(ISourceViewer sourceViewer, String contentType) { return new IAutoEditStrategy[] { getPyAutoIndentStrategy(null) }; } | /**
 * Cache the result, because we'll get asked for it multiple times. Now, we always return the PyAutoIndentStrategy (even on commented lines).
*
* @return PyAutoIndentStrategy which deals with spaces/tabs
 */ | Cache the result, because we'll get asked for it multiple times. Now, we always return the PyAutoIndentStrategy (even on commented lines) | getAutoEditStrategies | {
"repo_name": "RandallDW/Aruba_plugin",
"path": "plugins/org.python.pydev/src/org/python/pydev/editor/PyEditConfigurationWithoutEditor.java",
"license": "epl-1.0",
"size": 16934
} | [
"org.eclipse.jface.text.IAutoEditStrategy",
"org.eclipse.jface.text.source.ISourceViewer"
] | import org.eclipse.jface.text.IAutoEditStrategy; import org.eclipse.jface.text.source.ISourceViewer; | import org.eclipse.jface.text.*; import org.eclipse.jface.text.source.*; | [
"org.eclipse.jface"
] | org.eclipse.jface; | 294,795 |
public int countDifferencesBetweenAnd(String first, String second) {
if (first == null) {
if (second == null) {
return 0;
} else {
first = "";
}
} else if (second == null) {
second = "";
}
LinkedList<DiffMatchPatch.Diff> diffs = getDiffs(first, second);
int diffCount = 0;
for (DiffMatchPatch.Diff diff : diffs) {
if (diff.operation != DiffMatchPatch.Operation.EQUAL) {
diffCount++;
}
}
return diffCount;
} | int function(String first, String second) { if (first == null) { if (second == null) { return 0; } else { first = STR"; } LinkedList<DiffMatchPatch.Diff> diffs = getDiffs(first, second); int diffCount = 0; for (DiffMatchPatch.Diff diff : diffs) { if (diff.operation != DiffMatchPatch.Operation.EQUAL) { diffCount++; } } return diffCount; } | /**
* Determines number of differences (substrings that are not equal) between two strings.
* @param first first string to compare.
* @param second second string to compare.
* @return number of different substrings.
*/ | Determines number of differences (substrings that are not equal) between two strings | countDifferencesBetweenAnd | {
"repo_name": "fhoeben/hsac-fitnesse-fixtures",
"path": "src/main/java/nl/hsac/fitnesse/fixture/slim/CompareFixture.java",
"license": "apache-2.0",
"size": 7267
} | [
"com.sksamuel.diffpatch.DiffMatchPatch",
"java.util.LinkedList"
] | import com.sksamuel.diffpatch.DiffMatchPatch; import java.util.LinkedList; | import com.sksamuel.diffpatch.*; import java.util.*; | [
"com.sksamuel.diffpatch",
"java.util"
] | com.sksamuel.diffpatch; java.util; | 1,519,745 |
public static File getResourcesDirectory() {
if (provider != null) {
return provider.getResourcesDirectory();
} else {
return new File("");
}
} | static File function() { if (provider != null) { return provider.getResourcesDirectory(); } else { return new File(""); } } | /**
* Gets a directory where all the content that is being created should be saved.
*
* @return The directory.
*/ | Gets a directory where all the content that is being created should be saved | getResourcesDirectory | {
"repo_name": "JKatzwinkel/settlers-remake",
"path": "jsettlers.common/src/jsettlers/common/resources/ResourceManager.java",
"license": "mit",
"size": 2696
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 1,944,371 |
protected boolean isCancelledOrInterrupted(Exchange exchange) {
boolean answer = false;
if (ExchangeHelper.isInterrupted(exchange)) {
// mark the exchange to stop continue routing when interrupted
// as we do not want to continue routing (for example a task has been cancelled)
exchange.setProperty(Exchange.ROUTE_STOP, Boolean.TRUE);
answer = true;
}
LOG.trace("Is exchangeId: {} interrupted? {}", exchange.getExchangeId(), answer);
return answer;
} | boolean function(Exchange exchange) { boolean answer = false; if (ExchangeHelper.isInterrupted(exchange)) { exchange.setProperty(Exchange.ROUTE_STOP, Boolean.TRUE); answer = true; } LOG.trace(STR, exchange.getExchangeId(), answer); return answer; } | /**
* Strategy to determine if the exchange was cancelled or interrupted
*/ | Strategy to determine if the exchange was cancelled or interrupted | isCancelledOrInterrupted | {
"repo_name": "CodeSmell/camel",
"path": "core/camel-base/src/main/java/org/apache/camel/processor/errorhandler/RedeliveryErrorHandler.java",
"license": "apache-2.0",
"size": 61537
} | [
"org.apache.camel.Exchange",
"org.apache.camel.support.ExchangeHelper"
] | import org.apache.camel.Exchange; import org.apache.camel.support.ExchangeHelper; | import org.apache.camel.*; import org.apache.camel.support.*; | [
"org.apache.camel"
] | org.apache.camel; | 6,169 |
public static String toISOString(Date date, String format, TimeZone tz) {
if (format == null) format = FORMAT_DATE_ISO;
if (tz == null) tz = TimeZone.getDefault();
DateFormat f = new SimpleDateFormat(format);
f.setTimeZone(tz);
return f.format(date);
} | static String function(Date date, String format, TimeZone tz) { if (format == null) format = FORMAT_DATE_ISO; if (tz == null) tz = TimeZone.getDefault(); DateFormat f = new SimpleDateFormat(format); f.setTimeZone(tz); return f.format(date); } | /**
* Render date
*
* @param date the date obj
* @param format - if not specified, will use FORMAT_DATE_ISO
* @param tz - tz to set to, if not specified uses local timezone
* @return the iso-formatted date string
*/ | Render date | toISOString | {
"repo_name": "TecMunky/xDrip",
"path": "app/src/main/java/com/eveningoutpost/dexdrip/Models/DateUtil.java",
"license": "gpl-3.0",
"size": 3340
} | [
"java.text.DateFormat",
"java.text.SimpleDateFormat",
"java.util.Date",
"java.util.TimeZone"
] | import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; import java.util.TimeZone; | import java.text.*; import java.util.*; | [
"java.text",
"java.util"
] | java.text; java.util; | 1,284,428 |
@RequestMapping(value = "/logout")
@ResponseBody
public final String appLogout(final HttpSession sc, final HttpServletRequest request) {
getLog().info("App Logout");
SecurityContextHolder.clearContext();
request.getSession(false).invalidate();
return "{ \"loggedout\": true }";
} | @RequestMapping(value = STR) final String function(final HttpSession sc, final HttpServletRequest request) { getLog().info(STR); SecurityContextHolder.clearContext(); request.getSession(false).invalidate(); return STRloggedout\STR; } | /**
* Logs out the user from the app.
*
* @return JSON
*/ | Logs out the user from the app | appLogout | {
"repo_name": "MinHalsoplan/netcare-healthplan",
"path": "netcare-web/src/main/java/org/callistasoftware/netcare/web/mobile/controller/MobileController.java",
"license": "agpl-3.0",
"size": 3187
} | [
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpSession",
"org.springframework.security.core.context.SecurityContextHolder",
"org.springframework.web.bind.annotation.RequestMapping"
] | import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.web.bind.annotation.RequestMapping; | import javax.servlet.http.*; import org.springframework.security.core.context.*; import org.springframework.web.bind.annotation.*; | [
"javax.servlet",
"org.springframework.security",
"org.springframework.web"
] | javax.servlet; org.springframework.security; org.springframework.web; | 715,814 |
@Override
public Optional<Concept> getAssociation(String code, Terminology terminology, IncludeParam ip)
throws JsonMappingException, JsonParseException, IOException {
List<Concept> associations = getAssociations(terminology, ip);
return associations.stream().filter(a -> a.getCode().equals(code)).findFirst();
} | Optional<Concept> function(String code, Terminology terminology, IncludeParam ip) throws JsonMappingException, JsonParseException, IOException { List<Concept> associations = getAssociations(terminology, ip); return associations.stream().filter(a -> a.getCode().equals(code)).findFirst(); } | /**
* see superclass *.
*
* @param code the code
* @param terminology the terminology
* @param ip the ip
* @return the association
* @throws JsonMappingException the json mapping exception
* @throws JsonParseException the json parse exception
* @throws IOException Signals that an I/O exception has occurred.
*/ | see superclass * | getAssociation | {
"repo_name": "NCIEVS/evsrestapi",
"path": "src/main/java/gov/nih/nci/evs/api/service/ElasticQueryServiceImpl.java",
"license": "bsd-3-clause",
"size": 28163
} | [
"com.fasterxml.jackson.core.JsonParseException",
"com.fasterxml.jackson.databind.JsonMappingException",
"gov.nih.nci.evs.api.model.Concept",
"gov.nih.nci.evs.api.model.IncludeParam",
"gov.nih.nci.evs.api.model.Terminology",
"java.io.IOException",
"java.util.List",
"java.util.Optional"
] | import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; import gov.nih.nci.evs.api.model.Concept; import gov.nih.nci.evs.api.model.IncludeParam; import gov.nih.nci.evs.api.model.Terminology; import java.io.IOException; import java.util.List; import java.util.Optional; | import com.fasterxml.jackson.core.*; import com.fasterxml.jackson.databind.*; import gov.nih.nci.evs.api.model.*; import java.io.*; import java.util.*; | [
"com.fasterxml.jackson",
"gov.nih.nci",
"java.io",
"java.util"
] | com.fasterxml.jackson; gov.nih.nci; java.io; java.util; | 859,951 |
@Test
public void testReadWriteBytes() throws Exception {
long start = System.currentTimeMillis();
File file1 = system.createFile("testFile1");
assertEquals(0, file1.getLength());
OutputStream outputStream1 = file1.getOutputStream();
// Write some random data. Make sure it fills several chunks
outputStream1.write(2);
byte[] data = new byte[LARGE_CHUNK];
rand.nextBytes(data);
outputStream1.write(data);
outputStream1.write(44);
outputStream1.close();
assertEquals(2 + LARGE_CHUNK, file1.getLength());
assertTrue(file1.getModified() >= start);
// Append to the file with a new outputstream
OutputStream outputStream2 = file1.getOutputStream();
outputStream2.write(123);
byte[] data2 = new byte[SMALL_CHUNK];
rand.nextBytes(data2);
outputStream2.write(data2);
outputStream2.close();
assertEquals(3 + LARGE_CHUNK + SMALL_CHUNK, file1.getLength());
// Make sure we can read all of the data back and it matches
InputStream is = file1.getInputStream();
assertEquals(2, is.read());
byte[] resultData = new byte[LARGE_CHUNK];
assertEquals(LARGE_CHUNK, is.read(resultData));
assertArrayEquals(data, resultData);
assertEquals(44, is.read());
assertEquals(123, is.read());
// Test read to an offset
Arrays.fill(resultData, (byte) 0);
assertEquals(SMALL_CHUNK, is.read(resultData, 50, SMALL_CHUNK));
// Make sure the data read matches
byte[] expectedData = new byte[LARGE_CHUNK];
Arrays.fill(expectedData, (byte) 0);
System.arraycopy(data2, 0, expectedData, 50, data2.length);
assertArrayEquals(expectedData, resultData);
assertEquals(-1, is.read());
assertEquals(-1, is.read(data));
is.close();
// Test the skip interface
is = file1.getInputStream();
is.skip(LARGE_CHUNK + 3);
Arrays.fill(resultData, (byte) 0);
assertEquals(SMALL_CHUNK, is.read(resultData));
Arrays.fill(expectedData, (byte) 0);
System.arraycopy(data2, 0, expectedData, 0, data2.length);
assertArrayEquals(expectedData, resultData);
assertEquals(-1, is.read());
} | void function() throws Exception { long start = System.currentTimeMillis(); File file1 = system.createFile(STR); assertEquals(0, file1.getLength()); OutputStream outputStream1 = file1.getOutputStream(); outputStream1.write(2); byte[] data = new byte[LARGE_CHUNK]; rand.nextBytes(data); outputStream1.write(data); outputStream1.write(44); outputStream1.close(); assertEquals(2 + LARGE_CHUNK, file1.getLength()); assertTrue(file1.getModified() >= start); OutputStream outputStream2 = file1.getOutputStream(); outputStream2.write(123); byte[] data2 = new byte[SMALL_CHUNK]; rand.nextBytes(data2); outputStream2.write(data2); outputStream2.close(); assertEquals(3 + LARGE_CHUNK + SMALL_CHUNK, file1.getLength()); InputStream is = file1.getInputStream(); assertEquals(2, is.read()); byte[] resultData = new byte[LARGE_CHUNK]; assertEquals(LARGE_CHUNK, is.read(resultData)); assertArrayEquals(data, resultData); assertEquals(44, is.read()); assertEquals(123, is.read()); Arrays.fill(resultData, (byte) 0); assertEquals(SMALL_CHUNK, is.read(resultData, 50, SMALL_CHUNK)); byte[] expectedData = new byte[LARGE_CHUNK]; Arrays.fill(expectedData, (byte) 0); System.arraycopy(data2, 0, expectedData, 50, data2.length); assertArrayEquals(expectedData, resultData); assertEquals(-1, is.read()); assertEquals(-1, is.read(data)); is.close(); is = file1.getInputStream(); is.skip(LARGE_CHUNK + 3); Arrays.fill(resultData, (byte) 0); assertEquals(SMALL_CHUNK, is.read(resultData)); Arrays.fill(expectedData, (byte) 0); System.arraycopy(data2, 0, expectedData, 0, data2.length); assertArrayEquals(expectedData, resultData); assertEquals(-1, is.read()); } | /**
* A test of reading and writing to a file.
*/ | A test of reading and writing to a file | testReadWriteBytes | {
"repo_name": "smgoller/geode",
"path": "geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/filesystem/FileSystemJUnitTest.java",
"license": "apache-2.0",
"size": 18891
} | [
"java.io.InputStream",
"java.io.OutputStream",
"java.util.Arrays",
"org.junit.Assert"
] | import java.io.InputStream; import java.io.OutputStream; import java.util.Arrays; import org.junit.Assert; | import java.io.*; import java.util.*; import org.junit.*; | [
"java.io",
"java.util",
"org.junit"
] | java.io; java.util; org.junit; | 1,186,006 |
// * @return - Arraylist< P >
public ArrayList< P > getOrderedNearestNeighboringPoints(){ return neighbors; } | public ArrayList< P > getOrderedNearestNeighboringPoints(){ return neighbors; } | /**
* Returns a certain nearest neighbor (relative to the basis point) of this {@link AbstractPointDescriptor}
*
* @param index - the index (0 means first nearest neighbor)
* @return the {@link Point} instance
*/ | Returns a certain nearest neighbor (relative to the basis point) of this <code>AbstractPointDescriptor</code> | getDescriptorPoint | {
"repo_name": "fiji/SPIM_Registration",
"path": "src/main/java/mpicbg/pointdescriptor/AbstractPointDescriptor.java",
"license": "gpl-2.0",
"size": 9343
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 1,110,544 |
public T getFirst() throws DataConnectionException {
List<T> items = getList();
return items.size() > 0 ? items.get(0) : null;
} | T function() throws DataConnectionException { List<T> items = getList(); return items.size() > 0 ? items.get(0) : null; } | /**
* A convenience method to return the first object of a given query which would normally be returned by {@link SqlExecutor#getList}.
* @return An object of type T that is the result of querying the database.
* @throws DataConnectionException
*/ | A convenience method to return the first object of a given query which would normally be returned by <code>SqlExecutor#getList</code> | getFirst | {
"repo_name": "njkremer/SqliteORM",
"path": "src/com/njkremer/Sqlite/SqlExecutor.java",
"license": "apache-2.0",
"size": 44225
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 243,709 |
public void addDvPrePaidRegistrantLine(DisbursementVoucherPreConferenceRegistrant line) {
line.setFinancialDocumentLineNumber(getFinDocNextRegistrantLineNbr());
this.getDvPreConferenceDetail().getDvPreConferenceRegistrants().add(line);
this.finDocNextRegistrantLineNbr = new Integer(getFinDocNextRegistrantLineNbr().intValue() + 1);
} | void function(DisbursementVoucherPreConferenceRegistrant line) { line.setFinancialDocumentLineNumber(getFinDocNextRegistrantLineNbr()); this.getDvPreConferenceDetail().getDvPreConferenceRegistrants().add(line); this.finDocNextRegistrantLineNbr = new Integer(getFinDocNextRegistrantLineNbr().intValue() + 1); } | /**
* Adds a dv pre-paid registrant line
*
* @param line
*/ | Adds a dv pre-paid registrant line | addDvPrePaidRegistrantLine | {
"repo_name": "bhutchinson/kfs",
"path": "kfs-core/src/main/java/org/kuali/kfs/fp/document/DisbursementVoucherDocument.java",
"license": "agpl-3.0",
"size": 80760
} | [
"org.kuali.kfs.fp.businessobject.DisbursementVoucherPreConferenceRegistrant"
] | import org.kuali.kfs.fp.businessobject.DisbursementVoucherPreConferenceRegistrant; | import org.kuali.kfs.fp.businessobject.*; | [
"org.kuali.kfs"
] | org.kuali.kfs; | 899,699 |
public List<String> getRuleTypes()
{
return ruleTypes;
}
| List<String> function() { return ruleTypes; } | /**
* Get the rules rule types.
*
* @return a list of rule types
*/ | Get the rules rule types | getRuleTypes | {
"repo_name": "Alfresco/alfresco-repository",
"path": "src/main/java/org/alfresco/service/cmr/rule/Rule.java",
"license": "lgpl-3.0",
"size": 7538
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,895,026 |
public int read() throws IOException {
if (fromQueue) {
int ch = rxQueue.dequeue();
fromQueue = rxQueue.availToRead() > 0;
return ch;
} else return US.GET4(UART0_FIFO + diff);
}
| int function() throws IOException { if (fromQueue) { int ch = rxQueue.dequeue(); fromQueue = rxQueue.availToRead() > 0; return ch; } else return US.GET4(UART0_FIFO + diff); } | /**
* Reads one byte from the UART. A call of
* this method is not blocking!
*
* @return byte read.
* @throws IOException
* if no byte available.
*/ | Reads one byte from the UART. A call of this method is not blocking | read | {
"repo_name": "deepjava/runtime-library",
"path": "src/org/deepjava/runtime/zynq7000/driver/UART.java",
"license": "apache-2.0",
"size": 10944
} | [
"java.io.IOException",
"org.deepjava.unsafe.arm.US"
] | import java.io.IOException; import org.deepjava.unsafe.arm.US; | import java.io.*; import org.deepjava.unsafe.arm.*; | [
"java.io",
"org.deepjava.unsafe"
] | java.io; org.deepjava.unsafe; | 906,487 |
protected Map<String, Object> getCasPrincipalAttributes(final Map<String, Object> model, final RegisteredService registeredService) {
return super.getPrincipalAttributesAsMultiValuedAttributes(model);
} | Map<String, Object> function(final Map<String, Object> model, final RegisteredService registeredService) { return super.getPrincipalAttributesAsMultiValuedAttributes(model); } | /**
* Put cas principal attributes into model.
*
* @param model the model
* @param registeredService the registered service
* @return the cas principal attributes
*/ | Put cas principal attributes into model | getCasPrincipalAttributes | {
"repo_name": "Unicon/cas",
"path": "support/cas-server-support-validation/src/main/java/org/apereo/cas/web/view/Cas30ResponseView.java",
"license": "apache-2.0",
"size": 7855
} | [
"java.util.Map",
"org.apereo.cas.services.RegisteredService"
] | import java.util.Map; import org.apereo.cas.services.RegisteredService; | import java.util.*; import org.apereo.cas.services.*; | [
"java.util",
"org.apereo.cas"
] | java.util; org.apereo.cas; | 2,274,174 |
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
response.setContentType("text/html;charset=UTF-8");
try (PrintWriter out = response.getWriter()) {
out.println("<!DOCTYPE html>");
out.println("<html>");
out.println("<head>");
out.println("<title>Servlet Servlet2</title>");
out.println("</head>");
out.println("<body>");
out.println("<h1>Servlet Servlet2 at " + request.getContextPath() + "</h1>");
out.println("</body>");
out.println("</html>");
}
} | void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType(STR); try (PrintWriter out = response.getWriter()) { out.println(STR); out.println(STR); out.println(STR); out.println(STR); out.println(STR); out.println(STR); out.println(STR + request.getContextPath() + "</h1>"); out.println(STR); out.println(STR); } } | /**
* Processes requests for both HTTP <code>GET</code> and <code>POST</code>
* methods.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/ | Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods | processRequest | {
"repo_name": "liobouchan/TecnologiasWeb",
"path": "Marcos/src/java/Servlet2.java",
"license": "gpl-2.0",
"size": 2871
} | [
"java.io.IOException",
"java.io.PrintWriter",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] | import java.io.IOException; import java.io.PrintWriter; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; | import java.io.*; import javax.servlet.*; import javax.servlet.http.*; | [
"java.io",
"javax.servlet"
] | java.io; javax.servlet; | 123,633 |
public void exitParam_clause(SQLParser.Param_clauseContext ctx) { } | public void exitParam_clause(SQLParser.Param_clauseContext ctx) { } | /**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/ | The default implementation does nothing | enterParam_clause | {
"repo_name": "HEIG-GAPS/slasher",
"path": "slasher.corrector/src/main/java/ch/gaps/slasher/corrector/SQLParserBaseListener.java",
"license": "mit",
"size": 73849
} | [
"ch.gaps.slasher.corrector.SQLParser"
] | import ch.gaps.slasher.corrector.SQLParser; | import ch.gaps.slasher.corrector.*; | [
"ch.gaps.slasher"
] | ch.gaps.slasher; | 761,019 |
public Key unwrapMapValueType(Key key) {
if (MapType.isMap(key)) {
MapType mapType = MapType.from(key);
if (!mapType.isRawType()) {
for (Class<?> frameworkClass : asList(Provider.class, Producer.class, Produced.class)) {
if (mapType.valuesAreTypeOf(frameworkClass)) {
return key.toBuilder()
.type(
fromJava(mapOf(mapType.keyType(), mapType.unwrappedValueType(frameworkClass))))
.build();
}
}
}
}
return key;
} | Key function(Key key) { if (MapType.isMap(key)) { MapType mapType = MapType.from(key); if (!mapType.isRawType()) { for (Class<?> frameworkClass : asList(Provider.class, Producer.class, Produced.class)) { if (mapType.valuesAreTypeOf(frameworkClass)) { return key.toBuilder() .type( fromJava(mapOf(mapType.keyType(), mapType.unwrappedValueType(frameworkClass)))) .build(); } } } } return key; } | /**
* If {@code key}'s type is {@code Map<K, Provider<V>>}, {@code Map<K, Producer<V>>}, or {@code
* Map<K, Produced<V>>}, returns a key with the same qualifier and {@link
* Key#multibindingContributionIdentifier()} whose type is simply {@code Map<K, V>}.
*
* <p>Otherwise, returns {@code key}.
*/ | If key's type is Map>, Map>, or Map>, returns a key with the same qualifier and <code>Key#multibindingContributionIdentifier()</code> whose type is simply Map. Otherwise, returns key | unwrapMapValueType | {
"repo_name": "dushmis/dagger",
"path": "java/dagger/internal/codegen/binding/KeyFactory.java",
"license": "apache-2.0",
"size": 18630
} | [
"java.util.Arrays",
"javax.inject.Provider"
] | import java.util.Arrays; import javax.inject.Provider; | import java.util.*; import javax.inject.*; | [
"java.util",
"javax.inject"
] | java.util; javax.inject; | 2,059,716 |
public GraphEdge getEdgeByTriples(Triple triple1, Triple triple2) {
// Retrieve the GraphNodes which correspond to both the triple1 and
// triple2
GraphNode node1 = getNodeByTriple(triple1);
GraphNode node2 = getNodeByTriple(triple2);
// Get the OID for the edge
int oid = node1.oid() * node2.oid();
// Integer oid = - (int)((Math.pow(node1.oid() + node2.oid() - 1, 2) +
// node1.oid() - node2.oid() + 1) / 2) ;
if (!edges.containsKey(new Integer(oid)))
SWANTLogger.severe("GraphEdge not found: " + oid);
return (GraphEdge) edges.get(new Integer(oid));
}
| GraphEdge function(Triple triple1, Triple triple2) { GraphNode node1 = getNodeByTriple(triple1); GraphNode node2 = getNodeByTriple(triple2); int oid = node1.oid() * node2.oid(); if (!edges.containsKey(new Integer(oid))) SWANTLogger.severe(STR + oid); return (GraphEdge) edges.get(new Integer(oid)); } | /**
* The method allows a retrieval of a GraphEdge, a specific edge of the
* ConnectedGraph by given the two triples which correspond to the nodes of
* the edge.
*
* @param triple1
* @param triple2
* @return GraphEdge
*/ | The method allows a retrieval of a GraphEdge, a specific edge of the ConnectedGraph by given the two triples which correspond to the nodes of the edge | getEdgeByTriples | {
"repo_name": "tekrei/ARQ-ACO",
"path": "src/stocker/core/ConnectedGraph.java",
"license": "apache-2.0",
"size": 16096
} | [
"com.hp.hpl.jena.graph.Triple"
] | import com.hp.hpl.jena.graph.Triple; | import com.hp.hpl.jena.graph.*; | [
"com.hp.hpl"
] | com.hp.hpl; | 1,388,339 |
private KeyManager[] getKeyManagers(String keys) throws Exception {
byte[] bytes = new Base64().decode(keys.getBytes());
InputStream inputStream = new ByteArrayInputStream(bytes);
KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
keyStore.load(inputStream, PASSWORD.toCharArray());
inputStream.close();
String algorithm = KeyManagerFactory.getDefaultAlgorithm();
KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(algorithm);
keyManagerFactory.init(keyStore, PASSWORD.toCharArray());
return keyManagerFactory.getKeyManagers();
} | KeyManager[] function(String keys) throws Exception { byte[] bytes = new Base64().decode(keys.getBytes()); InputStream inputStream = new ByteArrayInputStream(bytes); KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); keyStore.load(inputStream, PASSWORD.toCharArray()); inputStream.close(); String algorithm = KeyManagerFactory.getDefaultAlgorithm(); KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(algorithm); keyManagerFactory.init(keyStore, PASSWORD.toCharArray()); return keyManagerFactory.getKeyManagers(); } | /**
* Loads a keystore from a base64-encoded String. Returns the KeyManager[]
* for the result.
*/ | Loads a keystore from a base64-encoded String. Returns the KeyManager[] for the result | getKeyManagers | {
"repo_name": "JSDemos/android-sdk-20",
"path": "src/android/core/SSLSocketTest.java",
"license": "apache-2.0",
"size": 46271
} | [
"java.io.ByteArrayInputStream",
"java.io.InputStream",
"java.security.KeyStore",
"javax.net.ssl.KeyManager",
"javax.net.ssl.KeyManagerFactory",
"org.apache.commons.codec.binary.Base64"
] | import java.io.ByteArrayInputStream; import java.io.InputStream; import java.security.KeyStore; import javax.net.ssl.KeyManager; import javax.net.ssl.KeyManagerFactory; import org.apache.commons.codec.binary.Base64; | import java.io.*; import java.security.*; import javax.net.ssl.*; import org.apache.commons.codec.binary.*; | [
"java.io",
"java.security",
"javax.net",
"org.apache.commons"
] | java.io; java.security; javax.net; org.apache.commons; | 418,186 |
public TSelf constraint(Collection<OperatorConstraint> constraints) {
TOperator owner = getOwner();
owner.constraints.addAll(constraints);
return getSelf();
} | TSelf function(Collection<OperatorConstraint> constraints) { TOperator owner = getOwner(); owner.constraints.addAll(constraints); return getSelf(); } | /**
* Adds constraints to the building operator.
* @param constraints the constraints
* @return this
*/ | Adds constraints to the building operator | constraint | {
"repo_name": "akirakw/asakusafw-compiler",
"path": "compiler-project/model/src/main/java/com/asakusafw/lang/compiler/model/graph/Operator.java",
"license": "apache-2.0",
"size": 15323
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 636,258 |
public static Path deleteTestFile(Path rootDir) {
Path testFilePath = rootDir.resolve(Config.DEFAULT.getTestFileName1());
File file = testFilePath.toFile();
if (file.exists()) {
file.delete();
}
return testFilePath;
} | static Path function(Path rootDir) { Path testFilePath = rootDir.resolve(Config.DEFAULT.getTestFileName1()); File file = testFilePath.toFile(); if (file.exists()) { file.delete(); } return testFilePath; } | /**
* Deletes the test file from the given root directory
*
* @param rootDir The root directory from which the test file is removed
*
* @return The path of the deleted test file
*/ | Deletes the test file from the given root directory | deleteTestFile | {
"repo_name": "p2p-sync/commons",
"path": "src/test/java/org/rmatil/sync/commons/test/util/FileUtil.java",
"license": "apache-2.0",
"size": 2789
} | [
"java.io.File",
"java.nio.file.Path",
"org.rmatil.sync.commons.test.config.Config"
] | import java.io.File; import java.nio.file.Path; import org.rmatil.sync.commons.test.config.Config; | import java.io.*; import java.nio.file.*; import org.rmatil.sync.commons.test.config.*; | [
"java.io",
"java.nio",
"org.rmatil.sync"
] | java.io; java.nio; org.rmatil.sync; | 49,043 |
public void tick()
{
long i = System.nanoTime();
net.minecraftforge.fml.common.FMLCommonHandler.instance().onPreServerTick();
++this.tickCounter;
if (this.startProfiling)
{
this.startProfiling = false;
this.theProfiler.profilingEnabled = true;
this.theProfiler.clearProfiling();
}
this.theProfiler.startSection("root");
this.updateTimeLightAndEntities();
if (i - this.nanoTimeSinceStatusRefresh >= 5000000000L)
{
this.nanoTimeSinceStatusRefresh = i;
this.statusResponse.setPlayers(new ServerStatusResponse.Players(this.getMaxPlayers(), this.getCurrentPlayerCount()));
GameProfile[] agameprofile = new GameProfile[Math.min(this.getCurrentPlayerCount(), 12)];
int j = MathHelper.getRandomIntegerInRange(this.random, 0, this.getCurrentPlayerCount() - agameprofile.length);
for (int k = 0; k < agameprofile.length; ++k)
{
agameprofile[k] = ((EntityPlayerMP)this.playerList.getPlayerList().get(j + k)).getGameProfile();
}
Collections.shuffle(Arrays.asList(agameprofile));
this.statusResponse.getPlayers().setPlayers(agameprofile);
this.statusResponse.invalidateJson();
}
if (this.tickCounter % 900 == 0)
{
this.theProfiler.startSection("save");
this.playerList.saveAllPlayerData();
this.saveAllWorlds(true);
this.theProfiler.endSection();
}
this.theProfiler.startSection("tallying");
this.tickTimeArray[this.tickCounter % 100] = System.nanoTime() - i;
this.theProfiler.endSection();
this.theProfiler.startSection("snooper");
if (!this.usageSnooper.isSnooperRunning() && this.tickCounter > 100)
{
this.usageSnooper.startSnooper();
}
if (this.tickCounter % 6000 == 0)
{
this.usageSnooper.addMemoryStatsToSnooper();
}
this.theProfiler.endSection();
this.theProfiler.endSection();
net.minecraftforge.fml.common.FMLCommonHandler.instance().onPostServerTick();
} | void function() { long i = System.nanoTime(); net.minecraftforge.fml.common.FMLCommonHandler.instance().onPreServerTick(); ++this.tickCounter; if (this.startProfiling) { this.startProfiling = false; this.theProfiler.profilingEnabled = true; this.theProfiler.clearProfiling(); } this.theProfiler.startSection("root"); this.updateTimeLightAndEntities(); if (i - this.nanoTimeSinceStatusRefresh >= 5000000000L) { this.nanoTimeSinceStatusRefresh = i; this.statusResponse.setPlayers(new ServerStatusResponse.Players(this.getMaxPlayers(), this.getCurrentPlayerCount())); GameProfile[] agameprofile = new GameProfile[Math.min(this.getCurrentPlayerCount(), 12)]; int j = MathHelper.getRandomIntegerInRange(this.random, 0, this.getCurrentPlayerCount() - agameprofile.length); for (int k = 0; k < agameprofile.length; ++k) { agameprofile[k] = ((EntityPlayerMP)this.playerList.getPlayerList().get(j + k)).getGameProfile(); } Collections.shuffle(Arrays.asList(agameprofile)); this.statusResponse.getPlayers().setPlayers(agameprofile); this.statusResponse.invalidateJson(); } if (this.tickCounter % 900 == 0) { this.theProfiler.startSection("save"); this.playerList.saveAllPlayerData(); this.saveAllWorlds(true); this.theProfiler.endSection(); } this.theProfiler.startSection(STR); this.tickTimeArray[this.tickCounter % 100] = System.nanoTime() - i; this.theProfiler.endSection(); this.theProfiler.startSection(STR); if (!this.usageSnooper.isSnooperRunning() && this.tickCounter > 100) { this.usageSnooper.startSnooper(); } if (this.tickCounter % 6000 == 0) { this.usageSnooper.addMemoryStatsToSnooper(); } this.theProfiler.endSection(); this.theProfiler.endSection(); net.minecraftforge.fml.common.FMLCommonHandler.instance().onPostServerTick(); } | /**
* Main function called by run() every loop.
*/ | Main function called by run() every loop | tick | {
"repo_name": "danielyc/test-1.9.4",
"path": "build/tmp/recompileMc/sources/net/minecraft/server/MinecraftServer.java",
"license": "gpl-3.0",
"size": 55327
} | [
"com.mojang.authlib.GameProfile",
"java.util.Arrays",
"java.util.Collections",
"net.minecraft.entity.player.EntityPlayerMP",
"net.minecraft.network.ServerStatusResponse",
"net.minecraft.util.math.MathHelper"
] | import com.mojang.authlib.GameProfile; import java.util.Arrays; import java.util.Collections; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.network.ServerStatusResponse; import net.minecraft.util.math.MathHelper; | import com.mojang.authlib.*; import java.util.*; import net.minecraft.entity.player.*; import net.minecraft.network.*; import net.minecraft.util.math.*; | [
"com.mojang.authlib",
"java.util",
"net.minecraft.entity",
"net.minecraft.network",
"net.minecraft.util"
] | com.mojang.authlib; java.util; net.minecraft.entity; net.minecraft.network; net.minecraft.util; | 264,031 |
public void ensureUnitsAreKilledFirst(
final Collection<Unit> targets,
final Predicate<Unit> matcher,
final Comparator<Unit> shouldBeKilledFirst) {
final Map<UnitOwner, List<Unit>> targetsGroupedByOwnerAndType =
targets.stream().collect(Collectors.groupingBy(UnitOwner::new, Collectors.toList()));
final List<Unit> killedWithCorrectOrder =
ensureUnitsAreKilledFirst(
shouldBeKilledFirst,
targetsGroupedByOwnerAndType,
getKilled().stream()
.filter(matcher)
.collect(Collectors.groupingBy(UnitOwner::new, Collectors.toList())));
killed.addAll(
killedWithCorrectOrder.stream()
.filter(unit -> !killed.contains(unit))
.collect(Collectors.toList()));
killed.removeAll(
killed.stream()
.filter(matcher)
.filter(unit -> !killedWithCorrectOrder.contains(unit))
.collect(Collectors.toList()));
} | void function( final Collection<Unit> targets, final Predicate<Unit> matcher, final Comparator<Unit> shouldBeKilledFirst) { final Map<UnitOwner, List<Unit>> targetsGroupedByOwnerAndType = targets.stream().collect(Collectors.groupingBy(UnitOwner::new, Collectors.toList())); final List<Unit> killedWithCorrectOrder = ensureUnitsAreKilledFirst( shouldBeKilledFirst, targetsGroupedByOwnerAndType, getKilled().stream() .filter(matcher) .collect(Collectors.groupingBy(UnitOwner::new, Collectors.toList()))); killed.addAll( killedWithCorrectOrder.stream() .filter(unit -> !killed.contains(unit)) .collect(Collectors.toList())); killed.removeAll( killed.stream() .filter(matcher) .filter(unit -> !killedWithCorrectOrder.contains(unit)) .collect(Collectors.toList())); } | /**
* replaces the units in <code>killed</code> that match the <code>matcher</code> by the same
* number of units in <code>targets</code> that match the <code>matcher</code> and are first
* according to <code>shouldBeKilledFirst</code>
*/ | replaces the units in <code>killed</code> that match the <code>matcher</code> by the same number of units in <code>targets</code> that match the <code>matcher</code> and are first according to <code>shouldBeKilledFirst</code> | ensureUnitsAreKilledFirst | {
"repo_name": "RoiEXLab/triplea",
"path": "game-app/game-core/src/main/java/games/strategy/triplea/delegate/data/CasualtyDetails.java",
"license": "gpl-3.0",
"size": 8219
} | [
"games.strategy.engine.data.Unit",
"games.strategy.triplea.util.UnitOwner",
"java.util.Collection",
"java.util.Comparator",
"java.util.List",
"java.util.Map",
"java.util.function.Predicate",
"java.util.stream.Collectors"
] | import games.strategy.engine.data.Unit; import games.strategy.triplea.util.UnitOwner; import java.util.Collection; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.function.Predicate; import java.util.stream.Collectors; | import games.strategy.engine.data.*; import games.strategy.triplea.util.*; import java.util.*; import java.util.function.*; import java.util.stream.*; | [
"games.strategy.engine",
"games.strategy.triplea",
"java.util"
] | games.strategy.engine; games.strategy.triplea; java.util; | 2,403,128 |
public void testSubuqeryInSelectListOfDerivedTable() throws Exception {
Statement st = createStatement();
ResultSet rs = null;
String[][] expRS;
String[] expColNames;
rs = st.executeQuery(
"select * from " +
"(select (select iii from ttt " +
"where sss > i and " +
"sss = iii and iii <> 11) " +
"from s) a");
expColNames = new String [] {"1"};
JDBC.assertColumnNames(rs, expColNames);
expRS = new String [][] { {null}, {"22"}, {"22"} };
JDBC.assertFullResultSet(rs, expRS, true);
// bigint and subqueries
st.executeUpdate("create table li(i int, s smallint, l bigint)");
st.executeUpdate("insert into li values (null, null, null)");
st.executeUpdate("insert into li values (1, 1, 1)");
st.executeUpdate("insert into li values (2, 2, 2)");
rs = st.executeQuery(
"select l from li o where l = " +
"(select i from li i where o.l = i.i)");
expColNames = new String [] {"L"};
JDBC.assertColumnNames(rs, expColNames);
expRS = new String [][] { {"1"}, {"2"} };
JDBC.assertFullResultSet(rs, expRS, true);
rs = st.executeQuery(
"select l from li o where l = " +
"(select s from li i where o.l = i.s)");
expColNames = new String [] {"L"};
JDBC.assertColumnNames(rs, expColNames);
expRS = new String [][] { {"1"}, {"2"} };
JDBC.assertFullResultSet(rs, expRS, true);
rs = st.executeQuery(
"select l from li o where l = " +
"(select l from li i where o.l = i.l)");
expColNames = new String [] {"L"};
JDBC.assertColumnNames(rs, expColNames);
expRS = new String [][] { {"1"}, {"2"} };
JDBC.assertFullResultSet(rs, expRS, true);
rs = st.executeQuery(
"select l from li where l in (select i from li)");
expColNames = new String [] {"L"};
JDBC.assertColumnNames(rs, expColNames);
expRS = new String [][] { {"1"}, {"2"} };
JDBC.assertFullResultSet(rs, expRS, true);
rs = st.executeQuery(
"select l from li where l in (select s from li)");
expColNames = new String [] {"L"};
JDBC.assertColumnNames(rs, expColNames);
expRS = new String [][] { {"1"}, {"2"} };
JDBC.assertFullResultSet(rs, expRS, true);
rs = st.executeQuery(
"select l from li where l in (select l from li)");
expColNames = new String [] {"L"};
JDBC.assertColumnNames(rs, expColNames);
expRS = new String [][] { {"1"}, {"2"} };
JDBC.assertFullResultSet(rs, expRS, true);
} | void function() throws Exception { Statement st = createStatement(); ResultSet rs = null; String[][] expRS; String[] expColNames; rs = st.executeQuery( STR + STR + STR + STR + STR); expColNames = new String [] {"1"}; JDBC.assertColumnNames(rs, expColNames); expRS = new String [][] { {null}, {"22"}, {"22"} }; JDBC.assertFullResultSet(rs, expRS, true); st.executeUpdate(STR); st.executeUpdate(STR); st.executeUpdate(STR); st.executeUpdate(STR); rs = st.executeQuery( STR + STR); expColNames = new String [] {"L"}; JDBC.assertColumnNames(rs, expColNames); expRS = new String [][] { {"1"}, {"2"} }; JDBC.assertFullResultSet(rs, expRS, true); rs = st.executeQuery( STR + STR); expColNames = new String [] {"L"}; JDBC.assertColumnNames(rs, expColNames); expRS = new String [][] { {"1"}, {"2"} }; JDBC.assertFullResultSet(rs, expRS, true); rs = st.executeQuery( STR + STR); expColNames = new String [] {"L"}; JDBC.assertColumnNames(rs, expColNames); expRS = new String [][] { {"1"}, {"2"} }; JDBC.assertFullResultSet(rs, expRS, true); rs = st.executeQuery( STR); expColNames = new String [] {"L"}; JDBC.assertColumnNames(rs, expColNames); expRS = new String [][] { {"1"}, {"2"} }; JDBC.assertFullResultSet(rs, expRS, true); rs = st.executeQuery( STR); expColNames = new String [] {"L"}; JDBC.assertColumnNames(rs, expColNames); expRS = new String [][] { {"1"}, {"2"} }; JDBC.assertFullResultSet(rs, expRS, true); rs = st.executeQuery( STR); expColNames = new String [] {"L"}; JDBC.assertColumnNames(rs, expColNames); expRS = new String [][] { {"1"}, {"2"} }; JDBC.assertFullResultSet(rs, expRS, true); } | /**
* correlated subquery in select list of a derived table
* @throws Exception
*/ | correlated subquery in select list of a derived table | testSubuqeryInSelectListOfDerivedTable | {
"repo_name": "kavin256/Derby",
"path": "java/testing/org/apache/derbyTesting/functionTests/tests/lang/SubqueryTest.java",
"license": "apache-2.0",
"size": 86764
} | [
"java.sql.ResultSet",
"java.sql.Statement",
"org.apache.derbyTesting.junit.JDBC"
] | import java.sql.ResultSet; import java.sql.Statement; import org.apache.derbyTesting.junit.JDBC; | import java.sql.*; import org.apache.*; | [
"java.sql",
"org.apache"
] | java.sql; org.apache; | 2,328,195 |
NativeMouseInfo getNativeMouseInfo() {
return wtk.getNativeMouseInfo();
}
| NativeMouseInfo getNativeMouseInfo() { return wtk.getNativeMouseInfo(); } | /**
* Returns implementation of org.apache.harmony.awt.wtk.NativeMouseInfo
* for current platform.
* @return implementation of NativeMouseInfo
*/ | Returns implementation of org.apache.harmony.awt.wtk.NativeMouseInfo for current platform | getNativeMouseInfo | {
"repo_name": "skyHALud/codenameone",
"path": "Ports/iOSPort/xmlvm/apache-harmony-6.0-src-r991881/classlib/modules/awt/src/main/java/common/java/awt/Toolkit.java",
"license": "gpl-2.0",
"size": 48214
} | [
"org.apache.harmony.awt.wtk.NativeMouseInfo"
] | import org.apache.harmony.awt.wtk.NativeMouseInfo; | import org.apache.harmony.awt.wtk.*; | [
"org.apache.harmony"
] | org.apache.harmony; | 909,251 |
@SuppressWarnings("unchecked")
private void insertDataSet(Element eAppXml, Element eDatasets) {
// Adding DS definition in the coordinator XML
Element inputList = eAppXml.getChild("input-events", eAppXml.getNamespace());
if (inputList != null) {
for (Element dataIn : (List<Element>) inputList.getChildren("data-in", eAppXml.getNamespace())) {
Element eDataset = findDataSet(eDatasets, dataIn.getAttributeValue("dataset"));
dataIn.getContent().add(0, eDataset);
}
}
Element outputList = eAppXml.getChild("output-events", eAppXml.getNamespace());
if (outputList != null) {
for (Element dataOut : (List<Element>) outputList.getChildren("data-out", eAppXml.getNamespace())) {
Element eDataset = findDataSet(eDatasets, dataOut.getAttributeValue("dataset"));
dataOut.getContent().add(0, eDataset);
}
}
} | @SuppressWarnings(STR) void function(Element eAppXml, Element eDatasets) { Element inputList = eAppXml.getChild(STR, eAppXml.getNamespace()); if (inputList != null) { for (Element dataIn : (List<Element>) inputList.getChildren(STR, eAppXml.getNamespace())) { Element eDataset = findDataSet(eDatasets, dataIn.getAttributeValue(STR)); dataIn.getContent().add(0, eDataset); } } Element outputList = eAppXml.getChild(STR, eAppXml.getNamespace()); if (outputList != null) { for (Element dataOut : (List<Element>) outputList.getChildren(STR, eAppXml.getNamespace())) { Element eDataset = findDataSet(eDatasets, dataOut.getAttributeValue(STR)); dataOut.getContent().add(0, eDataset); } } } | /**
* Insert data set into data-in and data-out tags.
*
* @param eAppXml : coordinator application XML
* @param eDatasets : DataSet XML
*/ | Insert data set into data-in and data-out tags | insertDataSet | {
"repo_name": "sunmeng007/oozie",
"path": "core/src/main/java/org/apache/oozie/command/coord/CoordSubmitXCommand.java",
"license": "apache-2.0",
"size": 44340
} | [
"java.util.List",
"org.jdom.Element"
] | import java.util.List; import org.jdom.Element; | import java.util.*; import org.jdom.*; | [
"java.util",
"org.jdom"
] | java.util; org.jdom; | 2,075,191 |
public final Artifact getRelatedArtifact(PathFragment pathFragment, String extension) {
PathFragment file = FileSystemUtils.replaceExtension(pathFragment, extension);
return getDerivedArtifact(file, getConfiguration().getBinDirectory(rule.getRepository()));
} | final Artifact function(PathFragment pathFragment, String extension) { PathFragment file = FileSystemUtils.replaceExtension(pathFragment, extension); return getDerivedArtifact(file, getConfiguration().getBinDirectory(rule.getRepository())); } | /**
* Returns an artifact with a given file extension. All other path components
* are the same as in {@code pathFragment}.
*/ | Returns an artifact with a given file extension. All other path components are the same as in pathFragment | getRelatedArtifact | {
"repo_name": "damienmg/bazel",
"path": "src/main/java/com/google/devtools/build/lib/analysis/RuleContext.java",
"license": "apache-2.0",
"size": 78605
} | [
"com.google.devtools.build.lib.actions.Artifact",
"com.google.devtools.build.lib.vfs.FileSystemUtils",
"com.google.devtools.build.lib.vfs.PathFragment"
] | import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.PathFragment; | import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.vfs.*; | [
"com.google.devtools"
] | com.google.devtools; | 542,084 |
static String filterSecrets(String in) {
Matcher m = REMOVE_SECRETS.matcher(in);
return m.replaceAll("REMOVED");
} | static String filterSecrets(String in) { Matcher m = REMOVE_SECRETS.matcher(in); return m.replaceAll(STR); } | /**
* Removes security sensitive parameters from requests and responses.
*/ | Removes security sensitive parameters from requests and responses | filterSecrets | {
"repo_name": "hoatle/gatein-shindig",
"path": "java/gadgets/src/main/java/org/apache/shindig/gadgets/oauth/OAuthResponseParams.java",
"license": "apache-2.0",
"size": 7488
} | [
"java.util.regex.Matcher"
] | import java.util.regex.Matcher; | import java.util.regex.*; | [
"java.util"
] | java.util; | 2,049,309 |
public List<BatchRequestPart> parseBatchRequest(InputStream content, String boundary, BatchOptions options)
throws BatchDeserializerException; | List<BatchRequestPart> function(InputStream content, String boundary, BatchOptions options) throws BatchDeserializerException; | /**
* Reads batch data from an InputStream.
* @param content the data as multipart input stream
* @param boundary the boundary between the parts
* @param options options for the deserializer
* @return a list of batch-request parts
*/ | Reads batch data from an InputStream | parseBatchRequest | {
"repo_name": "mtaal/olingo-odata4-jpa",
"path": "lib/server-api/src/main/java/org/apache/olingo/server/api/deserializer/FixedFormatDeserializer.java",
"license": "apache-2.0",
"size": 2109
} | [
"java.io.InputStream",
"java.util.List",
"org.apache.olingo.server.api.deserializer.batch.BatchDeserializerException",
"org.apache.olingo.server.api.deserializer.batch.BatchOptions",
"org.apache.olingo.server.api.deserializer.batch.BatchRequestPart"
] | import java.io.InputStream; import java.util.List; import org.apache.olingo.server.api.deserializer.batch.BatchDeserializerException; import org.apache.olingo.server.api.deserializer.batch.BatchOptions; import org.apache.olingo.server.api.deserializer.batch.BatchRequestPart; | import java.io.*; import java.util.*; import org.apache.olingo.server.api.deserializer.batch.*; | [
"java.io",
"java.util",
"org.apache.olingo"
] | java.io; java.util; org.apache.olingo; | 1,087,963 |
public static String getMimeTypeFromFileName(@NonNull String fileName) {
return getMimeTypeFromExtension(getExtension(fileName));
} | static String function(@NonNull String fileName) { return getMimeTypeFromExtension(getExtension(fileName)); } | /**
* Get the most relevant mime-type for the given file name
*
* @param fileName File name to get the mime-type for
* @return Most relevant mime-type for the given file name; generic mime-type if none found
*/ | Get the most relevant mime-type for the given file name | getMimeTypeFromFileName | {
"repo_name": "AVnetWS/Hentoid",
"path": "app/src/main/java/me/devsaki/hentoid/util/FileHelper.java",
"license": "apache-2.0",
"size": 53994
} | [
"androidx.annotation.NonNull"
] | import androidx.annotation.NonNull; | import androidx.annotation.*; | [
"androidx.annotation"
] | androidx.annotation; | 1,809,020 |
public void cancelPreviousLoader() {
if (getCurrentLocation() != null
&& getLoaderManager().getLoader(mIdLoaderData) != null)
BaseFileProviderUtils.cancelTask(getActivity(),
getCurrentLocation().getAuthority(), mIdLoaderData);
mLoading = false;
}// cancelPreviousLoader() | void function() { if (getCurrentLocation() != null && getLoaderManager().getLoader(mIdLoaderData) != null) BaseFileProviderUtils.cancelTask(getActivity(), getCurrentLocation().getAuthority(), mIdLoaderData); mLoading = false; } | /**
* Cancels the loader in progress.
*/ | Cancels the loader in progress | cancelPreviousLoader | {
"repo_name": "red13dotnet/keepass2android",
"path": "src/java/android-filechooser/code/src/group/pals/android/lib/ui/filechooser/FragmentFiles.java",
"license": "gpl-3.0",
"size": 90560
} | [
"group.pals.android.lib.ui.filechooser.providers.BaseFileProviderUtils"
] | import group.pals.android.lib.ui.filechooser.providers.BaseFileProviderUtils; | import group.pals.android.lib.ui.filechooser.providers.*; | [
"group.pals.android"
] | group.pals.android; | 701,766 |
public static InstructionJsonMatcher matchesInstruction(Instruction instruction) {
return new InstructionJsonMatcher(instruction);
} | static InstructionJsonMatcher function(Instruction instruction) { return new InstructionJsonMatcher(instruction); } | /**
* Factory to allocate an instruction matcher.
*
* @param instruction instruction object we are looking for
* @return matcher
*/ | Factory to allocate an instruction matcher | matchesInstruction | {
"repo_name": "opennetworkinglab/onos",
"path": "core/common/src/test/java/org/onosproject/codec/impl/InstructionJsonMatcher.java",
"license": "apache-2.0",
"size": 27950
} | [
"org.onosproject.net.flow.instructions.Instruction"
] | import org.onosproject.net.flow.instructions.Instruction; | import org.onosproject.net.flow.instructions.*; | [
"org.onosproject.net"
] | org.onosproject.net; | 1,553,409 |
public Menu getSecondMenu() {
if (secondMenu == null) {
secondMenu = new ScreenMenu(KuixConstants.SCREEN_SECOND_MENU_WIDGET_TAG, false, false);
getBottomBar().add(secondMenu);
}
return secondMenu;
}
| Menu function() { if (secondMenu == null) { secondMenu = new ScreenMenu(KuixConstants.SCREEN_SECOND_MENU_WIDGET_TAG, false, false); getBottomBar().add(secondMenu); } return secondMenu; } | /**
* Create the secondMenu instance if it doesn't exist and return it.
*
* @return the internal secondMenu instance
*/ | Create the secondMenu instance if it doesn't exist and return it | getSecondMenu | {
"repo_name": "mariotaku/twidere.j2me",
"path": "src/org/kalmeo/kuix/widget/Screen.java",
"license": "gpl-3.0",
"size": 20154
} | [
"org.kalmeo.kuix.core.KuixConstants"
] | import org.kalmeo.kuix.core.KuixConstants; | import org.kalmeo.kuix.core.*; | [
"org.kalmeo.kuix"
] | org.kalmeo.kuix; | 2,565,835 |