method | clean_method | doc | comment | method_name | extra (dict) | imports (list) | imports_info | cluster_imports_info | libraries (list) | libraries_info | id (int64)
---|---|---|---|---|---|---|---|---|---|---|---
UserRecord getUserRecord(long id); | UserRecord getUserRecord(long id); | /**
* Retrieves a UserRecord with @param id
*/ | Retrieves a UserRecord with @param id | getUserRecord | {
"repo_name": "AndrewJack/moment-for-android-wear",
"path": "backend/src/main/java/technology/mainthread/service/moment/data/dao/UserDAO.java",
"license": "apache-2.0",
"size": 966
} | [
"technology.mainthread.service.moment.data.record.UserRecord"
] | import technology.mainthread.service.moment.data.record.UserRecord; | import technology.mainthread.service.moment.data.record.*; | [
"technology.mainthread.service"
] | technology.mainthread.service; | 2,706,261 |
EAttribute getP2Task_LicenseConfirmationDisabled(); | EAttribute getP2Task_LicenseConfirmationDisabled(); | /**
* Returns the meta object for the attribute '{@link org.eclipse.oomph.setup.p2.P2Task#isLicenseConfirmationDisabled <em>License Confirmation Disabled</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>License Confirmation Disabled</em>'.
* @see org.eclipse.oomph.setup.p2.P2Task#isLicenseConfirmationDisabled()
* @see #getP2Task()
* @generated
*/ | Returns the meta object for the attribute '<code>org.eclipse.oomph.setup.p2.P2Task#isLicenseConfirmationDisabled License Confirmation Disabled</code>'. | getP2Task_LicenseConfirmationDisabled | {
"repo_name": "peterkir/org.eclipse.oomph",
"path": "plugins/org.eclipse.oomph.setup.p2/src/org/eclipse/oomph/setup/p2/SetupP2Package.java",
"license": "epl-1.0",
"size": 10764
} | [
"org.eclipse.emf.ecore.EAttribute"
] | import org.eclipse.emf.ecore.EAttribute; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,242,437 |
@PostConstruct
public void initialize() {
this.geoCodingEnabled = StringUtils.toBoolean(this.geoCodingEnabledStringValue, false);
if (this.geoCodingEnabled) {
log.info("Activating GeoCoding Service Provider Interface - Google Maps Rest Services Implementation.");
log.info("Configured Allowed Requests Per Day:[" +
((this.geoCodingServiceProviderAllowedRequestsPerDay != null) &&
(this.geoCodingServiceProviderAllowedRequestsPerDay > 0) ?
this.geoCodingServiceProviderAllowedRequestsPerDay :
"OFF")
+ "]");
log.info("Configured Allowed Requests Per Second:[" +
((this.geoCodingServiceProviderAllowedRequestsPerSecond != null) &&
(this.geoCodingServiceProviderAllowedRequestsPerSecond > 0) ?
this.geoCodingServiceProviderAllowedRequestsPerSecond :
"OFF")
+ "]");
log.info("Configured Throttle Wait:[" +
((this.geoCodingServiceProviderThrottleSecondsWaitPerRequest != null) &&
(this.geoCodingServiceProviderThrottleSecondsWaitPerRequest > 0) ?
this.geoCodingServiceProviderThrottleSecondsWaitPerRequest + " seconds" :
"OFF")
+ "]");
// Determine if I have a Signature or Not.
if (StringUtils.isNotEmpty(this.geoCodingServiceProviderClientSignature)) {
// Convert the key from 'web safe' base 64 to binary
String keyString = this.geoCodingServiceProviderClientSignature.replace('-', '+');
keyString = keyString.replace('_', '/');
try {
this.binaryClientSignature = Base64.decode(keyString);
} catch (IOException ioe) {
log.error("Base64 decoding Exception occurred for Client Signature, removing Client Information so GMap Requests will not be Signed!");
this.geoCodingServiceProviderClientSignature = null;
this.geoCodingServiceProviderClientId = null;
}
} else {
log.warn("No Client Signature Supplied, GMap Requests will not be Signed!");
}
// ***********************************************
// Now Prepare Internal Cache for all Geo
// Queries
//
// TODO - Have Geo Coding service Provider use Memcached as Internal cache for Queries.
//
// Initialized.
this.initialized = true;
} else {
log.info("GeoCoding Service Provider Interface has been Disabled.");
this.initialized = false;
}
} | void function() { this.geoCodingEnabled = StringUtils.toBoolean(this.geoCodingEnabledStringValue, false); if (this.geoCodingEnabled) { log.info(STR); log.info(STR + ((this.geoCodingServiceProviderAllowedRequestsPerDay != null) && (this.geoCodingServiceProviderAllowedRequestsPerDay > 0) ? this.geoCodingServiceProviderAllowedRequestsPerDay : "OFF") + "]"); log.info(STR + ((this.geoCodingServiceProviderAllowedRequestsPerSecond != null) && (this.geoCodingServiceProviderAllowedRequestsPerSecond > 0) ? this.geoCodingServiceProviderAllowedRequestsPerSecond : "OFF") + "]"); log.info(STR + ((this.geoCodingServiceProviderThrottleSecondsWaitPerRequest != null) && (this.geoCodingServiceProviderThrottleSecondsWaitPerRequest > 0) ? this.geoCodingServiceProviderThrottleSecondsWaitPerRequest + STR : "OFF") + "]"); if (StringUtils.isNotEmpty(this.geoCodingServiceProviderClientSignature)) { String keyString = this.geoCodingServiceProviderClientSignature.replace('-', '+'); keyString = keyString.replace('_', '/'); try { this.binaryClientSignature = Base64.decode(keyString); } catch (IOException ioe) { log.error(STR); this.geoCodingServiceProviderClientSignature = null; this.geoCodingServiceProviderClientId = null; } } else { log.warn(STR); } this.initialized = true; } else { log.info(STR); this.initialized = false; } } | /**
* Initialize the Content Management System Interface
*/ | Initialize the Content Management System Interface | initialize | {
"repo_name": "jaschenk/jeffaschenk-commons",
"path": "src/main/java/jeffaschenk/commons/system/external/geocoding/GeoCodingServiceProviderImpl.java",
"license": "apache-2.0",
"size": 22587
} | [
"java.io.IOException",
"net.iharder.Base64"
] | import java.io.IOException; import net.iharder.Base64; | import java.io.*; import net.iharder.*; | [
"java.io",
"net.iharder"
] | java.io; net.iharder; | 540,424 |
//////////////////////////////////////////////
//
// FORMATABLE
//
//////////////////////////////////////////////
public void writeExternal(ObjectOutput out) throws IOException
{
out.writeObject(baseName);
out.writeObject(exposedName);
out.writeObject(schemaName);
} | void function(ObjectOutput out) throws IOException { out.writeObject(baseName); out.writeObject(exposedName); out.writeObject(schemaName); } | /**
* Write this object out
*
* @param out write bytes here
*
* @exception IOException thrown on error
*/ | Write this object out | writeExternal | {
"repo_name": "apache/derby",
"path": "java/org.apache.derby.engine/org/apache/derby/impl/sql/CursorTableReference.java",
"license": "apache-2.0",
"size": 4085
} | [
"java.io.IOException",
"java.io.ObjectOutput"
] | import java.io.IOException; import java.io.ObjectOutput; | import java.io.*; | [
"java.io"
] | java.io; | 1,969,941 |
CamelEvent createExchangeRedeliveryEvent(Exchange exchange, int attempt); | CamelEvent createExchangeRedeliveryEvent(Exchange exchange, int attempt); | /**
* Creates an {@link CamelEvent} when an {@link org.apache.camel.Exchange} is about to be redelivered
*
* @param exchange the exchange
* @param attempt the current redelivery attempt (starts from 1)
* @return the created event
*/ | Creates an <code>CamelEvent</code> when an <code>org.apache.camel.Exchange</code> is about to be redelivered | createExchangeRedeliveryEvent | {
"repo_name": "pax95/camel",
"path": "core/camel-api/src/main/java/org/apache/camel/spi/EventFactory.java",
"license": "apache-2.0",
"size": 12684
} | [
"org.apache.camel.Exchange"
] | import org.apache.camel.Exchange; | import org.apache.camel.*; | [
"org.apache.camel"
] | org.apache.camel; | 959,382 |
AnyModel convert(AnyModel model); | AnyModel convert(AnyModel model); | /**
* Convert a model to a later.
* @param model The model
* @return The model converted to a later
* @throws IllegalArgumentException if the converter does not know how to convert supplied model
*/ | Convert a model to a later | convert | {
"repo_name": "pushtechnology/diffusion-rest-adapter",
"path": "configuration-model-conversion/src/main/java/com/pushtechnology/adapters/rest/model/conversion/ModelConverter.java",
"license": "apache-2.0",
"size": 1277
} | [
"com.pushtechnology.adapters.rest.model.AnyModel"
] | import com.pushtechnology.adapters.rest.model.AnyModel; | import com.pushtechnology.adapters.rest.model.*; | [
"com.pushtechnology.adapters"
] | com.pushtechnology.adapters; | 2,201,653 |
public static Collection<Namespace> getStandardNamespaces() {
return NAMESPACES_BY_PREFIX.values();
} | static Collection<Namespace> function() { return NAMESPACES_BY_PREFIX.values(); } | /**
* Returns a list of standard namespaces used in MyCoRe. Additional
* namespaces can be configured using properties like
* MCR.Namespace.<prefix>=<uri>
*/ | Returns a list of standard namespaces used in MyCoRe. Additional namespaces can be configured using properties like MCR.Namespace.<prefix>=<uri> | getStandardNamespaces | {
"repo_name": "MyCoRe-Org/mycore",
"path": "mycore-base/src/main/java/org/mycore/common/MCRConstants.java",
"license": "gpl-3.0",
"size": 6682
} | [
"java.util.Collection",
"org.jdom2.Namespace"
] | import java.util.Collection; import org.jdom2.Namespace; | import java.util.*; import org.jdom2.*; | [
"java.util",
"org.jdom2"
] | java.util; org.jdom2; | 1,145,725 |
EAttribute getJointConstraint_Name(); | EAttribute getJointConstraint_Name(); | /**
* Returns the meta object for the attribute '{@link uk.ac.kcl.inf.robotics.rigidBodies.JointConstraint#getName <em>Name</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Name</em>'.
* @see uk.ac.kcl.inf.robotics.rigidBodies.JointConstraint#getName()
* @see #getJointConstraint()
* @generated
*/ | Returns the meta object for the attribute '<code>uk.ac.kcl.inf.robotics.rigidBodies.JointConstraint#getName Name</code>'. | getJointConstraint_Name | {
"repo_name": "szschaler/RigidBodies",
"path": "uk.ac.kcl.inf.robotics.rigid_bodies/src-gen/uk/ac/kcl/inf/robotics/rigidBodies/RigidBodiesPackage.java",
"license": "mit",
"size": 163741
} | [
"org.eclipse.emf.ecore.EAttribute"
] | import org.eclipse.emf.ecore.EAttribute; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 842,734 |
@Override
public Polzovateli remove(Serializable primaryKey)
throws NoSuchPolzovateliException {
Session session = null;
try {
session = openSession();
Polzovateli polzovateli = (Polzovateli)session.get(PolzovateliImpl.class,
primaryKey);
if (polzovateli == null) {
if (_log.isDebugEnabled()) {
_log.debug(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey);
}
throw new NoSuchPolzovateliException(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY +
primaryKey);
}
return remove(polzovateli);
}
catch (NoSuchPolzovateliException nsee) {
throw nsee;
}
catch (Exception e) {
throw processException(e);
}
finally {
closeSession(session);
}
} | Polzovateli function(Serializable primaryKey) throws NoSuchPolzovateliException { Session session = null; try { session = openSession(); Polzovateli polzovateli = (Polzovateli)session.get(PolzovateliImpl.class, primaryKey); if (polzovateli == null) { if (_log.isDebugEnabled()) { _log.debug(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey); } throw new NoSuchPolzovateliException(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey); } return remove(polzovateli); } catch (NoSuchPolzovateliException nsee) { throw nsee; } catch (Exception e) { throw processException(e); } finally { closeSession(session); } } | /**
* Removes the polzovateli with the primary key from the database. Also notifies the appropriate model listeners.
*
* @param primaryKey the primary key of the polzovateli
* @return the polzovateli that was removed
* @throws NoSuchPolzovateliException if a polzovateli with the primary key could not be found
*/ | Removes the polzovateli with the primary key from the database. Also notifies the appropriate model listeners | remove | {
"repo_name": "falko0000/moduleEProc",
"path": "Polzovateli/Polzovateli-service/src/main/java/tj/polzovateli/service/persistence/impl/PolzovateliPersistenceImpl.java",
"license": "lgpl-2.1",
"size": 23998
} | [
"com.liferay.portal.kernel.dao.orm.Session",
"java.io.Serializable"
] | import com.liferay.portal.kernel.dao.orm.Session; import java.io.Serializable; | import com.liferay.portal.kernel.dao.orm.*; import java.io.*; | [
"com.liferay.portal",
"java.io"
] | com.liferay.portal; java.io; | 1,130,214 |
StatefulKnowledgeSession getStatefulKnowlegeSession(String kSessionName); | StatefulKnowledgeSession getStatefulKnowlegeSession(String kSessionName); | /**
* Look up for the definition of a StatefulKnowledgeSession with the given name in a KnowledgeJar in the classpath
* @param kSessionName
* The name of the StatefulKnowledgeSession
* @return
* The StatefulKnowledgeSession
*/ | Look up for the definition of a StatefulKnowledgeSession with the given name in a KnowledgeJar in the classpath | getStatefulKnowlegeSession | {
"repo_name": "psiroky/droolsjbpm-knowledge",
"path": "knowledge-api/src/main/java/org/drools/KnowledgeBaseFactoryService.java",
"license": "apache-2.0",
"size": 5362
} | [
"org.drools.runtime.StatefulKnowledgeSession"
] | import org.drools.runtime.StatefulKnowledgeSession; | import org.drools.runtime.*; | [
"org.drools.runtime"
] | org.drools.runtime; | 2,051,372 |
public static int hash(int seed, Object obj) {
int result = seed;
if (obj == null) {
result = hash(result, 0);
} else if (!isArray(obj)) {
result = hash(result, obj.hashCode());
} else {
int length = Array.getLength(obj);
for (int index = 0; index < length; ++index) {
Object item = Array.get(obj, index);
if (!(item == obj)) {
result = hash(result, item);
}
}
}
return result;
} | static int function(int seed, Object obj) { int result = seed; if (obj == null) { result = hash(result, 0); } else if (!isArray(obj)) { result = hash(result, obj.hashCode()); } else { int length = Array.getLength(obj); for (int index = 0; index < length; ++index) { Object item = Array.get(obj, index); if (!(item == obj)) { result = hash(result, item); } } } return result; } | /**
* Hash an object
* @param seed The seed (value of previous hashes or SEED).
* @param obj the object to be hashed
* @return Return the hashed value
*/ | Hash an object | hash | {
"repo_name": "Sollace/CompCore",
"path": "source/main/net/acomputerdog/core/hash/Hash.java",
"license": "bsd-2-clause",
"size": 3701
} | [
"java.lang.reflect.Array"
] | import java.lang.reflect.Array; | import java.lang.reflect.*; | [
"java.lang"
] | java.lang; | 1,902,255 |
CloseableIterable<String> readFromEvaluatorLog() throws IOException; | CloseableIterable<String> readFromEvaluatorLog() throws IOException; | /**
* Reads a formatted entry (addition or removal) from the DFS evaluator log.
* @return the formatted entry.
* @throws IOException
*/ | Reads a formatted entry (addition or removal) from the DFS evaluator log | readFromEvaluatorLog | {
"repo_name": "dongjoon-hyun/reef",
"path": "lang/java/reef-runtime-yarn/src/main/java/org/apache/reef/runtime/yarn/driver/restart/DFSEvaluatorLogReaderWriter.java",
"license": "apache-2.0",
"size": 1759
} | [
"java.io.IOException",
"org.apache.reef.util.CloseableIterable"
] | import java.io.IOException; import org.apache.reef.util.CloseableIterable; | import java.io.*; import org.apache.reef.util.*; | [
"java.io",
"org.apache.reef"
] | java.io; org.apache.reef; | 2,796,759 |
public static Map<Integer, MimeMessage> decomposeBlob(byte[] digestBlob)
throws ServiceException {
Map<Integer, MimeMessage> map = new HashMap<Integer, MimeMessage>();
try {
InputStream bais = new SharedByteArrayInputStream(digestBlob);
FixedMimeMessage digestMm = new FixedMimeMessage(JMSession.getSession(), bais);
// It should be multipart/digest.
MimeMultipart mmp;
Object obj = digestMm.getContent();
if (obj instanceof MimeMultipart)
mmp = (MimeMultipart) obj;
else
throw ServiceException.FAILURE(
"Expected MimeMultipart, but got " + obj.getClass().getName() + ": " +
obj.toString(), null);
int numParts = mmp.getCount();
for (int i = 0; i < numParts; i++) {
MimeBodyPart mbp = (MimeBodyPart) mmp.getBodyPart(i);
int invId = 0;
String[] hdrs = mbp.getHeader("invId");
if (hdrs != null && hdrs.length > 0) {
invId = Integer.parseInt(hdrs[0]);
MimeMessage mm;
Object objMbp = mbp.getContent();
if (objMbp instanceof MimeMessage)
mm = (MimeMessage) objMbp;
else
throw ServiceException.FAILURE(
"Expected MimeMessage, but got " + objMbp.getClass().getName() + ": " +
objMbp.toString(), null);
map.put(invId, mm);
}
}
} catch (MessagingException e) {
throw ServiceException.FAILURE("Can't parse calendar item blob", e);
} catch (IOException e) {
throw ServiceException.FAILURE("Can't parse calendar item blob", e);
} catch (NumberFormatException e) {
throw ServiceException.FAILURE("Can't parse calendar item blob", e);
}
return map;
} | static Map<Integer, MimeMessage> function(byte[] digestBlob) throws ServiceException { Map<Integer, MimeMessage> map = new HashMap<Integer, MimeMessage>(); try { InputStream bais = new SharedByteArrayInputStream(digestBlob); FixedMimeMessage digestMm = new FixedMimeMessage(JMSession.getSession(), bais); MimeMultipart mmp; Object obj = digestMm.getContent(); if (obj instanceof MimeMultipart) mmp = (MimeMultipart) obj; else throw ServiceException.FAILURE( STR + obj.getClass().getName() + STR + obj.toString(), null); int numParts = mmp.getCount(); for (int i = 0; i < numParts; i++) { MimeBodyPart mbp = (MimeBodyPart) mmp.getBodyPart(i); int invId = 0; String[] hdrs = mbp.getHeader("invId"); if (hdrs != null && hdrs.length > 0) { invId = Integer.parseInt(hdrs[0]); MimeMessage mm; Object objMbp = mbp.getContent(); if (objMbp instanceof MimeMessage) mm = (MimeMessage) objMbp; else throw ServiceException.FAILURE( STR + objMbp.getClass().getName() + STR + objMbp.toString(), null); map.put(invId, mm); } } } catch (MessagingException e) { throw ServiceException.FAILURE(STR, e); } catch (IOException e) { throw ServiceException.FAILURE(STR, e); } catch (NumberFormatException e) { throw ServiceException.FAILURE(STR, e); } return map; } | /**
* Break up a multipart/digest blob into separate MimeMessages keyed by InvId header value.
* @param digestStream
* @return
* @throws MessagingException
*/ | Break up a multipart/digest blob into separate MimeMessages keyed by InvId header value | decomposeBlob | {
"repo_name": "nico01f/z-pec",
"path": "ZimbraServer/src/java/com/zimbra/cs/mailbox/CalendarItem.java",
"license": "mit",
"size": 181472
} | [
"com.zimbra.common.service.ServiceException",
"com.zimbra.cs.mime.Mime",
"com.zimbra.cs.util.JMSession",
"java.io.IOException",
"java.io.InputStream",
"java.util.HashMap",
"java.util.Map",
"javax.mail.MessagingException",
"javax.mail.internet.MimeBodyPart",
"javax.mail.internet.MimeMessage",
"javax.mail.internet.MimeMultipart",
"javax.mail.util.SharedByteArrayInputStream"
] | import com.zimbra.common.service.ServiceException; import com.zimbra.cs.mime.Mime; import com.zimbra.cs.util.JMSession; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Map; import javax.mail.MessagingException; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; import javax.mail.util.SharedByteArrayInputStream; | import com.zimbra.common.service.*; import com.zimbra.cs.mime.*; import com.zimbra.cs.util.*; import java.io.*; import java.util.*; import javax.mail.*; import javax.mail.internet.*; import javax.mail.util.*; | [
"com.zimbra.common",
"com.zimbra.cs",
"java.io",
"java.util",
"javax.mail"
] | com.zimbra.common; com.zimbra.cs; java.io; java.util; javax.mail; | 561,139 |
public void setInstruments(final InstrumentDerivative[] instruments) {
_instruments = instruments;
} | void function(final InstrumentDerivative[] instruments) { _instruments = instruments; } | /**
* Sets the instruments to calibrate.
* @param instruments The instruments.
*/ | Sets the instruments to calibrate | setInstruments | {
"repo_name": "McLeodMoores/starling",
"path": "projects/analytics/src/main/java/com/opengamma/analytics/financial/provider/method/SuccessiveLeastSquareCalibrationObjective.java",
"license": "apache-2.0",
"size": 2598
} | [
"com.opengamma.analytics.financial.interestrate.InstrumentDerivative"
] | import com.opengamma.analytics.financial.interestrate.InstrumentDerivative; | import com.opengamma.analytics.financial.interestrate.*; | [
"com.opengamma.analytics"
] | com.opengamma.analytics; | 1,019,099 |
private void deleteNonFolders() {
if (nonFolderCount == 0) {
Logger.debug(LOG_TAG, "No non-folders to delete.");
return;
}
Logger.debug(LOG_TAG, "Applying deletion of " + nonFolderCount + " non-folders.");
final String[] nonFolderGUIDs = nonFolders.toArray(new String[nonFolderCount]);
final String nonFolderWhere = RepoUtils.computeSQLInClause(nonFolderCount, BrowserContract.Bookmarks.GUID);
dataAccessor.delete(nonFolderWhere, nonFolderGUIDs);
invokeCallbacks(delegate, nonFolderGUIDs);
// Discard these.
// Note that we maintain folderParents and nonFolderParents; we need them later.
nonFolders.clear();
nonFolderCount = 0;
} | void function() { if (nonFolderCount == 0) { Logger.debug(LOG_TAG, STR); return; } Logger.debug(LOG_TAG, STR + nonFolderCount + STR); final String[] nonFolderGUIDs = nonFolders.toArray(new String[nonFolderCount]); final String nonFolderWhere = RepoUtils.computeSQLInClause(nonFolderCount, BrowserContract.Bookmarks.GUID); dataAccessor.delete(nonFolderWhere, nonFolderGUIDs); invokeCallbacks(delegate, nonFolderGUIDs); nonFolders.clear(); nonFolderCount = 0; } | /**
* Flush non-folder deletions. This can be called at any time.
*/ | Flush non-folder deletions. This can be called at any time | deleteNonFolders | {
"repo_name": "wilebeast/FireFox-OS",
"path": "B2G/gecko/mobile/android/base/sync/repositories/android/BookmarksDeletionManager.java",
"license": "apache-2.0",
"size": 8396
} | [
"org.mozilla.gecko.db.BrowserContract",
"org.mozilla.gecko.sync.Logger"
] | import org.mozilla.gecko.db.BrowserContract; import org.mozilla.gecko.sync.Logger; | import org.mozilla.gecko.db.*; import org.mozilla.gecko.sync.*; | [
"org.mozilla.gecko"
] | org.mozilla.gecko; | 2,213,879 |
public Control createControl(Composite parent) {
Display display= parent.getDisplay();
fBackgroundColor= display.getSystemColor(SWT.COLOR_LIST_BACKGROUND);
fForegroundColor= display.getSystemColor(SWT.COLOR_LIST_FOREGROUND);
fSeparatorColor= new Color(display, 152, 170, 203);
JFaceResources.getFontRegistry().addListener(this); | Control function(Composite parent) { Display display= parent.getDisplay(); fBackgroundColor= display.getSystemColor(SWT.COLOR_LIST_BACKGROUND); fForegroundColor= display.getSystemColor(SWT.COLOR_LIST_FOREGROUND); fSeparatorColor= new Color(display, 152, 170, 203); JFaceResources.getFontRegistry().addListener(this); | /**
* Creates the control of the source attachment form.
*
* @param parent the parent composite
* @return the creates source attachment form
*/ | Creates the control of the source attachment form | createControl | {
"repo_name": "brunyuriy/quick-fix-scout",
"path": "org.eclipse.jdt.ui_3.7.1.r371_v20110824-0800/src/org/eclipse/jdt/internal/ui/javaeditor/ClassFileEditor.java",
"license": "mit",
"size": 30762
} | [
"org.eclipse.jface.resource.JFaceResources",
"org.eclipse.swt.graphics.Color",
"org.eclipse.swt.widgets.Composite",
"org.eclipse.swt.widgets.Control",
"org.eclipse.swt.widgets.Display"
] | import org.eclipse.jface.resource.JFaceResources; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; | import org.eclipse.jface.resource.*; import org.eclipse.swt.graphics.*; import org.eclipse.swt.widgets.*; | [
"org.eclipse.jface",
"org.eclipse.swt"
] | org.eclipse.jface; org.eclipse.swt; | 532,016 |
public static Rectangle2D getBounds(Shape shape, AffineTransform transform,
Rectangle2D r) throws EmptyPathException {
PathIterator i = shape.getPathIterator(transform);
return getBounds(i, r);
} | static Rectangle2D function(Shape shape, AffineTransform transform, Rectangle2D r) throws EmptyPathException { PathIterator i = shape.getPathIterator(transform); return getBounds(i, r); } | /**
* This calculates the precise bounds of a shape.
*
* @param shape
* the shape you want the bounds of. This method throws a
* NullPointerException if this is null.
* @param transform
* if this is non-null, then this method returns the bounds of
* <code>shape</code> as seen through <code>t</code>.
* @param r
* if this is non-null, then the result is stored in this
* rectangle. This is useful when you need to call this method
* repeatedly without allocating a lot of memory.
* @return the bounds of <code>shape</code>, as seen through
* <code>transform</code>.
*
* @throws EmptyPathException
* if the shape argument is empty.
*/ | This calculates the precise bounds of a shape | getBounds | {
"repo_name": "mickleness/pumpernickel",
"path": "src/main/java/com/pump/geom/ShapeBounds.java",
"license": "mit",
"size": 15913
} | [
"java.awt.Shape",
"java.awt.geom.AffineTransform",
"java.awt.geom.PathIterator",
"java.awt.geom.Rectangle2D"
] | import java.awt.Shape; import java.awt.geom.AffineTransform; import java.awt.geom.PathIterator; import java.awt.geom.Rectangle2D; | import java.awt.*; import java.awt.geom.*; | [
"java.awt"
] | java.awt; | 1,807,991 |
boolean maybeHandlePrototypePrefix(JSModule module, Scope scope,
Node n, Node parent, String name) {
// We use a string-based approach instead of inspecting the parse tree
// to avoid complexities with object literals, possibly nested, beneath
// assignments.
int numLevelsToRemove;
String prefix;
if (name.endsWith(".prototype")) {
numLevelsToRemove = 1;
prefix = name.substring(0, name.length() - 10);
} else {
int i = name.indexOf(".prototype.");
if (i == -1) {
return false;
}
prefix = name.substring(0, i);
numLevelsToRemove = 2;
i = name.indexOf('.', i + 11);
while (i >= 0) {
numLevelsToRemove++;
i = name.indexOf('.', i + 1);
}
}
if (parent != null && NodeUtil.isObjectLitKey(n)) {
// Object literal keys have no prefix that's referenced directly per
// key, so we're done.
return true;
}
for (int i = 0; i < numLevelsToRemove; i++) {
parent = n;
n = n.getFirstChild();
}
handleGet(module, scope, n, parent, prefix, Ref.Type.PROTOTYPE_GET);
return true;
} | boolean maybeHandlePrototypePrefix(JSModule module, Scope scope, Node n, Node parent, String name) { int numLevelsToRemove; String prefix; if (name.endsWith(STR)) { numLevelsToRemove = 1; prefix = name.substring(0, name.length() - 10); } else { int i = name.indexOf(STR); if (i == -1) { return false; } prefix = name.substring(0, i); numLevelsToRemove = 2; i = name.indexOf('.', i + 11); while (i >= 0) { numLevelsToRemove++; i = name.indexOf('.', i + 1); } } if (parent != null && NodeUtil.isObjectLitKey(n)) { return true; } for (int i = 0; i < numLevelsToRemove; i++) { parent = n; n = n.getFirstChild(); } handleGet(module, scope, n, parent, prefix, Ref.Type.PROTOTYPE_GET); return true; } | /**
* Updates our representation of the global namespace to reflect a read
* of a global name's longest prefix before the "prototype" property if the
* name includes the "prototype" property. Does nothing otherwise.
*
* @param module The current module
* @param scope The current scope
* @param n The node currently being visited
* @param parent {@code n}'s parent
* @param name The global name (e.g. "a" or "a.b.c.d")
* @return Whether the name was handled
*/ | Updates our representation of the global namespace to reflect a read of a global name's longest prefix before the "prototype" property if the name includes the "prototype" property. Does nothing otherwise | maybeHandlePrototypePrefix | {
"repo_name": "robbert/closure-compiler",
"path": "src/com/google/javascript/jscomp/GlobalNamespace.java",
"license": "apache-2.0",
"size": 41903
} | [
"com.google.javascript.rhino.Node"
] | import com.google.javascript.rhino.Node; | import com.google.javascript.rhino.*; | [
"com.google.javascript"
] | com.google.javascript; | 2,374,028 |
@Override
public void append(final LogEvent event) {
if (!isStarted()) {
error("FailoverAppender " + getName() + " did not start successfully");
return;
}
final long localCheckNanos = nextCheckNanos;
if (localCheckNanos == 0 || System.nanoTime() - localCheckNanos > 0) {
callAppender(event);
} else {
failover(event, null);
}
} | void function(final LogEvent event) { if (!isStarted()) { error(STR + getName() + STR); return; } final long localCheckNanos = nextCheckNanos; if (localCheckNanos == 0 System.nanoTime() - localCheckNanos > 0) { callAppender(event); } else { failover(event, null); } } | /**
* Handle the Log event.
* @param event The LogEvent.
*/ | Handle the Log event | append | {
"repo_name": "xnslong/logging-log4j2",
"path": "log4j-core/src/main/java/org/apache/logging/log4j/core/appender/FailoverAppender.java",
"license": "apache-2.0",
"size": 8628
} | [
"org.apache.logging.log4j.core.LogEvent"
] | import org.apache.logging.log4j.core.LogEvent; | import org.apache.logging.log4j.core.*; | [
"org.apache.logging"
] | org.apache.logging; | 2,556,448 |
public static boolean allNotEmpty(final Set<Pair>... parameters) {
for (final Set<Pair> set : parameters) {
if (CollectionUtils.isEmpty(set)) {
return false;
}
}
return true;
} | static boolean function(final Set<Pair>... parameters) { for (final Set<Pair> set : parameters) { if (CollectionUtils.isEmpty(set)) { return false; } } return true; } | /**
* Checks if all the provided maps are not empty
*
* @param parameters
* @return
*/ | Checks if all the provided maps are not empty | allNotEmpty | {
"repo_name": "dryazanov/mockenger",
"path": "core/src/main/java/org/mockenger/core/util/CommonUtils.java",
"license": "gpl-2.0",
"size": 7042
} | [
"java.util.Set",
"org.mockenger.data.model.persistent.mock.request.part.Pair",
"org.springframework.util.CollectionUtils",
"org.springframework.util.StringUtils"
] | import java.util.Set; import org.mockenger.data.model.persistent.mock.request.part.Pair; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; | import java.util.*; import org.mockenger.data.model.persistent.mock.request.part.*; import org.springframework.util.*; | [
"java.util",
"org.mockenger.data",
"org.springframework.util"
] | java.util; org.mockenger.data; org.springframework.util; | 1,556,854 |
private static void polyStringToElement(Element polyStringElement, PolyString polyString) {
if (polyString.getOrig() != null) {
Element origElement = DOMUtil.createSubElement(polyStringElement, PrismConstants.POLYSTRING_ELEMENT_ORIG_QNAME);
origElement.setTextContent(polyString.getOrig());
}
if (polyString.getNorm() != null) {
Element origElement = DOMUtil.createSubElement(polyStringElement, PrismConstants.POLYSTRING_ELEMENT_NORM_QNAME);
origElement.setTextContent(polyString.getNorm());
}
} | static void function(Element polyStringElement, PolyString polyString) { if (polyString.getOrig() != null) { Element origElement = DOMUtil.createSubElement(polyStringElement, PrismConstants.POLYSTRING_ELEMENT_ORIG_QNAME); origElement.setTextContent(polyString.getOrig()); } if (polyString.getNorm() != null) { Element origElement = DOMUtil.createSubElement(polyStringElement, PrismConstants.POLYSTRING_ELEMENT_NORM_QNAME); origElement.setTextContent(polyString.getNorm()); } } | /**
* Serialize PolyString to DOM element.
*/ | Serialize PolyString to DOM element | polyStringToElement | {
"repo_name": "bshp/midPoint",
"path": "infra/prism-impl/src/main/java/com/evolveum/midpoint/prism/impl/xml/XmlTypeConverterInternal.java",
"license": "apache-2.0",
"size": 7453
} | [
"com.evolveum.midpoint.prism.PrismConstants",
"com.evolveum.midpoint.prism.polystring.PolyString",
"com.evolveum.midpoint.util.DOMUtil",
"org.w3c.dom.Element"
] | import com.evolveum.midpoint.prism.PrismConstants; import com.evolveum.midpoint.prism.polystring.PolyString; import com.evolveum.midpoint.util.DOMUtil; import org.w3c.dom.Element; | import com.evolveum.midpoint.prism.*; import com.evolveum.midpoint.prism.polystring.*; import com.evolveum.midpoint.util.*; import org.w3c.dom.*; | [
"com.evolveum.midpoint",
"org.w3c.dom"
] | com.evolveum.midpoint; org.w3c.dom; | 862,350 |
private synchronized List<StaffStatus> cloneAndResetRunningStaffStatuses() {
List<StaffStatus> result = new ArrayList<StaffStatus>(runningStaffs.size());
for (StaffInProgress sip : runningStaffs.values()) {
StaffStatus status = sip.getStatus();
result.add((StaffStatus) status.clone());
}
return result;
}
| synchronized List<StaffStatus> function() { List<StaffStatus> result = new ArrayList<StaffStatus>(runningStaffs.size()); for (StaffInProgress sip : runningStaffs.values()) { StaffStatus status = sip.getStatus(); result.add((StaffStatus) status.clone()); } return result; } | /**
* Clone and reset running staff's StaffStatus.
* @return the list of StaffStatus
*/ | Clone and reset running staff's StaffStatus | cloneAndResetRunningStaffStatuses | {
"repo_name": "LiuJianan/Graduate-Graph",
"path": "src/java/com/chinamobile/bcbsp/workermanager/WorkerManager.java",
"license": "apache-2.0",
"size": 72981
} | [
"com.chinamobile.bcbsp.util.StaffStatus",
"java.util.ArrayList",
"java.util.List"
] | import com.chinamobile.bcbsp.util.StaffStatus; import java.util.ArrayList; import java.util.List; | import com.chinamobile.bcbsp.util.*; import java.util.*; | [
"com.chinamobile.bcbsp",
"java.util"
] | com.chinamobile.bcbsp; java.util; | 1,787,937 |
static private boolean isInlineLimitPlanNodePossible(AbstractPlanNode pn) {
if (pn instanceof OrderByPlanNode ||
pn.getPlanNodeType() == PlanNodeType.AGGREGATE) {
return true;
}
return false;
} | static boolean function(AbstractPlanNode pn) { if (pn instanceof OrderByPlanNode pn.getPlanNodeType() == PlanNodeType.AGGREGATE) { return true; } return false; } | /**
* Inline limit plan node can be applied with ORDER BY node and serial aggregation node
* @param pn
* @return
*/ | Inline limit plan node can be applied with ORDER BY node and serial aggregation node | isInlineLimitPlanNodePossible | {
"repo_name": "paulmartel/voltdb",
"path": "src/frontend/org/voltdb/planner/PlanAssembler.java",
"license": "agpl-3.0",
"size": 138072
} | [
"org.voltdb.plannodes.AbstractPlanNode",
"org.voltdb.plannodes.OrderByPlanNode",
"org.voltdb.types.PlanNodeType"
] | import org.voltdb.plannodes.AbstractPlanNode; import org.voltdb.plannodes.OrderByPlanNode; import org.voltdb.types.PlanNodeType; | import org.voltdb.plannodes.*; import org.voltdb.types.*; | [
"org.voltdb.plannodes",
"org.voltdb.types"
] | org.voltdb.plannodes; org.voltdb.types; | 1,204,770 |
@FilterWith(WhitelistFilter.class)
public Result showDomainWhitelist(Context context)
{
List<Domain> domainList = Domain.getAll();
return Results.html().render("domains", domainList);
} | @FilterWith(WhitelistFilter.class) Result function(Context context) { List<Domain> domainList = Domain.getAll(); return Results.html().render(STR, domainList); } | /**
* Shows a page that contains a list of all domains allowed for registration.
*
* @param context
* the context of this request
* @return overview of all white-listed domains
*/ | Shows a page that contains a list of all domains allowed for registration | showDomainWhitelist | {
"repo_name": "Xceptance/XCMailr",
"path": "xcmailr-webapp/src/main/java/controllers/AdminHandler.java",
"license": "apache-2.0",
"size": 23975
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 516,848 |
protected void sendKeyExchangeInit() throws IOException {
setLocalKexInit(createLocalKexInit());
sendMessage(getLocalKexInit(), this);
state.setValue(TransportProtocolState.PERFORMING_KEYEXCHANGE);
} | void function() throws IOException { setLocalKexInit(createLocalKexInit()); sendMessage(getLocalKexInit(), this); state.setValue(TransportProtocolState.PERFORMING_KEYEXCHANGE); } | /**
* DOCUMENT ME!
*
* @throws IOException DOCUMENT ME!
*/ | DOCUMENT ME | sendKeyExchangeInit | {
"repo_name": "swift-lang/swift-k",
"path": "cogkit/modules/provider-ssh/src/com/sshtools/j2ssh/transport/TransportProtocolCommon.java",
"license": "apache-2.0",
"size": 43025
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,709,878 |
interface Aws2KmsComponentBuilder extends ComponentBuilder<KMS2Component> {
default Aws2KmsComponentBuilder configuration(
org.apache.camel.component.aws2.kms.KMS2Configuration configuration) {
doSetProperty("configuration", configuration);
return this;
} | interface Aws2KmsComponentBuilder extends ComponentBuilder<KMS2Component> { default Aws2KmsComponentBuilder configuration( org.apache.camel.component.aws2.kms.KMS2Configuration configuration) { doSetProperty(STR, configuration); return this; } | /**
* Component configuration.
*
* The option is a:
* <code>org.apache.camel.component.aws2.kms.KMS2Configuration</code> type.
*
* Group: producer
*
* @param configuration the value to set
* @return the dsl builder
*/ | Component configuration. The option is a: <code>org.apache.camel.component.aws2.kms.KMS2Configuration</code> type. Group: producer | configuration | {
"repo_name": "gnodet/camel",
"path": "core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/Aws2KmsComponentBuilderFactory.java",
"license": "apache-2.0",
"size": 12610
} | [
"org.apache.camel.builder.component.ComponentBuilder",
"org.apache.camel.component.aws2.kms.KMS2Component"
] | import org.apache.camel.builder.component.ComponentBuilder; import org.apache.camel.component.aws2.kms.KMS2Component; | import org.apache.camel.builder.component.*; import org.apache.camel.component.aws2.kms.*; | [
"org.apache.camel"
] | org.apache.camel; | 1,995,507 |
private DefaultActionGroup getActionGroupForCommand(CommandImpl command) {
String goalId = command.getGoal();
if (isNullOrEmpty(goalId)) {
goalId = goalRegistry.getDefaultGoal().getId();
}
DefaultActionGroup commandGoalPopUpGroup = goalPopUpGroups.get(goalId);
if (commandGoalPopUpGroup == null) {
commandGoalPopUpGroup = goalPopUpGroupFactory.create(goalId);
actionManager.registerAction(GOAL_ACTION_GROUP_ID_PREFIX + goalId, commandGoalPopUpGroup);
goalPopUpGroups.put(goalId, commandGoalPopUpGroup);
commandsActionGroup.add(commandGoalPopUpGroup);
}
return commandGoalPopUpGroup;
} | DefaultActionGroup function(CommandImpl command) { String goalId = command.getGoal(); if (isNullOrEmpty(goalId)) { goalId = goalRegistry.getDefaultGoal().getId(); } DefaultActionGroup commandGoalPopUpGroup = goalPopUpGroups.get(goalId); if (commandGoalPopUpGroup == null) { commandGoalPopUpGroup = goalPopUpGroupFactory.create(goalId); actionManager.registerAction(GOAL_ACTION_GROUP_ID_PREFIX + goalId, commandGoalPopUpGroup); goalPopUpGroups.put(goalId, commandGoalPopUpGroup); commandsActionGroup.add(commandGoalPopUpGroup); } return commandGoalPopUpGroup; } | /**
* Returns the action group which is appropriate for placing the action for executing the given command.
* If appropriate action group doesn't exist it will be created and added to the right place.
*/ | Returns the action group which is appropriate for placing the action for executing the given command. If appropriate action group doesn't exist it will be created and added to the right place | getActionGroupForCommand | {
"repo_name": "snjeza/che",
"path": "ide/che-core-ide-app/src/main/java/org/eclipse/che/ide/command/execute/ExecuteCommandActionManager.java",
"license": "epl-1.0",
"size": 7631
} | [
"org.eclipse.che.ide.api.action.DefaultActionGroup",
"org.eclipse.che.ide.api.command.CommandImpl"
] | import org.eclipse.che.ide.api.action.DefaultActionGroup; import org.eclipse.che.ide.api.command.CommandImpl; | import org.eclipse.che.ide.api.action.*; import org.eclipse.che.ide.api.command.*; | [
"org.eclipse.che"
] | org.eclipse.che; | 926,988 |
@Test
public void testCheckHandlerMethodAccess() throws Exception {
ViewPostMetadata viewPostMetadata = new ViewPostMetadata();
model.setViewPostMetadata(viewPostMetadata);
assertMethodAccess("Accessible annotation not picked up", "method1", true);
assertMethodAccess("Custom method should be allowed due to not being in the available methods", "method2", true);
viewPostMetadata.addAvailableMethodToCall( "method2" );
assertMethodAccess("Accessible annotation picked up where not present", "method2", false);
viewPostMetadata.addAccessibleMethodToCall("method4");
viewPostMetadata.addAccessibleMethodToCall("method6");
assertMethodAccess("Accessible method by view not picked up", "method4", true);
assertMethodAccess("Accessible method by view not picked up", "method6", true);
assertMethodAccess("Method not accessible for empty method to call", null, true);
}
| void function() throws Exception { ViewPostMetadata viewPostMetadata = new ViewPostMetadata(); model.setViewPostMetadata(viewPostMetadata); assertMethodAccess(STR, STR, true); assertMethodAccess(STR, STR, true); viewPostMetadata.addAvailableMethodToCall( STR ); assertMethodAccess(STR, STR, false); viewPostMetadata.addAccessibleMethodToCall(STR); viewPostMetadata.addAccessibleMethodToCall(STR); assertMethodAccess(STR, STR, true); assertMethodAccess(STR, STR, true); assertMethodAccess(STR, null, true); } | /**
* Tests method access is being granted where annotations are present and the method is within
* the view configuration.
*/ | Tests method access is being granted where annotations are present and the method is within the view configuration | testCheckHandlerMethodAccess | {
"repo_name": "ricepanda/rice-git3",
"path": "rice-framework/krad-web-framework/src/test/java/org/kuali/rice/krad/web/controller/UifControllerHandlerInterceptorTest.java",
"license": "apache-2.0",
"size": 5206
} | [
"org.kuali.rice.krad.uif.lifecycle.ViewPostMetadata"
] | import org.kuali.rice.krad.uif.lifecycle.ViewPostMetadata; | import org.kuali.rice.krad.uif.lifecycle.*; | [
"org.kuali.rice"
] | org.kuali.rice; | 1,327,290 |
public final class RtIssuesTest {
@Test
public void createIssue() throws Exception {
final String title = "Found a bug";
final String body = issue(title).toString();
final MkContainer container = new MkGrizzlyContainer().next(
new MkAnswer.Simple(HttpURLConnection.HTTP_CREATED, body)
).next(new MkAnswer.Simple(HttpURLConnection.HTTP_OK, body)).start();
final RtIssues issues = new RtIssues(
new JdkRequest(container.home()),
repo()
);
final Issue issue = issues.create(title, "having a problem with it.");
MatcherAssert.assertThat(
container.take().method(),
Matchers.equalTo(Request.POST)
);
MatcherAssert.assertThat(
new Issue.Smart(issue).title(),
Matchers.equalTo(title)
);
container.stop();
} | final class RtIssuesTest { public void function() throws Exception { final String title = STR; final String body = issue(title).toString(); final MkContainer container = new MkGrizzlyContainer().next( new MkAnswer.Simple(HttpURLConnection.HTTP_CREATED, body) ).next(new MkAnswer.Simple(HttpURLConnection.HTTP_OK, body)).start(); final RtIssues issues = new RtIssues( new JdkRequest(container.home()), repo() ); final Issue issue = issues.create(title, STR); MatcherAssert.assertThat( container.take().method(), Matchers.equalTo(Request.POST) ); MatcherAssert.assertThat( new Issue.Smart(issue).title(), Matchers.equalTo(title) ); container.stop(); } | /**
* RtIssues can create an issue.
*
* @throws Exception if some problem inside
*/ | RtIssues can create an issue | createIssue | {
"repo_name": "cvrebert/typed-github",
"path": "src/test/java/com/jcabi/github/RtIssuesTest.java",
"license": "bsd-3-clause",
"size": 6600
} | [
"com.jcabi.http.Request",
"com.jcabi.http.mock.MkAnswer",
"com.jcabi.http.mock.MkContainer",
"com.jcabi.http.mock.MkGrizzlyContainer",
"com.jcabi.http.request.JdkRequest",
"java.net.HttpURLConnection",
"org.hamcrest.MatcherAssert",
"org.hamcrest.Matchers"
] | import com.jcabi.http.Request; import com.jcabi.http.mock.MkAnswer; import com.jcabi.http.mock.MkContainer; import com.jcabi.http.mock.MkGrizzlyContainer; import com.jcabi.http.request.JdkRequest; import java.net.HttpURLConnection; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; | import com.jcabi.http.*; import com.jcabi.http.mock.*; import com.jcabi.http.request.*; import java.net.*; import org.hamcrest.*; | [
"com.jcabi.http",
"java.net",
"org.hamcrest"
] | com.jcabi.http; java.net; org.hamcrest; | 277,033 |
// -------------------------------------------------------------------------
public int countOrphansWithConstraint(Connection con, String table1,
String col1, String table2, String col2, String constraint1) {
if (con == null) {
logger.severe("countOrphans: Database connection is null");
}
int resultLeft;
String sql = " FROM " + table1 + " LEFT JOIN " + table2 + " ON "
+ table1 + "." + col1 + " = " + table2 + "." + col2 + " WHERE "
+ table2 + "." + col2 + " iS NULL";
sql = sql + " AND " + table1 + "." + constraint1;
resultLeft = DBUtils.getRowCount(con, "SELECT COUNT(*)" + sql);
if (resultLeft > 0) {
String[] values = DBUtils.getColumnValues(con, "SELECT " + table1
+ "." + col1 + sql + " LIMIT 20");
for (int i = 0; i < values.length; i++) {
ReportManager.info(this, con, table1 + "." + col1 + " "
+ values[i] + " is not linked.");
}
}
logger.finest("Left: " + resultLeft);
return resultLeft;
} // countOrphans | int function(Connection con, String table1, String col1, String table2, String col2, String constraint1) { if (con == null) { logger.severe(STR); } int resultLeft; String sql = STR + table1 + STR + table2 + STR + table1 + "." + col1 + STR + table2 + "." + col2 + STR + table2 + "." + col2 + STR; sql = sql + STR + table1 + "." + constraint1; resultLeft = DBUtils.getRowCount(con, STR + sql); if (resultLeft > 0) { String[] values = DBUtils.getColumnValues(con, STR + table1 + "." + col1 + sql + STR); for (int i = 0; i < values.length; i++) { ReportManager.info(this, con, table1 + "." + col1 + " " + values[i] + STR); } } logger.finest(STR + resultLeft); return resultLeft; } | /**
* Verify foreign-key relations.
*
* @param con
* A connection to the database to be tested. Should already be
* open.
* @param table1
* With col1, specifies the first key to check.
* @param col1
* Column in table1 to check.
* @param table2
* With col2, specifies the second key to check.
* @param col2
* Column in table2 to check.
* @param constraint1
* additional constraint on a column in table1
* @return The number of "orphans"
*/ | Verify foreign-key relations | countOrphansWithConstraint | {
"repo_name": "Ensembl/ensj-healthcheck",
"path": "src/org/ensembl/healthcheck/testcase/EnsTestCase.java",
"license": "apache-2.0",
"size": 70295
} | [
"java.sql.Connection",
"org.ensembl.healthcheck.ReportManager",
"org.ensembl.healthcheck.util.DBUtils"
] | import java.sql.Connection; import org.ensembl.healthcheck.ReportManager; import org.ensembl.healthcheck.util.DBUtils; | import java.sql.*; import org.ensembl.healthcheck.*; import org.ensembl.healthcheck.util.*; | [
"java.sql",
"org.ensembl.healthcheck"
] | java.sql; org.ensembl.healthcheck; | 2,185,292 |
context.put(barkKey, new Context.Factory<Bark>() { | context.put(barkKey, new Context.Factory<Bark>() { | /**
* Preregisters factories to create and use a Bark object for use as
* both a Log and a Bark.
*/ | Preregisters factories to create and use a Bark object for use as both a Log and a Bark | preRegister | {
"repo_name": "TheTypoMaster/Scaper",
"path": "openjdk/langtools/src/share/classes/com/sun/tools/apt/util/Bark.java",
"license": "gpl-2.0",
"size": 6437
} | [
"com.sun.tools.javac.util.Context"
] | import com.sun.tools.javac.util.Context; | import com.sun.tools.javac.util.*; | [
"com.sun.tools"
] | com.sun.tools; | 1,018,247 |
public void forEach(long parallelismThreshold,
BiConsumer<? super K,? super V> action) {
if (action == null) throw new NullPointerException();
new ForEachMappingTask<K,V>
(null, batchFor(parallelismThreshold), 0, 0, table,
action).invoke();
} | void function(long parallelismThreshold, BiConsumer<? super K,? super V> action) { if (action == null) throw new NullPointerException(); new ForEachMappingTask<K,V> (null, batchFor(parallelismThreshold), 0, 0, table, action).invoke(); } | /**
* Performs the given action for each (key, value).
*
* @param parallelismThreshold the (estimated) number of elements
* needed for this operation to be executed in parallel
* @param action the action
* @since 1.8
*/ | Performs the given action for each (key, value) | forEach | {
"repo_name": "flyzsd/java-code-snippets",
"path": "ibm.jdk8/src/java/util/concurrent/ConcurrentHashMap.java",
"license": "mit",
"size": 263097
} | [
"java.util.function.BiConsumer"
] | import java.util.function.BiConsumer; | import java.util.function.*; | [
"java.util"
] | java.util; | 978,339 |
public Class<? extends MasterObserver>[] getMasterObserverClasses() {
return MASTER_OBSERVER_CLASSES.getArray(this);
} | Class<? extends MasterObserver>[] function() { return MASTER_OBSERVER_CLASSES.getArray(this); } | /**
* Get array of MasterObserver classes set in the configuration.
*
* @return array of MasterObserver classes.
*/ | Get array of MasterObserver classes set in the configuration | getMasterObserverClasses | {
"repo_name": "renato2099/giraph-gora",
"path": "giraph-core/src/main/java/org/apache/giraph/conf/GiraphConfiguration.java",
"license": "apache-2.0",
"size": 32121
} | [
"org.apache.giraph.master.MasterObserver"
] | import org.apache.giraph.master.MasterObserver; | import org.apache.giraph.master.*; | [
"org.apache.giraph"
] | org.apache.giraph; | 1,663,760 |
private long dispatchBlockMoves() throws InterruptedException {
final long bytesLastMoved = getBytesMoved();
final Future<?>[] futures = new Future<?>[sources.size()]; | long function() throws InterruptedException { final long bytesLastMoved = getBytesMoved(); final Future<?>[] futures = new Future<?>[sources.size()]; | /**
* Dispatch block moves for each source. The thread selects blocks to move &
* sends request to proxy source to initiate block move. The process is flow
* controlled. Block selection is blocked if there are too many un-confirmed
* block moves.
*
* @return the total number of bytes successfully moved in this iteration.
*/ | Dispatch block moves for each source. The thread selects blocks to move & sends request to proxy source to initiate block move. The process is flow controlled. Block selection is blocked if there are too many un-confirmed block moves | dispatchBlockMoves | {
"repo_name": "cnfire/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Dispatcher.java",
"license": "apache-2.0",
"size": 40718
} | [
"java.util.concurrent.Future"
] | import java.util.concurrent.Future; | import java.util.concurrent.*; | [
"java.util"
] | java.util; | 614,669 |
public static void sendParts(OutputStream out, Part[] parts, byte[] partBoundary)
throws IOException {
if (parts == null) {
throw new IllegalArgumentException("Parts may not be null");
}
if (partBoundary == null || partBoundary.length == 0) {
throw new IllegalArgumentException("partBoundary may not be empty");
}
for (int i = 0; i < parts.length; i++) {
// set the part boundary before the part is sent
parts[i].setPartBoundary(partBoundary);
parts[i].send(out);
}
out.write(EXTRA_BYTES);
out.write(partBoundary);
out.write(EXTRA_BYTES);
out.write(CRLF_BYTES);
} | static void function(OutputStream out, Part[] parts, byte[] partBoundary) throws IOException { if (parts == null) { throw new IllegalArgumentException(STR); } if (partBoundary == null partBoundary.length == 0) { throw new IllegalArgumentException(STR); } for (int i = 0; i < parts.length; i++) { parts[i].setPartBoundary(partBoundary); parts[i].send(out); } out.write(EXTRA_BYTES); out.write(partBoundary); out.write(EXTRA_BYTES); out.write(CRLF_BYTES); } | /**
* Write all parts and the last boundary to the specified output stream.
*
* @param out The stream to write to.
* @param parts The parts to write.
* @param partBoundary The ASCII bytes to use as the part boundary.
*
* @throws IOException If an I/O error occurs while writing the parts.
*
* @since 3.0
*/ | Write all parts and the last boundary to the specified output stream | sendParts | {
"repo_name": "magneticmoon/httpclient3-ntml",
"path": "src/java/org/apache/commons/httpclient/methods/multipart/Part.java",
"license": "apache-2.0",
"size": 14715
} | [
"java.io.IOException",
"java.io.OutputStream"
] | import java.io.IOException; import java.io.OutputStream; | import java.io.*; | [
"java.io"
] | java.io; | 420,241 |
private void createMain(ClassType clazztest, Set<ClassMemberSignature> sigs) {
MethodGen mainGen;
mainGen = new MethodGen(Constants.ACC_PUBLIC | Constants.ACC_STATIC, // private and static
org.apache.bcel.generic.Type.VOID, // return type
new org.apache.bcel.generic.Type[] // parameters
{ new org.apache.bcel.generic.ArrayType("java.lang.String", 1) }, null, // parameters names: we do not care
"main", // method's name
this.getClassName(), // defining class
this.getMainCode(clazztest, sigs), this.getConstantPool()); // constant pool
// we must always call these methods before the getMethod()
// method below. They set the number of local variables and stack
// elements used by the code of the method
mainGen.setMaxStack();
mainGen.setMaxLocals();
// we add a method to the class that we are generating
this.addMethod(mainGen.getMethod());
} | void function(ClassType clazztest, Set<ClassMemberSignature> sigs) { MethodGen mainGen; mainGen = new MethodGen(Constants.ACC_PUBLIC Constants.ACC_STATIC, org.apache.bcel.generic.Type.VOID, new org.apache.bcel.generic.Type[] { new org.apache.bcel.generic.ArrayType(STR, 1) }, null, "main", this.getClassName(), this.getMainCode(clazztest, sigs), this.getConstantPool()); mainGen.setMaxStack(); mainGen.setMaxLocals(); this.addMethod(mainGen.getMethod()); } | /**
* Create the Main method of the class
* @param clazztest
* the class type being tested
* @param sigs
* a set of class member signatures
*/ | Create the Main method of the class | createMain | {
"repo_name": "Kaos1337/ProjectKitten",
"path": "src/javaBytecodeGenerator/TestClassGenerator.java",
"license": "gpl-2.0",
"size": 10086
} | [
"java.util.Set",
"org.apache.bcel.Constants",
"org.apache.bcel.generic.MethodGen",
"org.apache.bcel.generic.Type"
] | import java.util.Set; import org.apache.bcel.Constants; import org.apache.bcel.generic.MethodGen; import org.apache.bcel.generic.Type; | import java.util.*; import org.apache.bcel.*; import org.apache.bcel.generic.*; | [
"java.util",
"org.apache.bcel"
] | java.util; org.apache.bcel; | 985,685 |
@Override
public ArrayList<PlaceArmiesMove> getPlaceArmiesMoves(BotState state, Long timeOut)
{
ArrayList<PlaceArmiesMove> placeArmiesMoves = new ArrayList<PlaceArmiesMove>();
String myName = state.getMyPlayerName();
int armies = 2;
int armiesLeft = state.getStartingArmies();
LinkedList<RegionData> visibleRegions = state.getMap().getRegions();
while(armiesLeft > 0)
{
double rand = Math.random();
int r = (int) (rand*visibleRegions.size());
RegionData region = visibleRegions.get(r);
if(region.ownedByPlayer(myName))
{
placeArmiesMoves.add(new PlaceArmiesMove(myName, region, Math.min(armiesLeft, armies)));
armiesLeft -= armies;
}
}
return placeArmiesMoves;
} | ArrayList<PlaceArmiesMove> function(BotState state, Long timeOut) { ArrayList<PlaceArmiesMove> placeArmiesMoves = new ArrayList<PlaceArmiesMove>(); String myName = state.getMyPlayerName(); int armies = 2; int armiesLeft = state.getStartingArmies(); LinkedList<RegionData> visibleRegions = state.getMap().getRegions(); while(armiesLeft > 0) { double rand = Math.random(); int r = (int) (rand*visibleRegions.size()); RegionData region = visibleRegions.get(r); if(region.ownedByPlayer(myName)) { placeArmiesMoves.add(new PlaceArmiesMove(myName, region, Math.min(armiesLeft, armies))); armiesLeft -= armies; } } return placeArmiesMoves; } | /**
* This method is called for at first part of each round. This example puts two armies on random regions
* until he has no more armies left to place.
* @return The list of PlaceArmiesMoves for one round
*/ | This method is called for at first part of each round. This example puts two armies on random regions until he has no more armies left to place | getPlaceArmiesMoves | {
"repo_name": "kefik/conquest-engine-gui",
"path": "Conquest/src/conquest/bot/BotStarter.java",
"license": "apache-2.0",
"size": 4717
} | [
"java.util.ArrayList",
"java.util.LinkedList"
] | import java.util.ArrayList; import java.util.LinkedList; | import java.util.*; | [
"java.util"
] | java.util; | 599,918 |
public long getAvailableSize() {
return BioUtils.getAvailableSize(this);
} | long function() { return BioUtils.getAvailableSize(this); } | /**
* Returns the size effectively available. This returns the same value as
* {@link #getSize()} if no range is defined, otherwise it returns the size
* of the range using {@link Range#getSize()}.
*
* @return The available size.
*/ | Returns the size effectively available. This returns the same value as <code>#getSize()</code> if no range is defined, otherwise it returns the size of the range using <code>Range#getSize()</code> | getAvailableSize | {
"repo_name": "theanuradha/debrief",
"path": "org.mwc.asset.comms/docs/restlet_src/org.restlet/org/restlet/representation/Representation.java",
"license": "epl-1.0",
"size": 23610
} | [
"org.restlet.engine.io.BioUtils"
] | import org.restlet.engine.io.BioUtils; | import org.restlet.engine.io.*; | [
"org.restlet.engine"
] | org.restlet.engine; | 1,911,696 |
@Nonnull
public double[] readDoubleArray() {
return ArrayUtils.toPrimitive(this.readArray(PresetBuffer::readDouble));
} | double[] function() { return ArrayUtils.toPrimitive(this.readArray(PresetBuffer::readDouble)); } | /**
* Reads an array of fixed-point double values from this buffer.
*/ | Reads an array of fixed-point double values from this buffer | readDoubleArray | {
"repo_name": "dotStart/Pandemonium",
"path": "preset/src/main/java/tv/dotstart/pandemonium/preset/PresetBuffer.java",
"license": "apache-2.0",
"size": 18094
} | [
"org.apache.commons.lang3.ArrayUtils"
] | import org.apache.commons.lang3.ArrayUtils; | import org.apache.commons.lang3.*; | [
"org.apache.commons"
] | org.apache.commons; | 2,276,019 |
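For context, a small hedged example of the ArrayUtils.toPrimitive call used above (commons-lang3 is assumed to be on the classpath); it simply unboxes a Double[] into a double[]:

import org.apache.commons.lang3.ArrayUtils;

public class ToPrimitiveExample {
    public static void main(String[] args) {
        Double[] boxed = { 1.0, 2.5, 3.75 };
        double[] primitive = ArrayUtils.toPrimitive(boxed); // unboxes every element
        System.out.println(primitive.length); // 3
    }
}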
public static List<String> bruteForceTokenise(CharSequence text, String sessionId) {
List<String> tokens = new ArrayList<>();
Pattern separatorPattern = Tokeniser.getTokenSeparators(sessionId);
Matcher matcher = separatorPattern.matcher(text);
TIntSet separatorMatches = new TIntHashSet();
while (matcher.find())
separatorMatches.add(matcher.start());
int currentPos = 0;
for (int i = 0; i < text.length(); i++) {
if (separatorMatches.contains(i)) {
if (i > currentPos)
tokens.add(text.subSequence(currentPos, i).toString());
tokens.add(text.subSequence(i, i + 1).toString());
currentPos = i + 1;
}
}
if (currentPos < text.length())
tokens.add(text.subSequence(currentPos, text.length()).toString());
return tokens;
} | static List<String> function(CharSequence text, String sessionId) { List<String> tokens = new ArrayList<>(); Pattern separatorPattern = Tokeniser.getTokenSeparators(sessionId); Matcher matcher = separatorPattern.matcher(text); TIntSet separatorMatches = new TIntHashSet(); while (matcher.find()) separatorMatches.add(matcher.start()); int currentPos = 0; for (int i = 0; i < text.length(); i++) { if (separatorMatches.contains(i)) { if (i > currentPos) tokens.add(text.subSequence(currentPos, i).toString()); tokens.add(text.subSequence(i, i + 1).toString()); currentPos = i + 1; } } if (currentPos < text.length()) tokens.add(text.subSequence(currentPos, text.length()).toString()); return tokens; } | /**
* For a given text, returns a list of strings which are guaranteed not to
* overlap any token boundaries, except on the unlikely occurrence when a
 * token placeholder cuts a token in the middle of two non-separators.
 */ | For a given text, returns a list of strings which are guaranteed not to overlap any token boundaries, except on the unlikely occurrence when a token placeholder cuts a token in the middle of two non-separators | bruteForceTokenise | {
"repo_name": "urieli/talismane",
"path": "talismane_core/src/main/java/com/joliciel/talismane/tokeniser/Tokeniser.java",
"license": "agpl-3.0",
"size": 11684
} | [
"gnu.trove.set.TIntSet",
"gnu.trove.set.hash.TIntHashSet",
"java.util.ArrayList",
"java.util.List",
"java.util.regex.Matcher",
"java.util.regex.Pattern"
] | import gnu.trove.set.TIntSet; import gnu.trove.set.hash.TIntHashSet; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; | import gnu.trove.set.*; import gnu.trove.set.hash.*; import java.util.*; import java.util.regex.*; | [
"gnu.trove.set",
"java.util"
] | gnu.trove.set; java.util; | 2,829,264 |
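The currentPos/subSequence bookkeeping above can be illustrated with plain java.util.regex and no Talismane classes; this simplified sketch treats each separator match as its own token (the sample pattern is an assumption chosen only for the demo):

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SeparatorSplitSketch {

    // Emits the text between separator matches and each separator match itself as tokens.
    public static List<String> split(CharSequence text, Pattern separator) {
        List<String> tokens = new ArrayList<>();
        Matcher matcher = separator.matcher(text);
        int currentPos = 0;
        while (matcher.find()) {
            if (matcher.start() > currentPos) {
                tokens.add(text.subSequence(currentPos, matcher.start()).toString());
            }
            tokens.add(matcher.group()); // the separator itself becomes a token
            currentPos = matcher.end();
        }
        if (currentPos < text.length()) {
            tokens.add(text.subSequence(currentPos, text.length()).toString());
        }
        return tokens;
    }

    public static void main(String[] args) {
        System.out.println(split("hello, world!", Pattern.compile("[,\\s!]")));
    }
}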
final RtGithub github = new RtGithub(new FakeRequest());
MatcherAssert.assertThat(
github.repos(),
Matchers.notNullValue()
);
} | final RtGithub github = new RtGithub(new FakeRequest()); MatcherAssert.assertThat( github.repos(), Matchers.notNullValue() ); } | /**
* RtGithub can retrieve its repos.
*
* @throws Exception if a problem occurs.
*/ | RtGithub can retrieve its repos | retrievesRepos | {
"repo_name": "shelan/jcabi-github",
"path": "src/test/java/com/jcabi/github/RtGithubTest.java",
"license": "bsd-3-clause",
"size": 6029
} | [
"com.jcabi.http.request.FakeRequest",
"org.hamcrest.MatcherAssert",
"org.hamcrest.Matchers"
] | import com.jcabi.http.request.FakeRequest; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; | import com.jcabi.http.request.*; import org.hamcrest.*; | [
"com.jcabi.http",
"org.hamcrest"
] | com.jcabi.http; org.hamcrest; | 1,573,202 |
@FIXVersion(introduced="5.0")
@TagNumRef(tagNum=TagNum.PriceProtectionScope)
public void setPriceProtectionScope(PriceProtectionScope priceProtectionScope) {
this.priceProtectionScope = priceProtectionScope;
} | @FIXVersion(introduced="5.0") @TagNumRef(tagNum=TagNum.PriceProtectionScope) void function(PriceProtectionScope priceProtectionScope) { this.priceProtectionScope = priceProtectionScope; } | /**
* Message field setter.
* @param priceProtectionScope field value
*/ | Message field setter | setPriceProtectionScope | {
"repo_name": "marvisan/HadesFIX",
"path": "Model/src/main/java/net/hades/fix/message/OrderModificationRequestMsg.java",
"license": "gpl-3.0",
"size": 149491
} | [
"net.hades.fix.message.anno.FIXVersion",
"net.hades.fix.message.anno.TagNumRef",
"net.hades.fix.message.type.PriceProtectionScope",
"net.hades.fix.message.type.TagNum"
] | import net.hades.fix.message.anno.FIXVersion; import net.hades.fix.message.anno.TagNumRef; import net.hades.fix.message.type.PriceProtectionScope; import net.hades.fix.message.type.TagNum; | import net.hades.fix.message.anno.*; import net.hades.fix.message.type.*; | [
"net.hades.fix"
] | net.hades.fix; | 1,390,208 |
@Override
public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
if (childrenFeatures == null) {
super.getChildrenFeatures(object);
childrenFeatures.add(TransformrPackage.Literals.GRAPH__ELEMENTS);
}
return childrenFeatures;
} | Collection<? extends EStructuralFeature> function(Object object) { if (childrenFeatures == null) { super.getChildrenFeatures(object); childrenFeatures.add(TransformrPackage.Literals.GRAPH__ELEMENTS); } return childrenFeatures; } | /**
* This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
* {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
* {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This specifies how to implement <code>#getChildren</code> and is used to deduce an appropriate feature for an <code>org.eclipse.emf.edit.command.AddCommand</code>, <code>org.eclipse.emf.edit.command.RemoveCommand</code> or <code>org.eclipse.emf.edit.command.MoveCommand</code> in <code>#createCommand</code>. | getChildrenFeatures | {
"repo_name": "ckrause/transformr",
"path": "plugins/org.transformr.edit/src/org/transformr/provider/GraphItemProvider.java",
"license": "epl-1.0",
"size": 7632
} | [
"java.util.Collection",
"org.eclipse.emf.ecore.EStructuralFeature",
"org.transformr.model.TransformrPackage"
] | import java.util.Collection; import org.eclipse.emf.ecore.EStructuralFeature; import org.transformr.model.TransformrPackage; | import java.util.*; import org.eclipse.emf.ecore.*; import org.transformr.model.*; | [
"java.util",
"org.eclipse.emf",
"org.transformr.model"
] | java.util; org.eclipse.emf; org.transformr.model; | 1,081,727 |
public void update(Observable arg0, Object arg1)
{
if (arg1 instanceof Event)
{
Event event = (Event) arg1;
if (event.getModify())
{
String refstr = event.getResource();
if (this.m_updatableResources.contains(refstr))
{
log.debug("Updating configuration from " + refstr);
updateConfig(refstr);
}
}
}
} | void function(Observable arg0, Object arg1) { if (arg1 instanceof Event) { Event event = (Event) arg1; if (event.getModify()) { String refstr = event.getResource(); if (this.m_updatableResources.contains(refstr)) { log.debug(STR + refstr); updateConfig(refstr); } } } } | /**
 * Called when an observed object changes (@see java.util.Observer#update)
* @param arg0 - The observed object
* @param arg1 - Event argument
 */ | Called when an observed object changes (@see java.util.Observer#update) | update | {
"repo_name": "OpenCollabZA/sakai",
"path": "citations/citations-impl/impl/src/java/org/sakaiproject/citation/impl/BaseConfigurationService.java",
"license": "apache-2.0",
"size": 47349
} | [
"java.util.Observable",
"org.sakaiproject.event.api.Event"
] | import java.util.Observable; import org.sakaiproject.event.api.Event; | import java.util.*; import org.sakaiproject.event.api.*; | [
"java.util",
"org.sakaiproject.event"
] | java.util; org.sakaiproject.event; | 2,676,950 |
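A JDK-only sketch of the Observer/Observable wiring that drives an update(Observable, Object) callback like the one above; java.util.Observable is deprecated since Java 9 but still compiles, and the String payload here stands in for the Sakai Event:

import java.util.Observable;
import java.util.Observer;

public class ObserverSketch {

    static class EventSource extends Observable {
        void fire(String payload) {
            setChanged();              // mark this observable as changed
            notifyObservers(payload);  // payload arrives as the second update() argument
        }
    }

    public static void main(String[] args) {
        EventSource source = new EventSource();
        Observer observer = (observable, arg) -> System.out.println("got: " + arg);
        source.addObserver(observer);
        source.fire("resource-modified"); // prints "got: resource-modified"
    }
}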
int substitute(InputStream in, OutputStream out, SubstitutionType type, String encoding)
throws Exception; | int substitute(InputStream in, OutputStream out, SubstitutionType type, String encoding) throws Exception; | /**
* Substitutes the variables found in the specified input stream. Escapes special characters
* using file type specific escaping if necessary.
*
* @param in the input stream to read
* @param out the output stream to write
* @param type the file type or null for plain
* @param encoding the character encoding or null for default
* @return the number of substitutions made
* @throws IllegalArgumentException if unknown file type specified
* @throws UnsupportedEncodingException if encoding not supported
* @throws IOException if an I/O error occurs
*/ | Substitutes the variables found in the specified input stream. Escapes special characters using file type specific escaping if necessary | substitute | {
"repo_name": "mtjandra/izpack",
"path": "izpack-api/src/main/java/com/izforge/izpack/api/substitutor/VariableSubstitutor.java",
"license": "apache-2.0",
"size": 4712
} | [
"java.io.InputStream",
"java.io.OutputStream"
] | import java.io.InputStream; import java.io.OutputStream; | import java.io.*; | [
"java.io"
] | java.io; | 2,831,124 |
private Node buildTop(List<T> leaves) {
checkNotNull(leaves);
List<Node> hashNodes = new ArrayList<>();
for (int i = 0; i < leaves.size(); i += dimensions) {
int idx = i + dimensions > leaves.size()
? leaves.size()
: i + dimensions;
List<T> curLeaves = leaves.subList(i, idx);
hashNodes.add(curLeaves.size() == 1 ? curLeaves.get(0) : new HashNode(new ArrayList<Node>(curLeaves)));
}
List<Node> finalTree = build(hashNodes);
checkState(finalTree.size() > 0, "Final tree cannot have 0 root nodes.");
return finalTree.get(0);
} | Node function(List<T> leaves) { checkNotNull(leaves); List<Node> hashNodes = new ArrayList<>(); for (int i = 0; i < leaves.size(); i += dimensions) { int idx = i + dimensions > leaves.size() ? leaves.size() : i + dimensions; List<T> curLeaves = leaves.subList(i, idx); hashNodes.add(curLeaves.size() == 1 ? curLeaves.get(0) : new HashNode(new ArrayList<Node>(curLeaves))); } List<Node> finalTree = build(hashNodes); checkState(finalTree.size() > 0, STR); return finalTree.get(0); } | /**
* The merkle tree is constructed from the bottom up.
*
 * @param leaves the leaf nodes to build the tree from
 * @return the root node of the constructed tree
*/ | The merkle tree is constructed from the bottom up | buildTop | {
"repo_name": "calrissian/accumulo-recipes",
"path": "store/changelog-store/src/main/java/org/calrissian/accumlorecipes/changelog/domain/MerkleTree.java",
"license": "apache-2.0",
"size": 7496
} | [
"com.google.common.base.Preconditions",
"java.util.ArrayList",
"java.util.List"
] | import com.google.common.base.Preconditions; import java.util.ArrayList; import java.util.List; | import com.google.common.base.*; import java.util.*; | [
"com.google.common",
"java.util"
] | com.google.common; java.util; | 1,682,498 |
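The grouping step in buildTop is just consecutive chunking with subList; a generic, JDK-only sketch of that index arithmetic (the project's Node/HashNode types are left out):

import java.util.ArrayList;
import java.util.List;

public class ChunkSketch {

    // Splits items into consecutive groups of at most 'dimensions' elements.
    public static <T> List<List<T>> chunk(List<T> items, int dimensions) {
        List<List<T>> groups = new ArrayList<>();
        for (int i = 0; i < items.size(); i += dimensions) {
            int idx = Math.min(i + dimensions, items.size());
            groups.add(new ArrayList<>(items.subList(i, idx)));
        }
        return groups;
    }

    public static void main(String[] args) {
        System.out.println(chunk(List.of(1, 2, 3, 4, 5), 2)); // [[1, 2], [3, 4], [5]]
    }
}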
protected boolean loadTemplateFromString(String template, String password)
throws InvalidLoginException {
JSONObject json;
try {
json = new JSONObject(template);
username = json.getString("username");
this.password = json.getString("password");
if (!this.password.equals(password)) {
throw new InvalidLoginException();
}
counter = json.getInt("counter");
templateHoldCount = json.getInt("holdCount");
JSONArray samples = json.getJSONArray("samples");
for (int i = 0; i < json.getInt("samplesCount"); i++) {
List<LoggedKey> rowValues = new ArrayList<LoggedKey>();
JSONObject sample = samples.getJSONObject(i);
JSONArray flyingTimes = sample.getJSONArray("flyingTimes");
JSONArray errors = sample.getJSONArray("errors");
JSONArray substitutions = sample.getJSONArray("substitutions");
JSONArray orientations = sample.getJSONArray("orientations");
JSONArray pressures = sample.getJSONArray("pressures");
JSONArray xAxises = sample.getJSONArray("xAxises");
JSONArray yAxises = sample.getJSONArray("yAxises");
JSONArray zAxises = sample.getJSONArray("zAxises");
JSONArray accXs = sample.getJSONArray("accXs");
JSONArray accYs = sample.getJSONArray("accYs");
JSONArray accZs = sample.getJSONArray("accZs");
for (int j = 0; j < flyingTimes.length(); j++) {
rowValues.add(new LoggedKey(flyingTimes.getLong(j),
substitutions.getBoolean(j), errors.getBoolean(j),
orientations.getInt(j), pressures.getDouble(j),
xAxises.getDouble(j), yAxises.getDouble(j), zAxises
.getDouble(j), accXs.getDouble(j), accYs
.getDouble(j), accZs.getDouble(j)));
}
loggedKeys.add(rowValues);
}
} catch (JSONException e) {
Log.e(UserLoggerManager.LOGNAME, e.getMessage(), e);
return false;
}
return true;
}
| boolean function(String template, String password) throws InvalidLoginException { JSONObject json; try { json = new JSONObject(template); username = json.getString(STR); this.password = json.getString(STR); if (!this.password.equals(password)) { throw new InvalidLoginException(); } counter = json.getInt(STR); templateHoldCount = json.getInt(STR); JSONArray samples = json.getJSONArray(STR); for (int i = 0; i < json.getInt(STR); i++) { List<LoggedKey> rowValues = new ArrayList<LoggedKey>(); JSONObject sample = samples.getJSONObject(i); JSONArray flyingTimes = sample.getJSONArray(STR); JSONArray errors = sample.getJSONArray(STR); JSONArray substitutions = sample.getJSONArray(STR); JSONArray orientations = sample.getJSONArray(STR); JSONArray pressures = sample.getJSONArray(STR); JSONArray xAxises = sample.getJSONArray(STR); JSONArray yAxises = sample.getJSONArray(STR); JSONArray zAxises = sample.getJSONArray(STR); JSONArray accXs = sample.getJSONArray("accXs"); JSONArray accYs = sample.getJSONArray("accYs"); JSONArray accZs = sample.getJSONArray("accZs"); for (int j = 0; j < flyingTimes.length(); j++) { rowValues.add(new LoggedKey(flyingTimes.getLong(j), substitutions.getBoolean(j), errors.getBoolean(j), orientations.getInt(j), pressures.getDouble(j), xAxises.getDouble(j), yAxises.getDouble(j), zAxises .getDouble(j), accXs.getDouble(j), accYs .getDouble(j), accZs.getDouble(j))); } loggedKeys.add(rowValues); } } catch (JSONException e) { Log.e(UserLoggerManager.LOGNAME, e.getMessage(), e); return false; } return true; } | /**
* Load user's template from JSON data.
*
* @param template
* template JSON string
* @param password
* encrypted password with sha256
 * @return true on success, false when the data are invalid
 * @throws InvalidLoginException
 * thrown when the password does not match the template password
*/ | Load user's template from JSON data | loadTemplateFromString | {
"repo_name": "Morzeux/BiosecLogger",
"path": "src/biosecLogger/core/UserModel.java",
"license": "apache-2.0",
"size": 7002
} | [
"android.util.Log",
"java.util.ArrayList",
"java.util.List",
"org.json.JSONArray",
"org.json.JSONException",
"org.json.JSONObject"
] | import android.util.Log; import java.util.ArrayList; import java.util.List; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; | import android.util.*; import java.util.*; import org.json.*; | [
"android.util",
"java.util",
"org.json"
] | android.util; java.util; org.json; | 102,967 |
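A compact org.json parsing sketch of the getString/getInt/getJSONArray pattern used above; the library is assumed to be on the classpath and the field names are illustrative, not the real template schema:

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

public class TemplateParseSketch {
    public static void main(String[] args) throws JSONException {
        String template = "{\"username\":\"alice\",\"counter\":2,"
                + "\"samples\":[{\"flyingTimes\":[12,34]}]}";
        JSONObject json = new JSONObject(template);
        String username = json.getString("username");    // "alice"
        int counter = json.getInt("counter");             // 2
        JSONArray samples = json.getJSONArray("samples");
        JSONArray flyingTimes = samples.getJSONObject(0).getJSONArray("flyingTimes");
        System.out.println(username + " " + counter + " " + flyingTimes.getLong(1)); // alice 2 34
    }
}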
private boolean upgrade(final ChannelHandlerContext ctx, final FullHttpRequest request) {
// Select the best protocol based on those requested in the UPGRADE header.
final List<CharSequence> requestedProtocols = splitHeader(request.headers().get(HttpHeaderNames.UPGRADE));
final int numRequestedProtocols = requestedProtocols.size();
UpgradeCodec upgradeCodec = null;
CharSequence upgradeProtocol = null;
for (int i = 0; i < numRequestedProtocols; i ++) {
final CharSequence p = requestedProtocols.get(i);
final UpgradeCodec c = upgradeCodecFactory.newUpgradeCodec(p);
if (c != null) {
upgradeProtocol = p;
upgradeCodec = c;
break;
}
}
if (upgradeCodec == null) {
// None of the requested protocols are supported, don't upgrade.
return false;
}
// Make sure the CONNECTION header is present.
List<String> connectionHeaderValues = request.headers().getAll(HttpHeaderNames.CONNECTION);
if (connectionHeaderValues == null) {
return false;
}
final StringBuilder concatenatedConnectionValue = new StringBuilder(connectionHeaderValues.size() * 10);
for (CharSequence connectionHeaderValue : connectionHeaderValues) {
concatenatedConnectionValue.append(connectionHeaderValue).append(COMMA);
}
concatenatedConnectionValue.setLength(concatenatedConnectionValue.length() - 1);
// Make sure the CONNECTION header contains UPGRADE as well as all protocol-specific headers.
Collection<CharSequence> requiredHeaders = upgradeCodec.requiredUpgradeHeaders();
List<CharSequence> values = splitHeader(concatenatedConnectionValue);
if (!containsContentEqualsIgnoreCase(values, HttpHeaderNames.UPGRADE) ||
!containsAllContentEqualsIgnoreCase(values, requiredHeaders)) {
return false;
}
// Ensure that all required protocol-specific headers are found in the request.
for (CharSequence requiredHeader : requiredHeaders) {
if (!request.headers().contains(requiredHeader)) {
return false;
}
}
// Prepare and send the upgrade response. Wait for this write to complete before upgrading,
// since we need the old codec in-place to properly encode the response.
final FullHttpResponse upgradeResponse = createUpgradeResponse(upgradeProtocol);
if (!upgradeCodec.prepareUpgradeResponse(ctx, request, upgradeResponse.headers())) {
return false;
}
// Create the user event to be fired once the upgrade completes.
final UpgradeEvent event = new UpgradeEvent(upgradeProtocol, request);
// After writing the upgrade response we immediately prepare the
// pipeline for the next protocol to avoid a race between completion
// of the write future and receiving data before the pipeline is
// restructured.
try {
final ChannelFuture writeComplete = ctx.writeAndFlush(upgradeResponse);
// Perform the upgrade to the new protocol.
sourceCodec.upgradeFrom(ctx);
upgradeCodec.upgradeTo(ctx, request);
// Remove this handler from the pipeline.
ctx.pipeline().remove(HttpServerUpgradeHandler.this);
// Notify that the upgrade has occurred. Retain the event to offset
// the release() in the finally block.
ctx.fireUserEventTriggered(event.retain());
// Add the listener last to avoid firing upgrade logic after
// the channel is already closed since the listener may fire
// immediately if the write failed eagerly.
writeComplete.addListener(ChannelFutureListener.CLOSE_ON_FAILURE);
} finally {
// Release the event if the upgrade event wasn't fired.
event.release();
}
return true;
} | boolean function(final ChannelHandlerContext ctx, final FullHttpRequest request) { final List<CharSequence> requestedProtocols = splitHeader(request.headers().get(HttpHeaderNames.UPGRADE)); final int numRequestedProtocols = requestedProtocols.size(); UpgradeCodec upgradeCodec = null; CharSequence upgradeProtocol = null; for (int i = 0; i < numRequestedProtocols; i ++) { final CharSequence p = requestedProtocols.get(i); final UpgradeCodec c = upgradeCodecFactory.newUpgradeCodec(p); if (c != null) { upgradeProtocol = p; upgradeCodec = c; break; } } if (upgradeCodec == null) { return false; } List<String> connectionHeaderValues = request.headers().getAll(HttpHeaderNames.CONNECTION); if (connectionHeaderValues == null) { return false; } final StringBuilder concatenatedConnectionValue = new StringBuilder(connectionHeaderValues.size() * 10); for (CharSequence connectionHeaderValue : connectionHeaderValues) { concatenatedConnectionValue.append(connectionHeaderValue).append(COMMA); } concatenatedConnectionValue.setLength(concatenatedConnectionValue.length() - 1); Collection<CharSequence> requiredHeaders = upgradeCodec.requiredUpgradeHeaders(); List<CharSequence> values = splitHeader(concatenatedConnectionValue); if (!containsContentEqualsIgnoreCase(values, HttpHeaderNames.UPGRADE) !containsAllContentEqualsIgnoreCase(values, requiredHeaders)) { return false; } for (CharSequence requiredHeader : requiredHeaders) { if (!request.headers().contains(requiredHeader)) { return false; } } final FullHttpResponse upgradeResponse = createUpgradeResponse(upgradeProtocol); if (!upgradeCodec.prepareUpgradeResponse(ctx, request, upgradeResponse.headers())) { return false; } final UpgradeEvent event = new UpgradeEvent(upgradeProtocol, request); try { final ChannelFuture writeComplete = ctx.writeAndFlush(upgradeResponse); sourceCodec.upgradeFrom(ctx); upgradeCodec.upgradeTo(ctx, request); ctx.pipeline().remove(HttpServerUpgradeHandler.this); ctx.fireUserEventTriggered(event.retain()); writeComplete.addListener(ChannelFutureListener.CLOSE_ON_FAILURE); } finally { event.release(); } return true; } | /**
* Attempts to upgrade to the protocol(s) identified by the {@link HttpHeaderNames#UPGRADE} header (if provided
* in the request).
*
* @param ctx the context for this handler.
* @param request the HTTP request.
* @return {@code true} if the upgrade occurred, otherwise {@code false}.
*/ | Attempts to upgrade to the protocol(s) identified by the <code>HttpHeaderNames#UPGRADE</code> header (if provided in the request) | upgrade | {
"repo_name": "zer0se7en/netty",
"path": "codec-http/src/main/java/io/netty/handler/codec/http/HttpServerUpgradeHandler.java",
"license": "apache-2.0",
"size": 16763
} | [
"io.netty.channel.ChannelFuture",
"io.netty.channel.ChannelFutureListener",
"io.netty.channel.ChannelHandlerContext",
"java.util.Collection",
"java.util.List"
] | import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandlerContext; import java.util.Collection; import java.util.List; | import io.netty.channel.*; import java.util.*; | [
"io.netty.channel",
"java.util"
] | io.netty.channel; java.util; | 2,583,672 |
public JSONArray put(int index, Map value) throws JSONException {
put(index, new JSONObject(value));
return this;
} | JSONArray function(int index, Map value) throws JSONException { put(index, new JSONObject(value)); return this; } | /**
* Put a value in the JSONArray, where the value will be a
* JSONObject which is produced from a Map.
* @param index The subscript.
* @param value The Map value.
* @return this.
 * @throws JSONException If the index is negative or if the value is
* an invalid number.
*/ | Put a value in the JSONArray, where the value will be a JSONObject which is produced from a Map | put | {
"repo_name": "ifcharming/voltdb2.1",
"path": "third_party/java/src/org/json_voltpatches/JSONArray.java",
"license": "gpl-3.0",
"size": 29589
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 1,667,781 |
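Assuming an org.json-style JSONArray on the classpath (the record above is a VoltDB-patched copy of that class), the documented overload would be exercised roughly like this sketch:

import java.util.HashMap;
import java.util.Map;
import org.json.JSONArray;
import org.json.JSONException;

public class JsonArrayPutSketch {
    public static void main(String[] args) throws JSONException {
        Map<String, Object> value = new HashMap<>();
        value.put("name", "widget");
        value.put("count", 3);

        JSONArray array = new JSONArray();
        array.put(0, value);        // stored as a JSONObject built from the map
        System.out.println(array);  // e.g. [{"name":"widget","count":3}]
    }
}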
public List<Stem> uniqueStems(char word[], int length) {
List<Stem> stems = new ArrayList<Stem>();
CharArraySet terms = new CharArraySet(dictionary.getVersion(), 8, dictionary.isIgnoreCase());
if (dictionary.lookupWord(word, 0, length) != null) {
stems.add(new Stem(word, length));
terms.add(word);
}
List<Stem> otherStems = stem(word, length, null, 0);
for (Stem s : otherStems) {
if (!terms.contains(s.stem)) {
stems.add(s);
terms.add(s.stem);
}
}
return stems;
}
// ================================================= Helper Methods ================================================ | List<Stem> function(char word[], int length) { List<Stem> stems = new ArrayList<Stem>(); CharArraySet terms = new CharArraySet(dictionary.getVersion(), 8, dictionary.isIgnoreCase()); if (dictionary.lookupWord(word, 0, length) != null) { stems.add(new Stem(word, length)); terms.add(word); } List<Stem> otherStems = stem(word, length, null, 0); for (Stem s : otherStems) { if (!terms.contains(s.stem)) { stems.add(s); terms.add(s.stem); } } return stems; } | /**
* Find the unique stem(s) of the provided word
*
* @param word Word to find the stems for
* @return List of stems for the word
*/ | Find the unique stem(s) of the provided word | uniqueStems | {
"repo_name": "fogbeam/Heceta_solr",
"path": "lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/HunspellStemmer.java",
"license": "apache-2.0",
"size": 13194
} | [
"java.util.ArrayList",
"java.util.List",
"org.apache.lucene.analysis.util.CharArraySet"
] | import java.util.ArrayList; import java.util.List; import org.apache.lucene.analysis.util.CharArraySet; | import java.util.*; import org.apache.lucene.analysis.util.*; | [
"java.util",
"org.apache.lucene"
] | java.util; org.apache.lucene; | 1,931,579 |
private Map getAllByReadMode(boolean inTx, IgniteCache cache, Set keys, ReadMode readMode) {
// TODO Remove in IGNITE-6938
if (inTx)
readMode = GET;
switch (readMode) {
case GET:
return cache.getAll(keys); | Map function(boolean inTx, IgniteCache cache, Set keys, ReadMode readMode) { if (inTx) readMode = GET; switch (readMode) { case GET: return cache.getAll(keys); | /**
* Reads value from cache for the given key using given read mode.
*
* // TODO IGNITE-6938 remove inTx flag
* // TODO IGNITE-6739 add SQL-get support "select _key, _val from cache where _key in ... keySet"
* @param inTx Flag whether current read is inside transaction.
* This is because reads can't see writes made in current transaction.
* @param cache Cache.
* @param keys Key.
* @param readMode Read mode.
* @return Value.
*/ | Reads value from cache for the given key using given read mode. TODO IGNITE-6938 remove inTx flag TODO IGNITE-6739 add SQL-get support "select _key, _val from cache where _key in ... keySet" | getAllByReadMode | {
"repo_name": "shroman/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/internal/processors/cache/mvcc/CacheMvccTransactionsTest.java",
"license": "apache-2.0",
"size": 110904
} | [
"java.util.Map",
"java.util.Set",
"org.apache.ignite.IgniteCache"
] | import java.util.Map; import java.util.Set; import org.apache.ignite.IgniteCache; | import java.util.*; import org.apache.ignite.*; | [
"java.util",
"org.apache.ignite"
] | java.util; org.apache.ignite; | 2,079,689 |
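In the plain GET read mode above, cache.getAll is the standard IgniteCache bulk read. A heavily hedged sketch, assuming a default Ignite configuration and made-up cache name and keys:

import java.util.Map;
import java.util.Set;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.Ignition;

public class GetAllSketch {
    public static void main(String[] args) {
        try (Ignite ignite = Ignition.start()) {        // default configuration, single local node
            IgniteCache<Integer, String> cache = ignite.getOrCreateCache("demo");
            cache.put(1, "one");
            cache.put(2, "two");
            Map<Integer, String> values = cache.getAll(Set.of(1, 2)); // bulk read, i.e. the GET path
            System.out.println(values);
        }
    }
}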
@Override
public int codeToGID(int code) throws IOException
{
if (!isEmbedded)
{
// The conforming reader shall select glyphs by translating characters from the
// encoding specified by the predefined CMap to one of the encodings in the TrueType
// font's 'cmap' table. The means by which this is accomplished are implementation-
// dependent.
boolean hasUnicodeMap = parent.getCMapUCS2() != null;
if (cid2gid != null)
{
// Acrobat allows non-embedded GIDs - todo: can we find a test PDF for this?
int cid = codeToCID(code);
return cid2gid[cid];
}
else if (hasIdentityCid2Gid || !hasUnicodeMap)
{
// same as above, but for the default Identity CID2GIDMap or when there is no
// ToUnicode CMap to fallback to, see PDFBOX-2599 and PDFBOX-2560
// todo: can we find a test PDF for the Identity case?
return codeToCID(code);
}
else
{
// fallback to the ToUnicode CMap, test with PDFBOX-1422 and PDFBOX-2560
String unicode = parent.toUnicode(code);
if (unicode == null)
{
LOG.warn("Failed to find a character mapping for " + code + " in " + getName());
return 0;
}
else if (unicode.length() > 1)
{
LOG.warn("Trying to map multi-byte character using 'cmap', result will be poor");
}
// a non-embedded font always has a cmap (otherwise FontMapper won't load it)
return cmap.getGlyphId(unicode.codePointAt(0));
}
}
else
{
// If the TrueType font program is embedded, the Type 2 CIDFont dictionary shall contain
// a CIDToGIDMap entry that maps CIDs to the glyph indices for the appropriate glyph
// descriptions in that font program.
int cid = codeToCID(code);
if (cid2gid != null)
{
// use CIDToGIDMap
if (cid < cid2gid.length)
{
return cid2gid[cid];
}
else
{
return 0;
}
}
else
{
// "Identity" is the default CIDToGIDMap
if (cid < ttf.getNumberOfGlyphs())
{
return cid;
}
else
{
// out of range CIDs map to GID 0
return 0;
}
}
}
} | int function(int code) throws IOException { if (!isEmbedded) { boolean hasUnicodeMap = parent.getCMapUCS2() != null; if (cid2gid != null) { int cid = codeToCID(code); return cid2gid[cid]; } else if (hasIdentityCid2Gid !hasUnicodeMap) { return codeToCID(code); } else { String unicode = parent.toUnicode(code); if (unicode == null) { LOG.warn(STR + code + STR + getName()); return 0; } else if (unicode.length() > 1) { LOG.warn(STR); } return cmap.getGlyphId(unicode.codePointAt(0)); } } else { int cid = codeToCID(code); if (cid2gid != null) { if (cid < cid2gid.length) { return cid2gid[cid]; } else { return 0; } } else { if (cid < ttf.getNumberOfGlyphs()) { return cid; } else { return 0; } } } } | /**
* Returns the GID for the given character code.
*
* @param code character code
* @return GID
* @throws IOException
*/ | Returns the GID for the given character code | codeToGID | {
"repo_name": "benmccann/pdfbox",
"path": "pdfbox/src/main/java/org/apache/pdfbox/pdmodel/font/PDCIDFontType2.java",
"license": "apache-2.0",
"size": 14173
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,045,208 |
@SmallTest
@Feature({"OverlayPanelBase"})
public void testLargeDesiredHeightIsMaximized() {
final float aboveMax = MOCK_MAXIMIZED_HEIGHT + 1000;
PanelState nextState =
mExpandPanel.findNearestPanelStateFromHeight(aboveMax, UPWARD_VELOCITY);
assertTrue(nextState == PanelState.MAXIMIZED);
nextState = mNoExpandPanel.findNearestPanelStateFromHeight(aboveMax, UPWARD_VELOCITY);
assertTrue(nextState == PanelState.MAXIMIZED);
// Make sure nothing bad happens if velocity is downward (this should never happen).
nextState = mExpandPanel.findNearestPanelStateFromHeight(aboveMax, DOWNWARD_VELOCITY);
assertTrue(nextState == PanelState.MAXIMIZED);
nextState = mNoExpandPanel.findNearestPanelStateFromHeight(aboveMax, DOWNWARD_VELOCITY);
assertTrue(nextState == PanelState.MAXIMIZED);
} | @Feature({STR}) void function() { final float aboveMax = MOCK_MAXIMIZED_HEIGHT + 1000; PanelState nextState = mExpandPanel.findNearestPanelStateFromHeight(aboveMax, UPWARD_VELOCITY); assertTrue(nextState == PanelState.MAXIMIZED); nextState = mNoExpandPanel.findNearestPanelStateFromHeight(aboveMax, UPWARD_VELOCITY); assertTrue(nextState == PanelState.MAXIMIZED); nextState = mExpandPanel.findNearestPanelStateFromHeight(aboveMax, DOWNWARD_VELOCITY); assertTrue(nextState == PanelState.MAXIMIZED); nextState = mNoExpandPanel.findNearestPanelStateFromHeight(aboveMax, DOWNWARD_VELOCITY); assertTrue(nextState == PanelState.MAXIMIZED); } | /**
* Tests that a panel is only maximized when desired height is far above the max.
*/ | Tests that a panel is only maximized when desired height is far above the max | testLargeDesiredHeightIsMaximized | {
"repo_name": "danakj/chromium",
"path": "chrome/android/javatests/src/org/chromium/chrome/browser/compositor/bottombar/OverlayPanelBaseTest.java",
"license": "bsd-3-clause",
"size": 10213
} | [
"org.chromium.base.test.util.Feature",
"org.chromium.chrome.browser.compositor.bottombar.OverlayPanel"
] | import org.chromium.base.test.util.Feature; import org.chromium.chrome.browser.compositor.bottombar.OverlayPanel; | import org.chromium.base.test.util.*; import org.chromium.chrome.browser.compositor.bottombar.*; | [
"org.chromium.base",
"org.chromium.chrome"
] | org.chromium.base; org.chromium.chrome; | 651,055 |
public ArrayListOfInts clone() {
return new ArrayListOfInts(Arrays.copyOf(array, this.size()));
} | ArrayListOfInts function() { return new ArrayListOfInts(Arrays.copyOf(array, this.size())); } | /**
* Returns a clone of this object.
*/ | Returns a clone of this object | clone | {
"repo_name": "duyvk/mavuno",
"path": "src/edu/umd/cloud9/util/array/ArrayListOfInts.java",
"license": "apache-2.0",
"size": 10160
} | [
"java.util.Arrays"
] | import java.util.Arrays; | import java.util.*; | [
"java.util"
] | java.util; | 2,354,974 |
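The clone() above is a defensive copy via Arrays.copyOf; a pure-JDK illustration that the copy gets its own backing array:

import java.util.Arrays;

public class CopyOfSketch {
    public static void main(String[] args) {
        int[] original = { 4, 8, 15, 16 };
        int[] copy = Arrays.copyOf(original, original.length); // independent backing array
        copy[0] = 99;
        System.out.println(original[0]); // still 4, the original is untouched
    }
}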
public List<Producto> obtenerProductos() {
return obtenerProductos(DB_ORDENACION_NO, DB_ORDENACION_DESC);
}
| List<Producto> function() { return obtenerProductos(DB_ORDENACION_NO, DB_ORDENACION_DESC); } | /**
 * Returns all Productos stored in the database
 *
 * @return List of Productos
 */ | Returns all Productos stored in the database | obtenerProductos | {
"repo_name": "ZaraTech/ps-15-e06-ztd1",
"path": "src/com/zaratech/smarket/utiles/AdaptadorBD.java",
"license": "mit",
"size": 28605
} | [
"com.zaratech.smarket.componentes.Producto",
"java.util.List"
] | import com.zaratech.smarket.componentes.Producto; import java.util.List; | import com.zaratech.smarket.componentes.*; import java.util.*; | [
"com.zaratech.smarket",
"java.util"
] | com.zaratech.smarket; java.util; | 1,235,629 |
protected void createUndoRedoActions() {
IUndoContext undoContext= getUndoContext();
if (undoContext != null) {
// Use actions provided by global undo/redo
// Create the undo action
OperationHistoryActionHandler undoAction= new UndoActionHandler(getEditorSite(), undoContext);
PlatformUI.getWorkbench().getHelpSystem().setHelp(undoAction, IAbstractTextEditorHelpContextIds.UNDO_ACTION);
undoAction.setActionDefinitionId(IWorkbenchCommandConstants.EDIT_UNDO);
registerUndoRedoAction(ITextEditorActionConstants.UNDO, undoAction);
// Create the redo action.
OperationHistoryActionHandler redoAction= new RedoActionHandler(getEditorSite(), undoContext);
PlatformUI.getWorkbench().getHelpSystem().setHelp(redoAction, IAbstractTextEditorHelpContextIds.REDO_ACTION);
redoAction.setActionDefinitionId(IWorkbenchCommandConstants.EDIT_REDO);
registerUndoRedoAction(ITextEditorActionConstants.REDO, redoAction);
// Install operation approvers
IOperationHistory history= OperationHistoryFactory.getOperationHistory();
// The first approver will prompt when operations affecting outside elements are to be undone or redone.
if (fNonLocalOperationApprover != null)
history.removeOperationApprover(fNonLocalOperationApprover);
fNonLocalOperationApprover= getUndoRedoOperationApprover(undoContext);
history.addOperationApprover(fNonLocalOperationApprover);
// The second approver will prompt from this editor when an undo is attempted on an operation
// and it is not the most recent operation in the editor.
if (fLinearUndoViolationApprover != null)
history.removeOperationApprover(fLinearUndoViolationApprover);
fLinearUndoViolationApprover= new LinearUndoViolationUserApprover(undoContext, this);
history.addOperationApprover(fLinearUndoViolationApprover);
} else {
// Use text operation actions (pre 3.1 style)
ResourceAction action;
if (getAction(ITextEditorActionConstants.UNDO) == null) {
action= new TextOperationAction(EditorMessages.getBundleForConstructedKeys(), "Editor.Undo.", this, ITextOperationTarget.UNDO); //$NON-NLS-1$
action.setHelpContextId(IAbstractTextEditorHelpContextIds.UNDO_ACTION);
action.setActionDefinitionId(IWorkbenchCommandConstants.EDIT_UNDO);
setAction(ITextEditorActionConstants.UNDO, action);
}
if (getAction(ITextEditorActionConstants.REDO) == null) {
action= new TextOperationAction(EditorMessages.getBundleForConstructedKeys(), "Editor.Redo.", this, ITextOperationTarget.REDO); //$NON-NLS-1$
action.setHelpContextId(IAbstractTextEditorHelpContextIds.REDO_ACTION);
action.setActionDefinitionId(IWorkbenchCommandConstants.EDIT_REDO);
setAction(ITextEditorActionConstants.REDO, action);
}
}
} | void function() { IUndoContext undoContext= getUndoContext(); if (undoContext != null) { OperationHistoryActionHandler undoAction= new UndoActionHandler(getEditorSite(), undoContext); PlatformUI.getWorkbench().getHelpSystem().setHelp(undoAction, IAbstractTextEditorHelpContextIds.UNDO_ACTION); undoAction.setActionDefinitionId(IWorkbenchCommandConstants.EDIT_UNDO); registerUndoRedoAction(ITextEditorActionConstants.UNDO, undoAction); OperationHistoryActionHandler redoAction= new RedoActionHandler(getEditorSite(), undoContext); PlatformUI.getWorkbench().getHelpSystem().setHelp(redoAction, IAbstractTextEditorHelpContextIds.REDO_ACTION); redoAction.setActionDefinitionId(IWorkbenchCommandConstants.EDIT_REDO); registerUndoRedoAction(ITextEditorActionConstants.REDO, redoAction); IOperationHistory history= OperationHistoryFactory.getOperationHistory(); if (fNonLocalOperationApprover != null) history.removeOperationApprover(fNonLocalOperationApprover); fNonLocalOperationApprover= getUndoRedoOperationApprover(undoContext); history.addOperationApprover(fNonLocalOperationApprover); if (fLinearUndoViolationApprover != null) history.removeOperationApprover(fLinearUndoViolationApprover); fLinearUndoViolationApprover= new LinearUndoViolationUserApprover(undoContext, this); history.addOperationApprover(fLinearUndoViolationApprover); } else { ResourceAction action; if (getAction(ITextEditorActionConstants.UNDO) == null) { action= new TextOperationAction(EditorMessages.getBundleForConstructedKeys(), STR, this, ITextOperationTarget.UNDO); action.setHelpContextId(IAbstractTextEditorHelpContextIds.UNDO_ACTION); action.setActionDefinitionId(IWorkbenchCommandConstants.EDIT_UNDO); setAction(ITextEditorActionConstants.UNDO, action); } if (getAction(ITextEditorActionConstants.REDO) == null) { action= new TextOperationAction(EditorMessages.getBundleForConstructedKeys(), STR, this, ITextOperationTarget.REDO); action.setHelpContextId(IAbstractTextEditorHelpContextIds.REDO_ACTION); action.setActionDefinitionId(IWorkbenchCommandConstants.EDIT_REDO); setAction(ITextEditorActionConstants.REDO, action); } } } | /**
* Creates this editor's undo/redo actions.
* <p>
* Subclasses may override or extend.</p>
*
* @since 3.1
*/ | Creates this editor's undo/redo actions. Subclasses may override or extend | createUndoRedoActions | {
"repo_name": "xiaguangme/simon_ide_tools",
"path": "02.eclipse_enhance/org.eclipse.ui.workbench.texteditor/src/org/eclipse/ui/texteditor/AbstractTextEditor.java",
"license": "apache-2.0",
"size": 247622
} | [
"org.eclipse.core.commands.operations.IOperationHistory",
"org.eclipse.core.commands.operations.IUndoContext",
"org.eclipse.core.commands.operations.OperationHistoryFactory",
"org.eclipse.jface.text.ITextOperationTarget",
"org.eclipse.ui.IWorkbenchCommandConstants",
"org.eclipse.ui.PlatformUI",
"org.eclipse.ui.operations.LinearUndoViolationUserApprover",
"org.eclipse.ui.operations.OperationHistoryActionHandler",
"org.eclipse.ui.operations.RedoActionHandler",
"org.eclipse.ui.operations.UndoActionHandler"
] | import org.eclipse.core.commands.operations.IOperationHistory; import org.eclipse.core.commands.operations.IUndoContext; import org.eclipse.core.commands.operations.OperationHistoryFactory; import org.eclipse.jface.text.ITextOperationTarget; import org.eclipse.ui.IWorkbenchCommandConstants; import org.eclipse.ui.PlatformUI; import org.eclipse.ui.operations.LinearUndoViolationUserApprover; import org.eclipse.ui.operations.OperationHistoryActionHandler; import org.eclipse.ui.operations.RedoActionHandler; import org.eclipse.ui.operations.UndoActionHandler; | import org.eclipse.core.commands.operations.*; import org.eclipse.jface.text.*; import org.eclipse.ui.*; import org.eclipse.ui.operations.*; | [
"org.eclipse.core",
"org.eclipse.jface",
"org.eclipse.ui"
] | org.eclipse.core; org.eclipse.jface; org.eclipse.ui; | 1,030,361 |
@Test
public void testMmTableCompaction() throws Exception {
// 1. Insert some rows into MM table
runStatementOnDriver("insert into " + TableExtended.MMTBL + "(a,b) values(1,2)");
runStatementOnDriver("insert into " + TableExtended.MMTBL + "(a,b) values(3,4)");
// There should be 2 delta directories
verifyDirAndResult(2);
// 2. Perform a MINOR compaction. Since nothing was aborted, subdirs should stay.
runStatementOnDriver("alter table "+ TableExtended.MMTBL + " compact 'MINOR'");
runWorker(hiveConf);
verifyDirAndResult(2);
// 3. Let a transaction be aborted
hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEROLLBACKTXN, true);
runStatementOnDriver("insert into " + TableExtended.MMTBL + "(a,b) values(5,6)");
hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEROLLBACKTXN, false);
// There should be 3 delta directories. The new one is the aborted one.
verifyDirAndResult(3);
// 4. Perform a MINOR compaction again. This time it will remove the subdir for aborted transaction.
runStatementOnDriver("alter table "+ TableExtended.MMTBL + " compact 'MINOR'");
runWorker(hiveConf);
// The worker should remove the subdir for aborted transaction
verifyDirAndResult(2);
// 5. Run Cleaner. Shouldn't impact anything.
runCleaner(hiveConf);
verifyDirAndResult(2);
} | void function() throws Exception { runStatementOnDriver(STR + TableExtended.MMTBL + STR); runStatementOnDriver(STR + TableExtended.MMTBL + STR); verifyDirAndResult(2); runStatementOnDriver(STR+ TableExtended.MMTBL + STR); runWorker(hiveConf); verifyDirAndResult(2); hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEROLLBACKTXN, true); runStatementOnDriver(STR + TableExtended.MMTBL + STR); hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODEROLLBACKTXN, false); verifyDirAndResult(3); runStatementOnDriver(STR+ TableExtended.MMTBL + STR); runWorker(hiveConf); verifyDirAndResult(2); runCleaner(hiveConf); verifyDirAndResult(2); } | /**
* Test compaction for Micro-managed table
* 1. Regular compaction shouldn't impact any valid subdirectories of MM tables
* 2. Compactions will only remove subdirectories for aborted transactions of MM tables, if any
* @throws Exception
*/ | Test compaction for Micro-managed table 1. Regular compaction shouldn't impact any valid subdirectories of MM tables 2. Compactions will only remove subdirectories for aborted transactions of MM tables, if any | testMmTableCompaction | {
"repo_name": "alanfgates/hive",
"path": "ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommandsForMmTable.java",
"license": "apache-2.0",
"size": 29907
} | [
"org.apache.hadoop.hive.conf.HiveConf"
] | import org.apache.hadoop.hive.conf.HiveConf; | import org.apache.hadoop.hive.conf.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 694,123 |
public void convertReactomeToGPMLByID(Long dbID, File dir,
Boolean saveatxml) {
GKInstance pathway;
try {
pathway = adaptor.fetchInstance(dbID);
String fileName = AbstractConverterFromReactome
.getFileName(pathway);
if (saveatxml) {
File atxmlfile = new File(dir, fileName + ".atxml");
atxmlfile.createNewFile();
r2g3Converter.queryATXML(pathway, atxmlfile);
}
File gpmlfile = new File(dir, fileName + ".gpml");
gpmlfile.createNewFile();
convertReactomeToGPML(pathway, gpmlfile);
} catch (Exception e) {
e.printStackTrace();
}
} | void function(Long dbID, File dir, Boolean saveatxml) { GKInstance pathway; try { pathway = adaptor.fetchInstance(dbID); String fileName = AbstractConverterFromReactome .getFileName(pathway); if (saveatxml) { File atxmlfile = new File(dir, fileName + STR); atxmlfile.createNewFile(); r2g3Converter.queryATXML(pathway, atxmlfile); } File gpmlfile = new File(dir, fileName + ".gpml"); gpmlfile.createNewFile(); convertReactomeToGPML(pathway, gpmlfile); } catch (Exception e) { e.printStackTrace(); } } | /**
* Convert Reactome pathways using their IDs
*
* @param dbID
* Stable ID of the pathway
* @param dir
* Directory to save converted gpml file
* @param saveatxml
* Boolean true if atxml files should be saved as well
*/ | Convert Reactome pathways using their IDs | convertReactomeToGPMLByID | {
"repo_name": "pennatula/reactome2gpml-converter",
"path": "src/org/gk/gpml/CLIConverter.java",
"license": "apache-2.0",
"size": 7540
} | [
"java.io.File",
"org.gk.model.GKInstance",
"org.reactome.convert.common.AbstractConverterFromReactome"
] | import java.io.File; import org.gk.model.GKInstance; import org.reactome.convert.common.AbstractConverterFromReactome; | import java.io.*; import org.gk.model.*; import org.reactome.convert.common.*; | [
"java.io",
"org.gk.model",
"org.reactome.convert"
] | java.io; org.gk.model; org.reactome.convert; | 1,481,972 |
public void genImage() {
if (image == null) {
ImageBuffer buffer = new ImageBuffer(128,16);
for (int i=0;i<128;i++) {
Color col = getColorAt(i / 128.0f);
for (int j=0;j<16;j++) {
buffer.setRGBA(i, j, col.getRedByte(), col.getGreenByte(), col.getBlueByte(), col.getAlphaByte());
}
}
image = buffer.getImage();
}
}
| void function() { if (image == null) { ImageBuffer buffer = new ImageBuffer(128,16); for (int i=0;i<128;i++) { Color col = getColorAt(i / 128.0f); for (int j=0;j<16;j++) { buffer.setRGBA(i, j, col.getRedByte(), col.getGreenByte(), col.getBlueByte(), col.getAlphaByte()); } } image = buffer.getImage(); } } | /**
* Generate the image used for texturing the gradient across shapes
*/ | Generate the image used for texturing the gradient across shapes | genImage | {
"repo_name": "dbank-so/fadableUnicodeFont",
"path": "src/org/newdawn/slick/svg/Gradient.java",
"license": "bsd-3-clause",
"size": 6855
} | [
"org.newdawn.slick.Color",
"org.newdawn.slick.ImageBuffer"
] | import org.newdawn.slick.Color; import org.newdawn.slick.ImageBuffer; | import org.newdawn.slick.*; | [
"org.newdawn.slick"
] | org.newdawn.slick; | 2,826,420 |
public com.google.common.util.concurrent.ListenableFuture<
com.google.containeranalysis.v1beta1.ScanConfig>
getScanConfig(com.google.containeranalysis.v1beta1.GetScanConfigRequest request) {
return futureUnaryCall(
getChannel().newCall(getGetScanConfigMethodHelper(), getCallOptions()), request);
} | com.google.common.util.concurrent.ListenableFuture< com.google.containeranalysis.v1beta1.ScanConfig> function(com.google.containeranalysis.v1beta1.GetScanConfigRequest request) { return futureUnaryCall( getChannel().newCall(getGetScanConfigMethodHelper(), getCallOptions()), request); } | /**
*
*
* <pre>
* Gets the specified scan configuration.
* </pre>
*/ | <code> Gets the specified scan configuration. </code> | getScanConfig | {
"repo_name": "vam-google/google-cloud-java",
"path": "google-api-grpc/grpc-google-cloud-containeranalysis-v1beta1/src/main/java/com/google/containeranalysis/v1beta1/ContainerAnalysisV1Beta1Grpc.java",
"license": "apache-2.0",
"size": 47183
} | [
"io.grpc.stub.ClientCalls"
] | import io.grpc.stub.ClientCalls; | import io.grpc.stub.*; | [
"io.grpc.stub"
] | io.grpc.stub; | 336,982 |
public boolean isHttpsTransportOnly(Policy policy) throws SecurityConfigException {
// When there is a transport binding sec policy assertion,
// the service should be exposed only via HTTPS
boolean httpsRequired = false;
try {
Iterator alternatives = policy.getAlternatives();
if (alternatives.hasNext()) {
List it = (List) alternatives.next();
RampartPolicyData rampartPolicyData = RampartPolicyBuilder.build(it);
if (rampartPolicyData.isTransportBinding()) {
httpsRequired = true;
} else if (rampartPolicyData.isSymmetricBinding()) {
Token encrToken = rampartPolicyData.getEncryptionToken();
if (encrToken instanceof SecureConversationToken) {
Policy bsPol = ((SecureConversationToken) encrToken).getBootstrapPolicy();
Iterator alts = bsPol.getAlternatives();
List bsIt = (List) alts.next();
RampartPolicyData bsRampartPolicyData = RampartPolicyBuilder.build(bsIt);
httpsRequired = bsRampartPolicyData.isTransportBinding();
}
}
}
} catch (WSSPolicyException e) {
log.error("Error in checking http transport only", e);
throw new SecurityConfigException("Error in checking http transport only", e);
}
return httpsRequired;
} | boolean function(Policy policy) throws SecurityConfigException { boolean httpsRequired = false; try { Iterator alternatives = policy.getAlternatives(); if (alternatives.hasNext()) { List it = (List) alternatives.next(); RampartPolicyData rampartPolicyData = RampartPolicyBuilder.build(it); if (rampartPolicyData.isTransportBinding()) { httpsRequired = true; } else if (rampartPolicyData.isSymmetricBinding()) { Token encrToken = rampartPolicyData.getEncryptionToken(); if (encrToken instanceof SecureConversationToken) { Policy bsPol = ((SecureConversationToken) encrToken).getBootstrapPolicy(); Iterator alts = bsPol.getAlternatives(); List bsIt = (List) alts.next(); RampartPolicyData bsRampartPolicyData = RampartPolicyBuilder.build(bsIt); httpsRequired = bsRampartPolicyData.isTransportBinding(); } } } } catch (WSSPolicyException e) { log.error(STR, e); throw new SecurityConfigException(STR, e); } return httpsRequired; } | /**
* Check the policy to see whether the service should only be exposed in
* HTTPS
*
* @param policy service policy
* @return returns true if the service should only be exposed in HTTPS
 * @throws org.wso2.carbon.security.SecurityConfigException if an error occurs while checking the policy
*/ | Check the policy to see whether the service should only be exposed in HTTPS | isHttpsTransportOnly | {
"repo_name": "hpmtissera/carbon-identity",
"path": "components/security-mgt/org.wso2.carbon.security.mgt/src/main/java/org/wso2/carbon/security/config/SecurityConfigAdmin.java",
"license": "apache-2.0",
"size": 64683
} | [
"java.util.Iterator",
"java.util.List",
"org.apache.neethi.Policy",
"org.apache.rampart.policy.RampartPolicyBuilder",
"org.apache.rampart.policy.RampartPolicyData",
"org.apache.ws.secpolicy.WSSPolicyException",
"org.apache.ws.secpolicy.model.SecureConversationToken",
"org.apache.ws.secpolicy.model.Token",
"org.wso2.carbon.security.SecurityConfigException"
] | import java.util.Iterator; import java.util.List; import org.apache.neethi.Policy; import org.apache.rampart.policy.RampartPolicyBuilder; import org.apache.rampart.policy.RampartPolicyData; import org.apache.ws.secpolicy.WSSPolicyException; import org.apache.ws.secpolicy.model.SecureConversationToken; import org.apache.ws.secpolicy.model.Token; import org.wso2.carbon.security.SecurityConfigException; | import java.util.*; import org.apache.neethi.*; import org.apache.rampart.policy.*; import org.apache.ws.secpolicy.*; import org.apache.ws.secpolicy.model.*; import org.wso2.carbon.security.*; | [
"java.util",
"org.apache.neethi",
"org.apache.rampart",
"org.apache.ws",
"org.wso2.carbon"
] | java.util; org.apache.neethi; org.apache.rampart; org.apache.ws; org.wso2.carbon; | 537,785 |
public void setAttributes(Map<String, String> attributes) {
if (attributes != null) {
this.attributes = attributes;
}
} | void function(Map<String, String> attributes) { if (attributes != null) { this.attributes = attributes; } } | /**
* Set a map of attributes to the resource.
* @param attributes resource attributes
*/ | Set a map of attributes to the resource | setAttributes | {
"repo_name": "littlezhou/hadoop",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceInformation.java",
"license": "apache-2.0",
"size": 11701
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 2,364,866 |
public void testPutAll() {
ConcurrentHashMap empty = new ConcurrentHashMap();
ConcurrentHashMap map = map5();
empty.putAll(map);
assertEquals(5, empty.size());
assertTrue(empty.containsKey(one));
assertTrue(empty.containsKey(two));
assertTrue(empty.containsKey(three));
assertTrue(empty.containsKey(four));
assertTrue(empty.containsKey(five));
} | void function() { ConcurrentHashMap empty = new ConcurrentHashMap(); ConcurrentHashMap map = map5(); empty.putAll(map); assertEquals(5, empty.size()); assertTrue(empty.containsKey(one)); assertTrue(empty.containsKey(two)); assertTrue(empty.containsKey(three)); assertTrue(empty.containsKey(four)); assertTrue(empty.containsKey(five)); } | /**
* putAll adds all key-value pairs from the given map
*/ | putAll adds all key-value pairs from the given map | testPutAll | {
"repo_name": "YouDiSN/OpenJDK-Research",
"path": "jdk9/jdk/test/java/util/concurrent/tck/ConcurrentHashMapTest.java",
"license": "gpl-2.0",
"size": 27002
} | [
"java.util.concurrent.ConcurrentHashMap"
] | import java.util.concurrent.ConcurrentHashMap; | import java.util.concurrent.*; | [
"java.util"
] | java.util; | 240,473 |
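A runnable, JDK-only illustration of the putAll behaviour the test above asserts:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class PutAllSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> source = new ConcurrentHashMap<>(Map.of("one", 1, "two", 2));
        ConcurrentHashMap<String, Integer> empty = new ConcurrentHashMap<>();
        empty.putAll(source);                          // copies every key-value pair
        System.out.println(empty.size());              // 2
        System.out.println(empty.containsKey("one"));  // true
    }
}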
public void doPrint() {
PrinterJob pj = PrinterJob.getPrinterJob();
pj.setJobName(" SPMF print time series");
| void function() { PrinterJob pj = PrinterJob.getPrinterJob(); pj.setJobName(STR); | /**
* This method is for printing the panel
*/ | This method is for printing the panel | doPrint | {
"repo_name": "aocalderon/PhD",
"path": "Y2Q2/Research/Code/SPMF/src/ca/pfv/spmf/gui/instanceviewer/InstancesViewerPanel.java",
"license": "lgpl-3.0",
"size": 14642
} | [
"java.awt.print.PrinterJob"
] | import java.awt.print.PrinterJob; | import java.awt.print.*; | [
"java.awt"
] | java.awt; | 1,100,941 |
public static IWMainApplication getIWMainApplication(
ServletContext application) {
return (IWMainApplication) application
.getAttribute(IWMainApplication.APPLICATION_BEAN_ID);
} | static IWMainApplication function( ServletContext application) { return (IWMainApplication) application .getAttribute(IWMainApplication.APPLICATION_BEAN_ID); } | /**
* Gets the application instance from the given ServletContext instance
 * @param application the servlet context holding the application bean
 * @return the IWMainApplication instance stored in the given context
*/ | Gets the application instance from the given ServletContext instance | getIWMainApplication | {
"repo_name": "idega/platform2",
"path": "src/com/idega/idegaweb/IWMainApplication.java",
"license": "gpl-3.0",
"size": 74900
} | [
"javax.servlet.ServletContext"
] | import javax.servlet.ServletContext; | import javax.servlet.*; | [
"javax.servlet"
] | javax.servlet; | 628,669 |
public Adapter createModelElementAdapter()
{
return null;
} | Adapter function() { return null; } | /**
* Creates a new adapter for an object of class '{@link org.eclipse.oomph.base.ModelElement <em>Model Element</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.eclipse.oomph.base.ModelElement
* @generated
*/ | Creates a new adapter for an object of class '<code>org.eclipse.oomph.base.ModelElement Model Element</code>'. This default implementation returns null so that we can easily ignore cases; it's useful to ignore a case when inheritance will catch all the cases anyway. | createModelElementAdapter | {
"repo_name": "cupid-ide/oomph",
"path": "plugins/org.earthsystemmodeling.oomph.createsyncproject/src/org/earthsystemmodeling/oomph/createsyncproject/util/CreateSyncProjectAdapterFactory.java",
"license": "mit",
"size": 6624
} | [
"org.eclipse.emf.common.notify.Adapter"
] | import org.eclipse.emf.common.notify.Adapter; | import org.eclipse.emf.common.notify.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,426,968 |
private static <T> T serializeAndDeserialize(T serialized) throws Exception {
File serializeFile = new File("_serialized");
// serialize
try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(serializeFile))) {
out.writeObject(serialized);
}
print("-- serialized");
// deserialize
try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(serializeFile))) {
@SuppressWarnings("unchecked")
T deserialized = (T) in.readObject();
print("-- deserialized");
return deserialized;
}
} | static <T> T function(T serialized) throws Exception { File serializeFile = new File(STR); try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(serializeFile))) { out.writeObject(serialized); } print(STR); try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(serializeFile))) { @SuppressWarnings(STR) T deserialized = (T) in.readObject(); print(STR); return deserialized; } } | /**
* Serializes the specified instance to disk. Then deserializes the file and returns the deserialized value.
*
* @param serialized
* the instance to be serialized
* @return the deserialized instance
* @throws Exception
* if (de)serialization fails
*/ | Serializes the specified instance to disk. Then deserializes the file and returns the deserialized value | serializeAndDeserialize | {
"repo_name": "CodeFX-org/demo-serialization-proxy-pattern",
"path": "src/org/codefx/lab/serialization/proxypattern/Demo.java",
"license": "unlicense",
"size": 2917
} | [
"java.io.File",
"java.io.FileInputStream",
"java.io.FileOutputStream",
"java.io.ObjectInputStream",
"java.io.ObjectOutputStream"
] | import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; | import java.io.*; | [
"java.io"
] | java.io; | 903,283 |
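A minimal self-contained sketch of the same java.io serialization round trip, using an in-memory byte array instead of the temporary "_serialized" file; the class name and sample payload below are invented for illustration:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;

public class RoundTripSketch {

    // Serialize the value to a byte array, then read it back with ObjectInputStream.
    static <T extends Serializable> T roundTrip(T value) throws Exception {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(buffer)) {
            out.writeObject(value);
        }
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
            @SuppressWarnings("unchecked")
            T copy = (T) in.readObject();
            return copy;
        }
    }

    public static void main(String[] args) throws Exception {
        ArrayList<String> original = new ArrayList<>();
        original.add("alpha");
        original.add("beta");
        ArrayList<String> copy = roundTrip(original);
        System.out.println("deserialized copy: " + copy + ", same instance: " + (copy == original));
    }
}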
private void saveAssignmentSupplementItem(SessionState state,
ParameterParser params, String siteId, AssignmentEdit a) {
// assignment supplement items
String aId = a.getId();
//model answer
if (state.getAttribute(MODELANSWER_TO_DELETE) != null && "true".equals((String) state.getAttribute(MODELANSWER_TO_DELETE)))
{
// to delete the model answer
AssignmentModelAnswerItem mAnswer = m_assignmentSupplementItemService.getModelAnswer(aId);
if (mAnswer != null)
{
m_assignmentSupplementItemService.cleanAttachment(mAnswer);
m_assignmentSupplementItemService.removeModelAnswer(mAnswer);
}
}
else if (state.getAttribute(MODELANSWER_TEXT) != null)
{
// edit/add model answer
AssignmentModelAnswerItem mAnswer = m_assignmentSupplementItemService.getModelAnswer(aId);
if (mAnswer == null)
{
mAnswer = m_assignmentSupplementItemService.newModelAnswer();
m_assignmentSupplementItemService.saveModelAnswer(mAnswer);
}
mAnswer.setAssignmentId(a.getId());
mAnswer.setText((String) state.getAttribute(MODELANSWER_TEXT));
mAnswer.setShowTo(state.getAttribute(MODELANSWER_SHOWTO) != null ? Integer.parseInt((String) state.getAttribute(MODELANSWER_SHOWTO)) : 0);
mAnswer.setAttachmentSet(getAssignmentSupplementItemAttachment(state, mAnswer, MODELANSWER_ATTACHMENTS));
m_assignmentSupplementItemService.saveModelAnswer(mAnswer);
}
// note
if (state.getAttribute(NOTE_TO_DELETE) != null && "true".equals((String) state.getAttribute(NOTE_TO_DELETE)))
{
// to remove note item
AssignmentNoteItem nNote = m_assignmentSupplementItemService.getNoteItem(aId);
if (nNote != null)
m_assignmentSupplementItemService.removeNoteItem(nNote);
}
else if (state.getAttribute(NOTE_TEXT) != null)
{
// edit/add private note
AssignmentNoteItem nNote = m_assignmentSupplementItemService.getNoteItem(aId);
if (nNote == null)
nNote = m_assignmentSupplementItemService.newNoteItem();
nNote.setAssignmentId(a.getId());
nNote.setNote((String) state.getAttribute(NOTE_TEXT));
nNote.setShareWith(state.getAttribute(NOTE_SHAREWITH) != null ? Integer.parseInt((String) state.getAttribute(NOTE_SHAREWITH)) : 0);
nNote.setCreatorId(UserDirectoryService.getCurrentUser().getId());
m_assignmentSupplementItemService.saveNoteItem(nNote);
}
// all purpose
if (state.getAttribute(ALLPURPOSE_TO_DELETE) != null && "true".equals((String) state.getAttribute(ALLPURPOSE_TO_DELETE)))
{
// to remove allPurpose item
AssignmentAllPurposeItem nAllPurpose = m_assignmentSupplementItemService.getAllPurposeItem(aId);
if (nAllPurpose != null)
{
m_assignmentSupplementItemService.cleanAttachment(nAllPurpose);
m_assignmentSupplementItemService.cleanAllPurposeItemAccess(nAllPurpose);
m_assignmentSupplementItemService.removeAllPurposeItem(nAllPurpose);
}
}
else if (state.getAttribute(ALLPURPOSE_TITLE) != null)
{
// edit/add private note
AssignmentAllPurposeItem nAllPurpose = m_assignmentSupplementItemService.getAllPurposeItem(aId);
if (nAllPurpose == null)
{
nAllPurpose = m_assignmentSupplementItemService.newAllPurposeItem();
m_assignmentSupplementItemService.saveAllPurposeItem(nAllPurpose);
}
nAllPurpose.setAssignmentId(a.getId());
nAllPurpose.setTitle((String) state.getAttribute(ALLPURPOSE_TITLE));
nAllPurpose.setText((String) state.getAttribute(ALLPURPOSE_TEXT));
boolean allPurposeShowFrom = state.getAttribute(ALLPURPOSE_SHOW_FROM) != null ? ((Boolean) state.getAttribute(ALLPURPOSE_SHOW_FROM)).booleanValue() : false;
boolean allPurposeShowTo = state.getAttribute(ALLPURPOSE_SHOW_TO) != null ? ((Boolean) state.getAttribute(ALLPURPOSE_SHOW_TO)).booleanValue() : false;
boolean allPurposeHide = state.getAttribute(ALLPURPOSE_HIDE) != null ? ((Boolean) state.getAttribute(ALLPURPOSE_HIDE)).booleanValue() : false;
nAllPurpose.setHide(allPurposeHide);
// save the release and retract dates
if (allPurposeShowFrom && !allPurposeHide)
{
// save release date
Time releaseTime = getTimeFromState(state, ALLPURPOSE_RELEASE_MONTH, ALLPURPOSE_RELEASE_DAY, ALLPURPOSE_RELEASE_YEAR, ALLPURPOSE_RELEASE_HOUR, ALLPURPOSE_RELEASE_MIN, ALLPURPOSE_RELEASE_AMPM);
GregorianCalendar cal = new GregorianCalendar();
cal.setTimeInMillis(releaseTime.getTime());
nAllPurpose.setReleaseDate(cal.getTime());
}
else
{
nAllPurpose.setReleaseDate(null);
}
if (allPurposeShowTo && !allPurposeHide)
{
// save retract date
Time retractTime = getTimeFromState(state, ALLPURPOSE_RETRACT_MONTH, ALLPURPOSE_RETRACT_DAY, ALLPURPOSE_RETRACT_YEAR, ALLPURPOSE_RETRACT_HOUR, ALLPURPOSE_RETRACT_MIN, ALLPURPOSE_RETRACT_AMPM);
GregorianCalendar cal = new GregorianCalendar();
cal.setTimeInMillis(retractTime.getTime());
nAllPurpose.setRetractDate(cal.getTime());
}
else
{
nAllPurpose.setRetractDate(null);
}
nAllPurpose.setAttachmentSet(getAssignmentSupplementItemAttachment(state, nAllPurpose, ALLPURPOSE_ATTACHMENTS));
// clean the access list first
if (state.getAttribute(ALLPURPOSE_ACCESS) != null)
{
// get the access settings
List<String> accessList = (List<String>) state.getAttribute(ALLPURPOSE_ACCESS);
m_assignmentSupplementItemService.cleanAllPurposeItemAccess(nAllPurpose);
Set<AssignmentAllPurposeItemAccess> accessSet = new HashSet<AssignmentAllPurposeItemAccess>();
try
{
AuthzGroup realm = AuthzGroupService.getAuthzGroup(SiteService.siteReference(siteId));
Set<Role> roles = realm.getRoles();
for(Iterator iRoles = roles.iterator(); iRoles.hasNext();)
{
// iterator through roles first
Role r = (Role) iRoles.next();
if (accessList.contains(r.getId()))
{
AssignmentAllPurposeItemAccess access = m_assignmentSupplementItemService.newAllPurposeItemAccess();
access.setAccess(r.getId());
access.setAssignmentAllPurposeItem(nAllPurpose);
m_assignmentSupplementItemService.saveAllPurposeItemAccess(access);
accessSet.add(access);
}
else
{
// if the role is not selected, iterate through the users with this role
Set userIds = realm.getUsersHasRole(r.getId());
for(Iterator iUserIds = userIds.iterator(); iUserIds.hasNext();)
{
String userId = (String) iUserIds.next();
if (accessList.contains(userId))
{
AssignmentAllPurposeItemAccess access = m_assignmentSupplementItemService.newAllPurposeItemAccess();
access.setAccess(userId);
access.setAssignmentAllPurposeItem(nAllPurpose);
m_assignmentSupplementItemService.saveAllPurposeItemAccess(access);
accessSet.add(access);
}
}
}
}
}
catch (Exception e)
{
M_log.warn(this + ":post_save_assignment " + e.toString() + "error finding authzGroup for = " + siteId);
}
nAllPurpose.setAccessSet(accessSet);
}
m_assignmentSupplementItemService.saveAllPurposeItem(nAllPurpose);
}
} | void function(SessionState state, ParameterParser params, String siteId, AssignmentEdit a) { String aId = a.getId(); if (state.getAttribute(MODELANSWER_TO_DELETE) != null && "true".equals((String) state.getAttribute(MODELANSWER_TO_DELETE))) { AssignmentModelAnswerItem mAnswer = m_assignmentSupplementItemService.getModelAnswer(aId); if (mAnswer != null) { m_assignmentSupplementItemService.cleanAttachment(mAnswer); m_assignmentSupplementItemService.removeModelAnswer(mAnswer); } } else if (state.getAttribute(MODELANSWER_TEXT) != null) { AssignmentModelAnswerItem mAnswer = m_assignmentSupplementItemService.getModelAnswer(aId); if (mAnswer == null) { mAnswer = m_assignmentSupplementItemService.newModelAnswer(); m_assignmentSupplementItemService.saveModelAnswer(mAnswer); } mAnswer.setAssignmentId(a.getId()); mAnswer.setText((String) state.getAttribute(MODELANSWER_TEXT)); mAnswer.setShowTo(state.getAttribute(MODELANSWER_SHOWTO) != null ? Integer.parseInt((String) state.getAttribute(MODELANSWER_SHOWTO)) : 0); mAnswer.setAttachmentSet(getAssignmentSupplementItemAttachment(state, mAnswer, MODELANSWER_ATTACHMENTS)); m_assignmentSupplementItemService.saveModelAnswer(mAnswer); } if (state.getAttribute(NOTE_TO_DELETE) != null && "true".equals((String) state.getAttribute(NOTE_TO_DELETE))) { AssignmentNoteItem nNote = m_assignmentSupplementItemService.getNoteItem(aId); if (nNote != null) m_assignmentSupplementItemService.removeNoteItem(nNote); } else if (state.getAttribute(NOTE_TEXT) != null) { AssignmentNoteItem nNote = m_assignmentSupplementItemService.getNoteItem(aId); if (nNote == null) nNote = m_assignmentSupplementItemService.newNoteItem(); nNote.setAssignmentId(a.getId()); nNote.setNote((String) state.getAttribute(NOTE_TEXT)); nNote.setShareWith(state.getAttribute(NOTE_SHAREWITH) != null ? Integer.parseInt((String) state.getAttribute(NOTE_SHAREWITH)) : 0); nNote.setCreatorId(UserDirectoryService.getCurrentUser().getId()); m_assignmentSupplementItemService.saveNoteItem(nNote); } if (state.getAttribute(ALLPURPOSE_TO_DELETE) != null && "true".equals((String) state.getAttribute(ALLPURPOSE_TO_DELETE))) { AssignmentAllPurposeItem nAllPurpose = m_assignmentSupplementItemService.getAllPurposeItem(aId); if (nAllPurpose != null) { m_assignmentSupplementItemService.cleanAttachment(nAllPurpose); m_assignmentSupplementItemService.cleanAllPurposeItemAccess(nAllPurpose); m_assignmentSupplementItemService.removeAllPurposeItem(nAllPurpose); } } else if (state.getAttribute(ALLPURPOSE_TITLE) != null) { AssignmentAllPurposeItem nAllPurpose = m_assignmentSupplementItemService.getAllPurposeItem(aId); if (nAllPurpose == null) { nAllPurpose = m_assignmentSupplementItemService.newAllPurposeItem(); m_assignmentSupplementItemService.saveAllPurposeItem(nAllPurpose); } nAllPurpose.setAssignmentId(a.getId()); nAllPurpose.setTitle((String) state.getAttribute(ALLPURPOSE_TITLE)); nAllPurpose.setText((String) state.getAttribute(ALLPURPOSE_TEXT)); boolean allPurposeShowFrom = state.getAttribute(ALLPURPOSE_SHOW_FROM) != null ? ((Boolean) state.getAttribute(ALLPURPOSE_SHOW_FROM)).booleanValue() : false; boolean allPurposeShowTo = state.getAttribute(ALLPURPOSE_SHOW_TO) != null ? ((Boolean) state.getAttribute(ALLPURPOSE_SHOW_TO)).booleanValue() : false; boolean allPurposeHide = state.getAttribute(ALLPURPOSE_HIDE) != null ? 
((Boolean) state.getAttribute(ALLPURPOSE_HIDE)).booleanValue() : false; nAllPurpose.setHide(allPurposeHide); if (allPurposeShowFrom && !allPurposeHide) { Time releaseTime = getTimeFromState(state, ALLPURPOSE_RELEASE_MONTH, ALLPURPOSE_RELEASE_DAY, ALLPURPOSE_RELEASE_YEAR, ALLPURPOSE_RELEASE_HOUR, ALLPURPOSE_RELEASE_MIN, ALLPURPOSE_RELEASE_AMPM); GregorianCalendar cal = new GregorianCalendar(); cal.setTimeInMillis(releaseTime.getTime()); nAllPurpose.setReleaseDate(cal.getTime()); } else { nAllPurpose.setReleaseDate(null); } if (allPurposeShowTo && !allPurposeHide) { Time retractTime = getTimeFromState(state, ALLPURPOSE_RETRACT_MONTH, ALLPURPOSE_RETRACT_DAY, ALLPURPOSE_RETRACT_YEAR, ALLPURPOSE_RETRACT_HOUR, ALLPURPOSE_RETRACT_MIN, ALLPURPOSE_RETRACT_AMPM); GregorianCalendar cal = new GregorianCalendar(); cal.setTimeInMillis(retractTime.getTime()); nAllPurpose.setRetractDate(cal.getTime()); } else { nAllPurpose.setRetractDate(null); } nAllPurpose.setAttachmentSet(getAssignmentSupplementItemAttachment(state, nAllPurpose, ALLPURPOSE_ATTACHMENTS)); if (state.getAttribute(ALLPURPOSE_ACCESS) != null) { List<String> accessList = (List<String>) state.getAttribute(ALLPURPOSE_ACCESS); m_assignmentSupplementItemService.cleanAllPurposeItemAccess(nAllPurpose); Set<AssignmentAllPurposeItemAccess> accessSet = new HashSet<AssignmentAllPurposeItemAccess>(); try { AuthzGroup realm = AuthzGroupService.getAuthzGroup(SiteService.siteReference(siteId)); Set<Role> roles = realm.getRoles(); for(Iterator iRoles = roles.iterator(); iRoles.hasNext();) { Role r = (Role) iRoles.next(); if (accessList.contains(r.getId())) { AssignmentAllPurposeItemAccess access = m_assignmentSupplementItemService.newAllPurposeItemAccess(); access.setAccess(r.getId()); access.setAssignmentAllPurposeItem(nAllPurpose); m_assignmentSupplementItemService.saveAllPurposeItemAccess(access); accessSet.add(access); } else { Set userIds = realm.getUsersHasRole(r.getId()); for(Iterator iUserIds = userIds.iterator(); iUserIds.hasNext();) { String userId = (String) iUserIds.next(); if (accessList.contains(userId)) { AssignmentAllPurposeItemAccess access = m_assignmentSupplementItemService.newAllPurposeItemAccess(); access.setAccess(userId); access.setAssignmentAllPurposeItem(nAllPurpose); m_assignmentSupplementItemService.saveAllPurposeItemAccess(access); accessSet.add(access); } } } } } catch (Exception e) { M_log.warn(this + STR + e.toString() + STR + siteId); } nAllPurpose.setAccessSet(accessSet); } m_assignmentSupplementItemService.saveAllPurposeItem(nAllPurpose); } } | /**
* Saves the assignment's supplement items (model answer, private note, all-purpose item).
* @param state
* @param params
* @param siteId
* @param a
*/ | Saves the assignment's supplement items (model answer, private note, all-purpose item) | saveAssignmentSupplementItem | {
"repo_name": "harfalm/Sakai-10.1",
"path": "assignment/assignment-tool/tool/src/java/org/sakaiproject/assignment/tool/AssignmentAction.java",
"license": "apache-2.0",
"size": 605178
} | [
"java.util.GregorianCalendar",
"java.util.HashSet",
"java.util.Iterator",
"java.util.List",
"java.util.Set",
"org.sakaiproject.assignment.api.AssignmentEdit",
"org.sakaiproject.assignment.api.model.AssignmentAllPurposeItem",
"org.sakaiproject.assignment.api.model.AssignmentAllPurposeItemAccess",
"org.sakaiproject.assignment.api.model.AssignmentModelAnswerItem",
"org.sakaiproject.assignment.api.model.AssignmentNoteItem",
"org.sakaiproject.authz.api.AuthzGroup",
"org.sakaiproject.authz.api.Role",
"org.sakaiproject.authz.cover.AuthzGroupService",
"org.sakaiproject.event.api.SessionState",
"org.sakaiproject.site.cover.SiteService",
"org.sakaiproject.time.api.Time",
"org.sakaiproject.user.cover.UserDirectoryService",
"org.sakaiproject.util.ParameterParser"
] | import java.util.GregorianCalendar; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.sakaiproject.assignment.api.AssignmentEdit; import org.sakaiproject.assignment.api.model.AssignmentAllPurposeItem; import org.sakaiproject.assignment.api.model.AssignmentAllPurposeItemAccess; import org.sakaiproject.assignment.api.model.AssignmentModelAnswerItem; import org.sakaiproject.assignment.api.model.AssignmentNoteItem; import org.sakaiproject.authz.api.AuthzGroup; import org.sakaiproject.authz.api.Role; import org.sakaiproject.authz.cover.AuthzGroupService; import org.sakaiproject.event.api.SessionState; import org.sakaiproject.site.cover.SiteService; import org.sakaiproject.time.api.Time; import org.sakaiproject.user.cover.UserDirectoryService; import org.sakaiproject.util.ParameterParser; | import java.util.*; import org.sakaiproject.assignment.api.*; import org.sakaiproject.assignment.api.model.*; import org.sakaiproject.authz.api.*; import org.sakaiproject.authz.cover.*; import org.sakaiproject.event.api.*; import org.sakaiproject.site.cover.*; import org.sakaiproject.time.api.*; import org.sakaiproject.user.cover.*; import org.sakaiproject.util.*; | [
"java.util",
"org.sakaiproject.assignment",
"org.sakaiproject.authz",
"org.sakaiproject.event",
"org.sakaiproject.site",
"org.sakaiproject.time",
"org.sakaiproject.user",
"org.sakaiproject.util"
] | java.util; org.sakaiproject.assignment; org.sakaiproject.authz; org.sakaiproject.event; org.sakaiproject.site; org.sakaiproject.time; org.sakaiproject.user; org.sakaiproject.util; | 1,498,568 |
protected void addLineToMappings(int lineIndex)
{
for (SourceMapMapping mapping : sourceMapMappings)
{
FilePosition destStartPosition = mapping.destStartPosition;
int startLine = destStartPosition.getLine();
if(startLine > lineIndex)
{
mapping.destStartPosition = new FilePosition(startLine + 1, destStartPosition.getColumn());
FilePosition destEndPosition = mapping.destEndPosition;
mapping.destEndPosition = new FilePosition(destEndPosition.getLine() + 1, destEndPosition.getColumn());
}
}
} | void function(int lineIndex) { for (SourceMapMapping mapping : sourceMapMappings) { FilePosition destStartPosition = mapping.destStartPosition; int startLine = destStartPosition.getLine(); if(startLine > lineIndex) { mapping.destStartPosition = new FilePosition(startLine + 1, destStartPosition.getColumn()); FilePosition destEndPosition = mapping.destEndPosition; mapping.destEndPosition = new FilePosition(destEndPosition.getLine() + 1, destEndPosition.getColumn()); } } } | /**
* Adjusts the line numbers saved in the source map when a line should be
* added during post processing.
*
* @param lineIndex
*/ | Adjusts the line numbers saved in the source map when a line should be added during post processing | addLineToMappings | {
"repo_name": "greg-dove/flex-falcon",
"path": "compiler-jx/src/main/java/org/apache/flex/compiler/internal/codegen/js/JSEmitter.java",
"license": "apache-2.0",
"size": 18298
} | [
"com.google.debugging.sourcemap.FilePosition"
] | import com.google.debugging.sourcemap.FilePosition; | import com.google.debugging.sourcemap.*; | [
"com.google.debugging"
] | com.google.debugging; | 1,942,846 |
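A small stand-alone sketch of the same line-shifting logic; the FilePosition and Mapping classes below are local stand-ins, not the closure-compiler source map types used by the record above:

import java.util.ArrayList;
import java.util.List;

public class ShiftMappingsSketch {

    // Minimal stand-ins for the mapping/position types used by the emitter.
    static class FilePosition {
        final int line, column;
        FilePosition(int line, int column) { this.line = line; this.column = column; }
        public String toString() { return line + ":" + column; }
    }

    static class Mapping {
        FilePosition start, end;
        Mapping(FilePosition start, FilePosition end) { this.start = start; this.end = end; }
    }

    // Push every mapping that starts after 'lineIndex' down by one line.
    static void addLineToMappings(List<Mapping> mappings, int lineIndex) {
        for (Mapping m : mappings) {
            if (m.start.line > lineIndex) {
                m.start = new FilePosition(m.start.line + 1, m.start.column);
                m.end = new FilePosition(m.end.line + 1, m.end.column);
            }
        }
    }

    public static void main(String[] args) {
        List<Mapping> mappings = new ArrayList<>();
        mappings.add(new Mapping(new FilePosition(3, 0), new FilePosition(3, 10)));
        mappings.add(new Mapping(new FilePosition(7, 2), new FilePosition(7, 9)));
        addLineToMappings(mappings, 5); // a line was inserted after line 5
        for (Mapping m : mappings) {
            System.out.println(m.start + " -> " + m.end);
        }
    }
}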
public static void reopenInactiveRecords(List<Section> sections, Map<String, String> tabStates, String collectionName) {
for (Section section : sections) {
for (Row row: section.getRows()) {
for (Field field : row.getFields()) {
if (field != null) {
if (Field.CONTAINER.equals(field.getFieldType()) && StringUtils.startsWith(field.getContainerName(), collectionName)) {
final String tabKey = WebUtils.generateTabKey(FieldUtils.generateCollectionSubTabName(field));
tabStates.put(tabKey, KualiForm.TabState.OPEN.name());
}
}
}
}
}
}
| static void function(List<Section> sections, Map<String, String> tabStates, String collectionName) { for (Section section : sections) { for (Row row: section.getRows()) { for (Field field : row.getFields()) { if (field != null) { if (Field.CONTAINER.equals(field.getFieldType()) && StringUtils.startsWith(field.getContainerName(), collectionName)) { final String tabKey = WebUtils.generateTabKey(FieldUtils.generateCollectionSubTabName(field)); tabStates.put(tabKey, KualiForm.TabState.OPEN.name()); } } } } } } | /**
* Attempts to reopen sub tabs which would have been closed for inactive records
*
* @param sections the list of Sections whose rows and fields to set the open tab state on
* @param tabStates the map of tabKey->tabState. This map will be modified to set entries to "OPEN"
* @param collectionName the name of the collection reopening
*/ | Attempts to reopen sub tabs which would have been closed for inactive records | reopenInactiveRecords | {
"repo_name": "kuali/kc-rice",
"path": "rice-middleware/kns/src/main/java/org/kuali/rice/kns/util/WebUtils.java",
"license": "apache-2.0",
"size": 43933
} | [
"java.util.List",
"java.util.Map",
"org.apache.commons.lang.StringUtils",
"org.kuali.rice.kns.web.struts.form.KualiForm",
"org.kuali.rice.kns.web.ui.Field",
"org.kuali.rice.kns.web.ui.Row",
"org.kuali.rice.kns.web.ui.Section"
] | import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.kuali.rice.kns.web.struts.form.KualiForm; import org.kuali.rice.kns.web.ui.Field; import org.kuali.rice.kns.web.ui.Row; import org.kuali.rice.kns.web.ui.Section; | import java.util.*; import org.apache.commons.lang.*; import org.kuali.rice.kns.web.struts.form.*; import org.kuali.rice.kns.web.ui.*; | [
"java.util",
"org.apache.commons",
"org.kuali.rice"
] | java.util; org.apache.commons; org.kuali.rice; | 747,260 |
private static byte[] streamToBytes(InputStream in, int length) throws IOException
{
byte[] bytes = new byte[length];
int count;
int pos = 0;
while (pos < length && ((count = in.read(bytes, pos, length - pos)) != -1))
{
pos += count;
}
if (pos != length)
{
throw new IOException("Expected " + length + " bytes, read " + pos + " bytes");
}
return bytes;
}
// Visible for testing.
static class CacheHeader
{
public long size;
public String key;
public String etag;
public long serverDate;
public long ttl;
public long softTtl;
public Map<String, String> responseHeaders;
private CacheHeader()
{}
public CacheHeader(String key, Entry entry)
{
this.key = key;
this.size = entry.data.length;
this.etag = entry.etag;
this.serverDate = entry.serverDate;
this.ttl = entry.ttl;
this.softTtl = entry.softTtl;
this.responseHeaders = entry.responseHeaders;
} | static byte[] function(InputStream in, int length) throws IOException { byte[] bytes = new byte[length]; int count; int pos = 0; while (pos < length && ((count = in.read(bytes, pos, length - pos)) != -1)) { pos += count; } if (pos != length) { throw new IOException(STR + length + STR + pos + STR); } return bytes; } static class CacheHeader { public long size; public String key; public String etag; public long serverDate; public long ttl; public long softTtl; public Map<String, String> responseHeaders; private CacheHeader() {} public CacheHeader(String key, Entry entry) { this.key = key; this.size = entry.data.length; this.etag = entry.etag; this.serverDate = entry.serverDate; this.ttl = entry.ttl; this.softTtl = entry.softTtl; this.responseHeaders = entry.responseHeaders; } | /**
* Reads the contents of an InputStream into a byte[].
*/ | Reads the contents of an InputStream into a byte[] | streamToBytes | {
"repo_name": "haikuowuya/android_volley",
"path": "src/com/android/volley/toolbox/DiskBasedCache.java",
"license": "apache-2.0",
"size": 15283
} | [
"java.io.IOException",
"java.io.InputStream",
"java.util.Map"
] | import java.io.IOException; import java.io.InputStream; import java.util.Map; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 1,748,888 |
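A rough stand-alone version of the fixed-length read loop shown in streamToBytes; the class name and sample payload are invented, and production code could also rely on java.io.DataInputStream.readFully:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;

public class ReadFullySketch {

    // Keep reading until 'length' bytes have arrived, or fail if the stream ends early.
    static byte[] readFully(InputStream in, int length) throws IOException {
        byte[] bytes = new byte[length];
        int pos = 0;
        while (pos < length) {
            int count = in.read(bytes, pos, length - pos);
            if (count == -1) {
                throw new IOException("Expected " + length + " bytes, read " + pos + " bytes");
            }
            pos += count;
        }
        return bytes;
    }

    public static void main(String[] args) throws IOException {
        byte[] payload = {1, 2, 3, 4, 5};
        byte[] copy = readFully(new ByteArrayInputStream(payload), payload.length);
        System.out.println(Arrays.toString(copy));
    }
}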
@Override
public CharSequence convertToString(Cursor cursor) {
if (cursor == null) {
return null;
}
String query = getColumnString(cursor, SearchManager.SUGGEST_COLUMN_QUERY);
if (query != null) {
return query;
}
return null;
} | CharSequence function(Cursor cursor) { if (cursor == null) { return null; } String query = getColumnString(cursor, SearchManager.SUGGEST_COLUMN_QUERY); if (query != null) { return query; } return null; } | /**
* Gets the text to show in the query field when a suggestion is selected.
*
* @param cursor The Cursor to read the suggestion data from. The Cursor should already
* be moved to the suggestion that is to be read from.
* @return The text to show, or <code>null</code> if the query should not be
* changed when selecting this suggestion.
*/ | Gets the text to show in the query field when a suggestion is selected | convertToString | {
"repo_name": "perrystreetsoftware/ActionBarSherlock",
"path": "actionbarsherlock/src/com/actionbarsherlock/widget/SuggestionsAdapter.java",
"license": "apache-2.0",
"size": 28606
} | [
"android.app.SearchManager",
"android.database.Cursor"
] | import android.app.SearchManager; import android.database.Cursor; | import android.app.*; import android.database.*; | [
"android.app",
"android.database"
] | android.app; android.database; | 617,971 |
public void setAttributeClass(Class type) {
if (type == null)
throw new NullPointerException("type is null");
if (!RadiusAttribute.class.isAssignableFrom(type))
throw new IllegalArgumentException("type is not a RadiusAttribute descendant");
this.attributeClass = type;
} | void function(Class type) { if (type == null) throw new NullPointerException(STR); if (!RadiusAttribute.class.isAssignableFrom(type)) throw new IllegalArgumentException(STR); this.attributeClass = type; } | /**
* Sets the RadiusAttribute descendant class which represents
* attributes of this type.
*/ | Sets the RadiusAttribute descendant class which represents attributes of this type | setAttributeClass | {
"repo_name": "leoluozhe/ToughRadius",
"path": "src/radius/java/org/tinyradius/dictionary/AttributeType.java",
"license": "bsd-3-clause",
"size": 5101
} | [
"org.tinyradius.attribute.RadiusAttribute"
] | import org.tinyradius.attribute.RadiusAttribute; | import org.tinyradius.attribute.*; | [
"org.tinyradius.attribute"
] | org.tinyradius.attribute; | 2,825,531 |
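A hypothetical sketch of the same guard pattern, a null check followed by Class.isAssignableFrom, using invented Base/Derived types in place of the RadiusAttribute hierarchy:

public class TypeCheckSketch {

    static class Base {}
    static class Derived extends Base {}

    private Class<?> attributeClass;

    // Mirrors the null check and the isAssignableFrom guard from the record above.
    void setAttributeClass(Class<?> type) {
        if (type == null) {
            throw new NullPointerException("type is null");
        }
        if (!Base.class.isAssignableFrom(type)) {
            throw new IllegalArgumentException("type is not a Base descendant");
        }
        this.attributeClass = type;
    }

    public static void main(String[] args) {
        TypeCheckSketch sketch = new TypeCheckSketch();
        sketch.setAttributeClass(Derived.class);    // accepted
        try {
            sketch.setAttributeClass(String.class); // rejected
        } catch (IllegalArgumentException expected) {
            System.out.println("rejected: " + expected.getMessage());
        }
        System.out.println("stored: " + sketch.attributeClass);
    }
}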
@Test
public void testSelfRemovingTaskSuspendTransaction(PrintWriter out) throws Exception {
SelfRemovingTask task = new SelfRemovingTask("testSelfRemovingTaskSuspendTransaction", 3, true); // Remove on the third update
task.getExecutionProperties().put(ManagedTask.TRANSACTION, ManagedTask.SUSPEND);
Trigger trigger = new FixedRepeatTrigger(4, 32); // Run up to 4 times, but we should remove at 3
TaskStatus<Integer> status = scheduler.schedule((Callable<Integer>) task, trigger);
for (long start = System.nanoTime(); status != null && System.nanoTime() - start < TIMEOUT_NS; Thread.sleep(POLL_INTERVAL))
status = scheduler.getStatus(status.getTaskId());
if (status != null)
throw new Exception("Task was not removed. " + status);
pollForTableEntry("testSelfRemovingTaskSuspendTransaction", 3);
} | void function(PrintWriter out) throws Exception { SelfRemovingTask task = new SelfRemovingTask(STR, 3, true); task.getExecutionProperties().put(ManagedTask.TRANSACTION, ManagedTask.SUSPEND); Trigger trigger = new FixedRepeatTrigger(4, 32); TaskStatus<Integer> status = scheduler.schedule((Callable<Integer>) task, trigger); for (long start = System.nanoTime(); status != null && System.nanoTime() - start < TIMEOUT_NS; Thread.sleep(POLL_INTERVAL)) status = scheduler.getStatus(status.getTaskId()); if (status != null) throw new Exception(STR + status); pollForTableEntry(STR, 3); } | /**
* Schedule a task that runs with the persistent executor transaction suspended,
* and removes itself when it runs the third time.
*/ | Schedule a task that runs with the persistent executor transaction suspended, and removes itself when it runs the third time | testSelfRemovingTaskSuspendTransaction | {
"repo_name": "kgibm/open-liberty",
"path": "dev/com.ibm.ws.concurrent.persistent_fat/test-applications/schedtest/src/web/SchedulerFATServlet.java",
"license": "epl-1.0",
"size": 196518
} | [
"com.ibm.websphere.concurrent.persistent.TaskStatus",
"java.io.PrintWriter",
"java.util.concurrent.Callable",
"javax.enterprise.concurrent.ManagedTask",
"javax.enterprise.concurrent.Trigger"
] | import com.ibm.websphere.concurrent.persistent.TaskStatus; import java.io.PrintWriter; import java.util.concurrent.Callable; import javax.enterprise.concurrent.ManagedTask; import javax.enterprise.concurrent.Trigger; | import com.ibm.websphere.concurrent.persistent.*; import java.io.*; import java.util.concurrent.*; import javax.enterprise.concurrent.*; | [
"com.ibm.websphere",
"java.io",
"java.util",
"javax.enterprise"
] | com.ibm.websphere; java.io; java.util; javax.enterprise; | 2,398,528 |
private native void initSocketNative() throws IOException;
private native void initSocketFromFdNative(int fd) throws IOException;
private native void connectNative() throws IOException;
private native int bindListenNative();
private native BluetoothSocket acceptNative(int timeout) throws IOException;
private native int availableNative() throws IOException;
private native int readNative(byte[] b, int offset, int length) throws IOException;
private native int writeNative(byte[] b, int offset, int length) throws IOException;
private native void abortNative() throws IOException;
private native void destroyNative() throws IOException;
native void throwErrnoNative(int errno) throws IOException;
private static class SdpHelper extends IBluetoothCallback.Stub {
private final IBluetooth service;
private final ParcelUuid uuid;
private final BluetoothDevice device;
private int channel;
private boolean canceled;
public SdpHelper(BluetoothDevice device, ParcelUuid uuid) {
service = BluetoothDevice.getService();
this.device = device;
this.uuid = uuid;
canceled = false;
} | native void initSocketNative() throws IOException; private native void initSocketFromFdNative(int fd) throws IOException; private native void connectNative() throws IOException; private native int bindListenNative(); private native BluetoothSocket acceptNative(int timeout) throws IOException; private native int availableNative() throws IOException; private native int readNative(byte[] b, int offset, int length) throws IOException; private native int writeNative(byte[] b, int offset, int length) throws IOException; private native void abortNative() throws IOException; private native void destroyNative() throws IOException; native void function(int errno) throws IOException; private static class SdpHelper extends IBluetoothCallback.Stub { private final IBluetooth service; private final ParcelUuid uuid; private final BluetoothDevice device; private int channel; private boolean canceled; public SdpHelper(BluetoothDevice device, ParcelUuid uuid) { service = BluetoothDevice.getService(); this.device = device; this.uuid = uuid; canceled = false; } | /**
* Throws an IOException for the given POSIX errno. Done natively so we can
* use strerror to convert it to a string error message.
*/ | Throws an IOException for the given POSIX errno. Done natively so we can use strerror to convert it to a string error message | throwErrnoNative | {
"repo_name": "esmasui/backport-android-bluetooth",
"path": "backport-android-bluetooth201/eclair/BluetoothSocket.java",
"license": "apache-2.0",
"size": 13951
} | [
"android.bluetooth.IBluetoothCallback",
"android.os.ParcelUuid",
"java.io.IOException"
] | import android.bluetooth.IBluetoothCallback; import android.os.ParcelUuid; import java.io.IOException; | import android.bluetooth.*; import android.os.*; import java.io.*; | [
"android.bluetooth",
"android.os",
"java.io"
] | android.bluetooth; android.os; java.io; | 945,357 |
public static boolean createHeightfield(Context ctx, Heightfield hf, int width, int height, Vector3f minBounds, Vector3f maxBounds, float cs, float ch) {
SWIGTYPE_p_float bmin = Converter.convertToSWIGTYPE_p_float(minBounds);
SWIGTYPE_p_float bmax = Converter.convertToSWIGTYPE_p_float(maxBounds);
return RecastJNI.rcCreateHeightfield(Context.getCPtr(ctx), ctx, Heightfield.getCPtr(hf), hf, width, height, SWIGTYPE_p_float.getCPtr(bmin), SWIGTYPE_p_float.getCPtr(bmax), cs, ch);
} | static boolean function(Context ctx, Heightfield hf, int width, int height, Vector3f minBounds, Vector3f maxBounds, float cs, float ch) { SWIGTYPE_p_float bmin = Converter.convertToSWIGTYPE_p_float(minBounds); SWIGTYPE_p_float bmax = Converter.convertToSWIGTYPE_p_float(maxBounds); return RecastJNI.rcCreateHeightfield(Context.getCPtr(ctx), ctx, Heightfield.getCPtr(hf), hf, width, height, SWIGTYPE_p_float.getCPtr(bmin), SWIGTYPE_p_float.getCPtr(bmax), cs, ch); } | /**
* Initializes a new heightfield.
*
* @param ctx The build context to use during the operation.
* @param hf The allocated heightfield to initialize.
* @param width The width of the field along the x-axis. [Limit: >= 0]
* [Units: vx]
* @param height The height of the field along the z-axis. [Limit: >= 0]
* [Units: vx]
* @param minBounds The minimum bounds of the field's AABB. [(x, y, z)]
* [Units: wu]
* @param maxBounds The maximum bounds of the field's AABB. [(x, y, z)]
* [Units: wu]
* @param cs The xz-plane cell size to use for the field. [Limit: > 0]
* [Units: wu]
* @param ch The y-axis cell size to use for field. [Limit: > 0] [Units: wu]
* @return true if the heightfield was initialized successfully
*/ | Initializes a new heightfield | createHeightfield | {
"repo_name": "QuietOne/jNavigation",
"path": "src/com/jme3/ai/navigation/recast/RecastBuilder.java",
"license": "bsd-3-clause",
"size": 51030
} | [
"com.jme3.ai.navigation.utils.Converter",
"com.jme3.ai.navigation.utils.RecastJNI",
"com.jme3.math.Vector3f"
] | import com.jme3.ai.navigation.utils.Converter; import com.jme3.ai.navigation.utils.RecastJNI; import com.jme3.math.Vector3f; | import com.jme3.ai.navigation.utils.*; import com.jme3.math.*; | [
"com.jme3.ai",
"com.jme3.math"
] | com.jme3.ai; com.jme3.math; | 1,038,114 |
private void find_N(SortedMap<String, Integer> vowels) {
// for each letter, the stupid way!
for(char currchar = 'A'; currchar <= 'Z'; ++currchar){
int vowelBefore = 0;
// start at looking from the first index
for(int i = cipher.indexOf(currchar,1); i != -1; i = cipher.indexOf(currchar, i+1) ){
// look at the letter before it
char before = cipher.charAt(i-1);
// see if it's a vowel
if(vowels.containsKey(""+before)){
vowelBefore++;
}
}
System.out.printf("%c %2d : vowel before %4.0f%%\n", currchar, histogram[currchar-'A'], ((double)vowelBefore/histogram[currchar-'A'])*100);
}
}
| void function(SortedMap<String, Integer> vowels) { for(char currchar = 'A'; currchar <= 'Z'; ++currchar){ int vowelBefore = 0; for(int i = cipher.indexOf(currchar,1); i != -1; i = cipher.indexOf(currchar, i+1) ){ char before = cipher.charAt(i-1); if(vowels.containsKey(STR%c %2d : vowel before %4.0f%%\n", currchar, histogram[currchar-'A'], ((double)vowelBefore/histogram[currchar-'A'])*100); } } | /**
* Looks for the ciphertext letter that stands for N in the plain text by printing,
* for each letter, how often it is preceded by a vowel.
*
* @param vowels the letters currently classified as vowels
*/ | N in plain text | find_N | {
"repo_name": "sctthrvy/Crypto",
"path": "src/KeyWordTranspose/FirstCodeword.java",
"license": "gpl-2.0",
"size": 7637
} | [
"java.util.SortedMap"
] | import java.util.SortedMap; | import java.util.*; | [
"java.util"
] | java.util; | 2,108,005 |
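The idea behind find_N (rank letters by how often a vowel precedes them) can be tried on any string; the toy cipher text and vowel set below are made up for the sketch:

import java.util.Set;

public class VowelBeforeSketch {

    // Count, for each letter A-Z, how often it is preceded by a vowel in the text.
    public static void main(String[] args) {
        String cipher = "QXA ENQ ANQ XINQ"; // toy stand-in for the real cipher text
        Set<Character> vowels = Set.of('A', 'E', 'I', 'O', 'U');
        for (char current = 'A'; current <= 'Z'; current++) {
            int total = 0;
            int vowelBefore = 0;
            for (int i = cipher.indexOf(current, 1); i != -1; i = cipher.indexOf(current, i + 1)) {
                total++;
                if (vowels.contains(cipher.charAt(i - 1))) {
                    vowelBefore++;
                }
            }
            if (total > 0) {
                System.out.printf("%c %2d : vowel before %4.0f%%%n", current, total,
                        100.0 * vowelBefore / total);
            }
        }
    }
}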
protected void shutdown(String databaseName)
throws SQLException {
shutdown(databaseName, null);
} | void function(String databaseName) throws SQLException { shutdown(databaseName, null); } | /**
* Shutdown the specified database.
*
* @param databaseName the name of the database
*/ | Shutdown the specified database | shutdown | {
"repo_name": "scnakandala/derby",
"path": "java/testing/org/apache/derbyTesting/functionTests/tests/store/EncryptionKeyTest.java",
"license": "apache-2.0",
"size": 26162
} | [
"java.sql.SQLException"
] | import java.sql.SQLException; | import java.sql.*; | [
"java.sql"
] | java.sql; | 1,964,699 |
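For context, shutting down an embedded Derby database is conventionally requested through a connection URL attribute; the sketch below is an assumption-laden illustration of that idiom (hypothetical database name, and it presumes the Derby embedded driver is on the classpath), not code taken from the test class above:

import java.sql.DriverManager;
import java.sql.SQLException;

public class DerbyShutdownSketch {

    // Ask the embedded Derby engine to shut down a single database.
    // With the embedded driver on the classpath, Derby signals a clean
    // shutdown by throwing an SQLException (SQLState 08006).
    static void shutdown(String databaseName) {
        try {
            DriverManager.getConnection("jdbc:derby:" + databaseName + ";shutdown=true");
        } catch (SQLException e) {
            System.out.println("shutdown response: " + e.getMessage());
        }
    }

    public static void main(String[] args) {
        shutdown("encryptionKeyTestDb"); // hypothetical database name
    }
}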
public static void validateHistogramTrack(final Track track) {
Assert.notNull(track.getId(), getMessage(MessagesConstants.ERROR_NULL_PARAM, "id"));
Assert.notNull(track.getChromosome(), getMessage(MessagesConstants.ERROR_NULL_PARAM, CHROMOSOME_FILED));
} | static void function(final Track track) { Assert.notNull(track.getId(), getMessage(MessagesConstants.ERROR_NULL_PARAM, "id")); Assert.notNull(track.getChromosome(), getMessage(MessagesConstants.ERROR_NULL_PARAM, CHROMOSOME_FILED)); } | /**
* Validates a histogram track
* @param track a histogram track to validate
*/ | Validates a histogram track | validateHistogramTrack | {
"repo_name": "epam/NGB",
"path": "server/catgenome/src/main/java/com/epam/catgenome/manager/TrackHelper.java",
"license": "mit",
"size": 14329
} | [
"com.epam.catgenome.component.MessageHelper",
"com.epam.catgenome.constant.MessagesConstants",
"com.epam.catgenome.entity.track.Track",
"org.springframework.util.Assert"
] | import com.epam.catgenome.component.MessageHelper; import com.epam.catgenome.constant.MessagesConstants; import com.epam.catgenome.entity.track.Track; import org.springframework.util.Assert; | import com.epam.catgenome.component.*; import com.epam.catgenome.constant.*; import com.epam.catgenome.entity.track.*; import org.springframework.util.*; | [
"com.epam.catgenome",
"org.springframework.util"
] | com.epam.catgenome; org.springframework.util; | 2,683,511 |
public List<Lot> getLots() {
return lots;
} | List<Lot> function() { return lots; } | /**
* lots getter
*
* @return lots
*/ | lots getter | getLots | {
"repo_name": "acheype/cantharella",
"path": "cantharella.data/src/main/java/nc/ird/cantharella/data/model/Campagne.java",
"license": "agpl-3.0",
"size": 10813
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,668,471 |
public void setSession(Session session) {
this.session = session;
}
private HttpServletResponse httpServletResponse;
| void function(Session session) { this.session = session; } private HttpServletResponse httpServletResponse; | /**
* This sets the session
* @param session The instance of session.
*/ | This sets the session | setSession | {
"repo_name": "gauravvermaicloud/JavaProjectTemplate",
"path": "src/main/java/com/boilerplate/framework/RequestParameters.java",
"license": "apache-2.0",
"size": 2627
} | [
"com.boilerplate.sessions.Session",
"javax.servlet.http.HttpServletResponse"
] | import com.boilerplate.sessions.Session; import javax.servlet.http.HttpServletResponse; | import com.boilerplate.sessions.*; import javax.servlet.http.*; | [
"com.boilerplate.sessions",
"javax.servlet"
] | com.boilerplate.sessions; javax.servlet; | 2,767,391 |
public void setDataset(ValueDataset dataset) {
setDataset(0, dataset);
} | void function(ValueDataset dataset) { setDataset(0, dataset); } | /**
* Sets the dataset for the plot, replacing the existing dataset, if there
* is one, and sends a {@link PlotChangeEvent} to all registered
* listeners.
*
* @param dataset the dataset (<code>null</code> permitted).
*/ | Sets the dataset for the plot, replacing the existing dataset, if there is one, and sends a <code>PlotChangeEvent</code> to all registered listeners | setDataset | {
"repo_name": "nologic/nabs",
"path": "client/trunk/shared/libraries/jfreechart-1.0.5/experimental/org/jfree/experimental/chart/plot/dial/DialPlot.java",
"license": "gpl-2.0",
"size": 18780
} | [
"org.jfree.data.general.ValueDataset"
] | import org.jfree.data.general.ValueDataset; | import org.jfree.data.general.*; | [
"org.jfree.data"
] | org.jfree.data; | 2,659,080 |
@Test
@TestForIssue(jiraKey = "HHH-8363")
public void testStoppableClassLoaderService() {
final BootstrapServiceRegistryBuilder bootstrapBuilder = new BootstrapServiceRegistryBuilder();
bootstrapBuilder.with( new TestClassLoader() );
final ServiceRegistry serviceRegistry = new ServiceRegistryBuilder( bootstrapBuilder.build() ).buildServiceRegistry();
final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
TestIntegrator testIntegrator1 = findTestIntegrator( classLoaderService );
assertNotNull( testIntegrator1 );
TestIntegrator testIntegrator2 = findTestIntegrator( classLoaderService );
assertNotNull( testIntegrator2 );
assertSame( testIntegrator1, testIntegrator2 );
ServiceRegistryBuilder.destroy( serviceRegistry );
// Should return null -- aggregatedClassLoader blown away.
testIntegrator2 = findTestIntegrator( classLoaderService );
assertNull( testIntegrator2 );
} | @TestForIssue(jiraKey = STR) void function() { final BootstrapServiceRegistryBuilder bootstrapBuilder = new BootstrapServiceRegistryBuilder(); bootstrapBuilder.with( new TestClassLoader() ); final ServiceRegistry serviceRegistry = new ServiceRegistryBuilder( bootstrapBuilder.build() ).buildServiceRegistry(); final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class ); TestIntegrator testIntegrator1 = findTestIntegrator( classLoaderService ); assertNotNull( testIntegrator1 ); TestIntegrator testIntegrator2 = findTestIntegrator( classLoaderService ); assertNotNull( testIntegrator2 ); assertSame( testIntegrator1, testIntegrator2 ); ServiceRegistryBuilder.destroy( serviceRegistry ); testIntegrator2 = findTestIntegrator( classLoaderService ); assertNull( testIntegrator2 ); } | /**
* HHH-8363 discovered multiple leaks within CLS. Most notably, it wasn't getting GC'd due to holding
* references to ServiceLoaders. Ensure that the addition of Stoppable functionality cleans up properly.
*
* TODO: Is there a way to test that the ServiceLoader was actually reset?
*/ | HHH-8363 discovered multiple leaks within CLS. Most notably, it wasn't getting GC'd due to holding references to ServiceLoaders. Ensure that the addition of Stoppable functionality cleans up properly | testStoppableClassLoaderService | {
"repo_name": "HerrB92/obp",
"path": "OpenBeaconPackage/libraries/hibernate-release-4.2.7.SP1/project/hibernate-core/src/test/java/org/hibernate/test/service/ClassLoaderServiceImplTest.java",
"license": "mit",
"size": 5880
} | [
"org.hibernate.service.BootstrapServiceRegistryBuilder",
"org.hibernate.service.ServiceRegistry",
"org.hibernate.service.ServiceRegistryBuilder",
"org.hibernate.service.classloading.spi.ClassLoaderService",
"org.hibernate.testing.TestForIssue",
"org.junit.Assert"
] | import org.hibernate.service.BootstrapServiceRegistryBuilder; import org.hibernate.service.ServiceRegistry; import org.hibernate.service.ServiceRegistryBuilder; import org.hibernate.service.classloading.spi.ClassLoaderService; import org.hibernate.testing.TestForIssue; import org.junit.Assert; | import org.hibernate.service.*; import org.hibernate.service.classloading.spi.*; import org.hibernate.testing.*; import org.junit.*; | [
"org.hibernate.service",
"org.hibernate.testing",
"org.junit"
] | org.hibernate.service; org.hibernate.testing; org.junit; | 2,844,483 |
protected void setRequiredFiles(final List<String> requiredFiles) {
this.requiredFiles = requiredFiles;
}
| void function(final List<String> requiredFiles) { this.requiredFiles = requiredFiles; } | /**
* Sets the required files.
*
* @param requiredFiles the new required files
*/ | Sets the required files | setRequiredFiles | {
"repo_name": "daitr-gu/scheduler_emulator_v2",
"path": "src/org/cloudbus/cloudsim/Cloudlet.java",
"license": "mit",
"size": 41844
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,223,092 |
public static String readToPreviousQuote(TemplateDocument document, int position) {
StringBuilder typedString = new StringBuilder();
char c;
try {
position--;
while (true) {
c = document.getChar(position--);
if (c == '"') {
break;
} else if (c == '\n' || c == '\r') {
return null;
}
typedString.append(c);
}
} catch (BadLocationException e) {
return null;
}
return typedString.reverse().toString();
} | static String function(TemplateDocument document, int position) { StringBuilder typedString = new StringBuilder(); char c; try { position--; while (true) { c = document.getChar(position--); if (c == '"') { break; } else if (c == '\n' c == '\r') { return null; } typedString.append(c); } } catch (BadLocationException e) { return null; } return typedString.reverse().toString(); } | /**
* Reads a string backwards from the current offset position to the first
* occurrence of a double quote. Returns null if none is found or a new line is found.
*
* @param document
* The document to search in.
* @param position
* The offset in the document to start searching at.
* @return The string from the specified position to the first occurrence of
* double quote.
*/ | Reads a string backwards from the current offset position to the first occurrence of a double quote. Returns null if none is found or a new line is found | readToPreviousQuote | {
"repo_name": "zhangzhx/aws-toolkit-eclipse",
"path": "bundles/com.amazonaws.eclipse.cloudformation/src/com/amazonaws/eclipse/cloudformation/templates/editor/DocumentUtils.java",
"license": "apache-2.0",
"size": 6121
} | [
"org.eclipse.jface.text.BadLocationException"
] | import org.eclipse.jface.text.BadLocationException; | import org.eclipse.jface.text.*; | [
"org.eclipse.jface"
] | org.eclipse.jface; | 2,260,396 |
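The same backwards scan can be exercised on a plain java.lang.String without the Eclipse document types; the sample inputs are invented and the method mirrors the documented stop conditions (a double quote, a line break, or the start of the text):

public class ReadBackSketch {

    // Walk left from 'position' and collect characters until the previous double quote.
    // Returns null if a line break is hit first or no quote exists before the position.
    static String readToPreviousQuote(String text, int position) {
        StringBuilder typed = new StringBuilder();
        for (int i = position - 1; i >= 0; i--) {
            char c = text.charAt(i);
            if (c == '"') {
                return typed.reverse().toString();
            }
            if (c == '\n' || c == '\r') {
                return null;
            }
            typed.append(c);
        }
        return null; // ran off the start of the text without seeing a quote
    }

    public static void main(String[] args) {
        String json = "{ \"Resources\": { \"MyBuck";
        System.out.println(readToPreviousQuote(json, json.length())); // MyBuck
        System.out.println(readToPreviousQuote("no quotes here", 5)); // null
    }
}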
@Override
public Centroid<MLData> createCentroid() {
return null;
} | Centroid<MLData> function() { return null; } | /**
* Not supported.
* <p/>
* @return Nothing.
*/ | Not supported. | createCentroid | {
"repo_name": "ladygagapowerbot/bachelor-thesis-implementation",
"path": "lib/Encog/src/main/java/org/encog/ml/data/specific/BiPolarNeuralData.java",
"license": "mit",
"size": 5635
} | [
"org.encog.ml.data.MLData",
"org.encog.util.kmeans.Centroid"
] | import org.encog.ml.data.MLData; import org.encog.util.kmeans.Centroid; | import org.encog.ml.data.*; import org.encog.util.kmeans.*; | [
"org.encog.ml",
"org.encog.util"
] | org.encog.ml; org.encog.util; | 1,550,188 |
public static void writeFormElementContextOpen(Writer out, String fullFormId, String elementId, boolean isPresent,
PageContext pageContext, String idPrefix) throws Exception {
// Enclose the element in a <span id=someuniqueid>
String spanId = idPrefix + fullFormId + Path.SEPARATOR + elementId;
JspUtil.writeOpenStartTag(out, "span");
JspUtil.writeAttribute(out, "id", spanId);
// We'll also use the span around a form element for tracking keyboard events.
// that is, the span will call our handler on a keypress.
// Another, better way would be to set up some global keypress handler, but unfortunately
// that way it is too difficult to determine exactly which element was the target for the event.
// All events are sent to a handler called "uiHandleKeypress(event, formElementId)"
// We use the "keydown" event, not keypress, because this allows to catch F2 in IE.
// Actual onkeydown event is attached to span with behavioural javascript --
// that also takes care of adding hidden element into DOM that indicates this
// form element is present in request.
JspUtil.writeCloseStartTag(out);
} | static void function(Writer out, String fullFormId, String elementId, boolean isPresent, PageContext pageContext, String idPrefix) throws Exception { String spanId = idPrefix + fullFormId + Path.SEPARATOR + elementId; JspUtil.writeOpenStartTag(out, "span"); JspUtil.writeAttribute(out, "id", spanId); JspUtil.writeCloseStartTag(out); } | /**
* Write a span with random ID around the element, and register this SPAN with javascript (done by external behavior
* scripts, SPAN functions as keyboard handler). Default implementation does not use any parameters except
* <code>Writer</code> and <code>PageContext</code>.
*
* @param out The writer to write to.
* @param fullFormId The full form ID.
* @param elementId The current element ID.
* @param isPresent
* @param pageContext The JSP page context.
* @param idPrefix The prefix for the ID.
* @throws Exception
*/ | Write a span with random ID around the element, and register this SPAN with javascript (done by external behavior scripts, SPAN functions as keyboard handler). Default implementation does not use any parameters except <code>Writer</code> and <code>PageContext</code> | writeFormElementContextOpen | {
"repo_name": "nortal/araneaframework",
"path": "src/org/araneaframework/jsp/tag/uilib/form/BaseFormElementHtmlTag.java",
"license": "apache-2.0",
"size": 16449
} | [
"java.io.Writer",
"javax.servlet.jsp.PageContext",
"org.araneaframework.Path",
"org.araneaframework.jsp.util.JspUtil"
] | import java.io.Writer; import javax.servlet.jsp.PageContext; import org.araneaframework.Path; import org.araneaframework.jsp.util.JspUtil; | import java.io.*; import javax.servlet.jsp.*; import org.araneaframework.*; import org.araneaframework.jsp.util.*; | [
"java.io",
"javax.servlet",
"org.araneaframework",
"org.araneaframework.jsp"
] | java.io; javax.servlet; org.araneaframework; org.araneaframework.jsp; | 341,475 |
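A very small sketch of the span-wrapping idea using only java.io.Writer; the id prefix, the ids, and the "/" separator are assumptions for illustration, whereas the real tag delegates attribute writing to JspUtil:

import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;

public class SpanContextSketch {

    // Emit <span id="..."> around a form element, mirroring the idPrefix + formId + separator + elementId scheme.
    static void writeFormElementContextOpen(Writer out, String fullFormId, String elementId, String idPrefix)
            throws IOException {
        String spanId = idPrefix + fullFormId + "/" + elementId;
        out.write("<span id=\"" + spanId + "\">");
    }

    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        writeFormElementContextOpen(out, "loginForm", "userName", "fe-");
        out.write("<input type=\"text\"/>");
        out.write("</span>");
        System.out.println(out);
    }
}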
@Test
public void testParameterConstruction() { // NOPMD (assert missing)
for (int i=0;i<ARRAY_LENGTH;i++) {
// initialize
MonitorExitEvent record = new MonitorExitEvent(LONG_VALUES.get(i % LONG_VALUES.size()), LONG_VALUES.get(i % LONG_VALUES.size()), INT_VALUES.get(i % INT_VALUES.size()), INT_VALUES.get(i % INT_VALUES.size()));
// check values
Assert.assertEquals("MonitorExitEvent.timestamp values are not equal.", (long) LONG_VALUES.get(i % LONG_VALUES.size()), record.getTimestamp());
Assert.assertEquals("MonitorExitEvent.traceId values are not equal.", (long) LONG_VALUES.get(i % LONG_VALUES.size()), record.getTraceId());
Assert.assertEquals("MonitorExitEvent.orderIndex values are not equal.", (int) INT_VALUES.get(i % INT_VALUES.size()), record.getOrderIndex());
Assert.assertEquals("MonitorExitEvent.lockId values are not equal.", (int) INT_VALUES.get(i % INT_VALUES.size()), record.getLockId());
}
}
| void function() { for (int i=0;i<ARRAY_LENGTH;i++) { MonitorExitEvent record = new MonitorExitEvent(LONG_VALUES.get(i % LONG_VALUES.size()), LONG_VALUES.get(i % LONG_VALUES.size()), INT_VALUES.get(i % INT_VALUES.size()), INT_VALUES.get(i % INT_VALUES.size())); Assert.assertEquals(STR, (long) LONG_VALUES.get(i % LONG_VALUES.size()), record.getTimestamp()); Assert.assertEquals(STR, (long) LONG_VALUES.get(i % LONG_VALUES.size()), record.getTraceId()); Assert.assertEquals(STR, (int) INT_VALUES.get(i % INT_VALUES.size()), record.getOrderIndex()); Assert.assertEquals(STR, (int) INT_VALUES.get(i % INT_VALUES.size()), record.getLockId()); } } | /**
* Tests {@link MonitorExitEvent#TestMonitorExitEvent(long, long, int, int)}.
*/ | Tests <code>MonitorExitEvent#TestMonitorExitEvent(long, long, int, int)</code> | testParameterConstruction | {
"repo_name": "leadwire-apm/leadwire-javaagent",
"path": "leadwire-common/test-gen/kieker/test/common/junit/record/flow/trace/concurrency/monitor/TestGeneratedMonitorExitEvent.java",
"license": "apache-2.0",
"size": 7777
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 2,473,312 |
protected void endPage(PDPage page) throws IOException
{
// default is to do nothing
}
private static final float END_OF_LAST_TEXT_X_RESET_VALUE = -1;
private static final float MAX_Y_FOR_LINE_RESET_VALUE = -Float.MAX_VALUE;
private static final float EXPECTED_START_OF_NEXT_WORD_X_RESET_VALUE = -Float.MAX_VALUE;
private static final float MAX_HEIGHT_FOR_LINE_RESET_VALUE = -1;
private static final float MIN_Y_TOP_FOR_LINE_RESET_VALUE = Float.MAX_VALUE;
private static final float LAST_WORD_SPACING_RESET_VALUE = -1; | void function(PDPage page) throws IOException { } private static final float END_OF_LAST_TEXT_X_RESET_VALUE = -1; private static final float MAX_Y_FOR_LINE_RESET_VALUE = -Float.MAX_VALUE; private static final float EXPECTED_START_OF_NEXT_WORD_X_RESET_VALUE = -Float.MAX_VALUE; private static final float MAX_HEIGHT_FOR_LINE_RESET_VALUE = -1; private static final float MIN_Y_TOP_FOR_LINE_RESET_VALUE = Float.MAX_VALUE; private static final float LAST_WORD_SPACING_RESET_VALUE = -1; | /**
* End a page. Default implementation is to do nothing. Subclasses may provide additional information.
*
* @param page The page we are about to process.
*
* @throws IOException If there is any error writing to the stream.
*/ | End a page. Default implementation is to do nothing. Subclasses may provide additional information | endPage | {
"repo_name": "kalaspuffar/pdfbox",
"path": "pdfbox/src/main/java/org/apache/pdfbox/text/PDFTextStripper.java",
"license": "apache-2.0",
"size": 75799
} | [
"java.io.IOException",
"org.apache.pdfbox.pdmodel.PDPage"
] | import java.io.IOException; import org.apache.pdfbox.pdmodel.PDPage; | import java.io.*; import org.apache.pdfbox.pdmodel.*; | [
"java.io",
"org.apache.pdfbox"
] | java.io; org.apache.pdfbox; | 2,464,433 |
protected static <T> T value(
final ParameterDescriptor<T> param, final ParameterValueGroup group)
throws ParameterNotFoundException {
final ParameterValue<T> value = getParameter(param, group);
return (value != null) ? value.getValue() : null;
} | static <T> T function( final ParameterDescriptor<T> param, final ParameterValueGroup group) throws ParameterNotFoundException { final ParameterValue<T> value = getParameter(param, group); return (value != null) ? value.getValue() : null; } | /**
* Returns the parameter value for the specified operation parameter. This convenience method is
* used by subclasses for initializing {@linkplain MathTransform math transform} from a set of
* parameters.
*
* @param <T> The type of parameter value.
* @param param The parameter to look for.
* @param group The parameter value group to search into.
* @return The requested parameter value, or {@code null} if {@code param} is {@linkplain
* #createOptionalDescriptor optional} and the user didn't provided any value.
* @throws ParameterNotFoundException if the parameter is not found.
* @todo Move to the {@link org.geotools.parameter.Parameters} class.
*/ | Returns the parameter value for the specified operation parameter. This convenience method is used by subclasses for initializing a MathTransform from a set of parameters | value | {
"repo_name": "geotools/geotools",
"path": "modules/library/referencing/src/main/java/org/geotools/referencing/operation/MathTransformProvider.java",
"license": "lgpl-2.1",
"size": 27404
} | [
"org.opengis.parameter.ParameterDescriptor",
"org.opengis.parameter.ParameterNotFoundException",
"org.opengis.parameter.ParameterValue",
"org.opengis.parameter.ParameterValueGroup"
] | import org.opengis.parameter.ParameterDescriptor; import org.opengis.parameter.ParameterNotFoundException; import org.opengis.parameter.ParameterValue; import org.opengis.parameter.ParameterValueGroup; | import org.opengis.parameter.*; | [
"org.opengis.parameter"
] | org.opengis.parameter; | 1,542,784 |
@Override
public String getCreateChildText(Object owner, Object feature, Object child, Collection<?> selection) {
Object childFeature = feature;
Object childObject = child;
boolean qualify =
childFeature == BusComponentsPackage.Literals.BUS_CABLE__SIGNAL_PLUG1 ||
childFeature == BusComponentsPackage.Literals.BUS_CABLE__SIGNAL_PLUG2;
if (qualify) {
return getString
("_UI_CreateChild_text2",
new Object[] { getTypeText(childObject), getFeatureText(childFeature), getTypeText(owner) });
}
return super.getCreateChildText(owner, feature, child, selection);
}
| String function(Object owner, Object feature, Object child, Collection<?> selection) { Object childFeature = feature; Object childObject = child; boolean qualify = childFeature == BusComponentsPackage.Literals.BUS_CABLE__SIGNAL_PLUG1 childFeature == BusComponentsPackage.Literals.BUS_CABLE__SIGNAL_PLUG2; if (qualify) { return getString (STR, new Object[] { getTypeText(childObject), getFeatureText(childFeature), getTypeText(owner) }); } return super.getCreateChildText(owner, feature, child, selection); } | /**
* This returns the label text for {@link org.eclipse.emf.edit.command.CreateChildCommand}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This returns the label text for <code>org.eclipse.emf.edit.command.CreateChildCommand</code>. | getCreateChildText | {
"repo_name": "KAMP-Research/KAMP4APS",
"path": "edu.kit.ipd.sdq.kamp4aps.aps.edit/src/edu/kit/ipd/sdq/kamp4aps/model/aPS/BusComponents/provider/EtherCATCableItemProvider.java",
"license": "apache-2.0",
"size": 3654
} | [
"edu.kit.ipd.sdq.kamp4aps.model.aPS.BusComponents",
"java.util.Collection"
] | import edu.kit.ipd.sdq.kamp4aps.model.aPS.BusComponents; import java.util.Collection; | import edu.kit.ipd.sdq.kamp4aps.model.*; import java.util.*; | [
"edu.kit.ipd",
"java.util"
] | edu.kit.ipd; java.util; | 164,501 |
LongWrapper getLong() throws ServiceException; | LongWrapper getLong() throws ServiceException; | /**
* Get complex types with long properties
*
* @return the LongWrapper object if successful.
* @throws ServiceException the exception wrapped in ServiceException if failed.
*/ | Get complex types with long properties | getLong | {
"repo_name": "BretJohnson/autorest",
"path": "AutoRest/Generators/Java/Java.Tests/src/main/java/fixtures/bodycomplex/Primitive.java",
"license": "mit",
"size": 17621
} | [
"com.microsoft.rest.ServiceException"
] | import com.microsoft.rest.ServiceException; | import com.microsoft.rest.*; | [
"com.microsoft.rest"
] | com.microsoft.rest; | 1,167,149 |
private void readObject(ObjectInputStream s)
throws IOException, ClassNotFoundException
{
s.defaultReadObject();
int num = s.readInt();
table = new Object[Math.max(num << 1, DEFAULT_CAPACITY) << 1];
// Read key/value pairs.
while (--num >= 0)
put(s.readObject(), s.readObject());
} | void function(ObjectInputStream s) throws IOException, ClassNotFoundException { s.defaultReadObject(); int num = s.readInt(); table = new Object[Math.max(num << 1, DEFAULT_CAPACITY) << 1]; while (--num >= 0) put(s.readObject(), s.readObject()); } | /**
* Reads the object from a serial stream.
*
* @param s the stream to read from
* @throws ClassNotFoundException if the underlying stream fails
* @throws IOException if the underlying stream fails
* @serialData expects the size (int), followed by that many key (Object)
* and value (Object) pairs, with the pairs in no particular
* order
*/ | Reads the object from a serial stream | readObject | {
"repo_name": "shaotuanchen/sunflower_exp",
"path": "tools/source/gcc-4.2.4/libjava/classpath/java/util/IdentityHashMap.java",
"license": "bsd-3-clause",
"size": 28607
} | [
"java.io.IOException",
"java.io.ObjectInputStream"
] | import java.io.IOException; import java.io.ObjectInputStream; | import java.io.*; | [
"java.io"
] | java.io; | 363,709 |
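As a usage note that is not taken from the sources above, the point of IdentityHashMap is that keys are compared with == rather than equals(), which a short sketch makes visible:

import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Map;

public class IdentityMapSketch {
    public static void main(String[] args) {
        String a = new String("key");
        String b = new String("key"); // equal to a, but a different instance

        Map<String, Integer> byEquals = new HashMap<>();
        byEquals.put(a, 1);
        byEquals.put(b, 2); // overwrites, because a.equals(b)

        Map<String, Integer> byIdentity = new IdentityHashMap<>();
        byIdentity.put(a, 1);
        byIdentity.put(b, 2); // kept separately, because a != b

        System.out.println("HashMap size: " + byEquals.size());           // 1
        System.out.println("IdentityHashMap size: " + byIdentity.size()); // 2
    }
}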
public void setType(Type type) {
this.type = type;
} | void function(Type type) { this.type = type; } | /**
* Set the main type of this parameter.
*
* @param type the main type
*/ | Set the main type of this parameter | setType | {
"repo_name": "boy0001/FastAsyncWorldedit",
"path": "core/src/main/java/com/sk89q/worldedit/util/command/parametric/ParameterData.java",
"license": "gpl-3.0",
"size": 7222
} | [
"java.lang.reflect.Type"
] | import java.lang.reflect.Type; | import java.lang.reflect.*; | [
"java.lang"
] | java.lang; | 1,701,263 |
public int waitForCondition(Channel c, long timeout, int condition_mask)
{
long end_time = 0;
boolean end_time_set = false;
synchronized (c)
{
while (true)
{
int current_cond = 0;
int stdoutAvail = c.stdoutWritepos - c.stdoutReadpos;
int stderrAvail = c.stderrWritepos - c.stderrReadpos;
if (stdoutAvail > 0)
current_cond = current_cond | ChannelCondition.STDOUT_DATA;
if (stderrAvail > 0)
current_cond = current_cond | ChannelCondition.STDERR_DATA;
if (c.EOF)
current_cond = current_cond | ChannelCondition.EOF;
if (c.getExitStatus() != null)
current_cond = current_cond | ChannelCondition.EXIT_STATUS;
if (c.getExitSignal() != null)
current_cond = current_cond | ChannelCondition.EXIT_SIGNAL;
if (c.state == Channel.STATE_CLOSED)
return current_cond | ChannelCondition.CLOSED | ChannelCondition.EOF;
if ((current_cond & condition_mask) != 0)
return current_cond;
if (timeout > 0)
{
if (!end_time_set)
{
end_time = System.currentTimeMillis() + timeout;
end_time_set = true;
}
else
{
timeout = end_time - System.currentTimeMillis();
if (timeout <= 0)
return current_cond | ChannelCondition.TIMEOUT;
}
}
try
{
if (timeout > 0)
c.wait(timeout);
else
c.wait();
}
catch (InterruptedException e)
{
}
}
}
}
 | int function(Channel c, long timeout, int condition_mask) { long end_time = 0; boolean end_time_set = false; synchronized (c) { while (true) { int current_cond = 0; int stdoutAvail = c.stdoutWritepos - c.stdoutReadpos; int stderrAvail = c.stderrWritepos - c.stderrReadpos; if (stdoutAvail > 0) current_cond = current_cond | ChannelCondition.STDOUT_DATA; if (stderrAvail > 0) current_cond = current_cond | ChannelCondition.STDERR_DATA; if (c.EOF) current_cond = current_cond | ChannelCondition.EOF; if (c.getExitStatus() != null) current_cond = current_cond | ChannelCondition.EXIT_STATUS; if (c.getExitSignal() != null) current_cond = current_cond | ChannelCondition.EXIT_SIGNAL; if (c.state == Channel.STATE_CLOSED) return current_cond | ChannelCondition.CLOSED | ChannelCondition.EOF; if ((current_cond & condition_mask) != 0) return current_cond; if (timeout > 0) { if (!end_time_set) { end_time = System.currentTimeMillis() + timeout; end_time_set = true; } else { timeout = end_time - System.currentTimeMillis(); if (timeout <= 0) return current_cond | ChannelCondition.TIMEOUT; } } try { if (timeout > 0) c.wait(timeout); else c.wait(); } catch (InterruptedException e) { } } } } | /**
* Wait until for a condition.
*
* @param c
* Channel
* @param timeout
* in ms, 0 means no timeout.
* @param condition_mask
* minimum event mask
* @return all current events
*
*/ | Wait until for a condition | waitForCondition | {
"repo_name": "vx/connectbot",
"path": "src/com/trilead/ssh2/channel/ChannelManager.java",
"license": "apache-2.0",
"size": 47181
} | [
"com.trilead.ssh2.ChannelCondition"
] | import com.trilead.ssh2.ChannelCondition; | import com.trilead.ssh2.*; | [
"com.trilead.ssh2"
] | com.trilead.ssh2; | 669,670 |
@Test
public void testGetServicePrivacyPolicy() {
LOG.info("[[TEST]] testGetServicePrivacyPolicy: add and retrieve a privacy policy");
RequestPolicy addedPrivacyPolicy = null;
RequestPolicy retrievedPrivacyPolicy = null;
boolean deleteResult = false;
try {
addedPrivacyPolicy = privacyPolicyManager.updatePrivacyPolicy(servicePolicy);
retrievedPrivacyPolicy = privacyPolicyManager.getPrivacyPolicy(requestorService);
deleteResult = privacyPolicyManager.deletePrivacyPolicy(requestorService);
} catch (PrivacyException e) {
LOG.info("[Test PrivacyException] testGetServicePrivacyPolicy: add and retrieve a privacy policy", e);
fail("[Error testGetServicePrivacyPolicy] Privacy error");
} catch (Exception e) {
e.printStackTrace();
fail("[Error testDeletePrivacyPolicy] error");
}
assertNotNull("Privacy policy not added.", addedPrivacyPolicy);
assertNotNull("Privacy policy retrieved is null, but it should not.", retrievedPrivacyPolicy);
assertEquals("Expected a privacy policy, but it what not the good one.", retrievedPrivacyPolicy, addedPrivacyPolicy);
assertTrue("Privacy policy not deleted.", deleteResult);
}
| void function() { LOG.info(STR); RequestPolicy addedPrivacyPolicy = null; RequestPolicy retrievedPrivacyPolicy = null; boolean deleteResult = false; try { addedPrivacyPolicy = privacyPolicyManager.updatePrivacyPolicy(servicePolicy); retrievedPrivacyPolicy = privacyPolicyManager.getPrivacyPolicy(requestorService); deleteResult = privacyPolicyManager.deletePrivacyPolicy(requestorService); } catch (PrivacyException e) { LOG.info(STR, e); fail(STR); } catch (Exception e) { e.printStackTrace(); fail(STR); } assertNotNull(STR, addedPrivacyPolicy); assertNotNull(STR, retrievedPrivacyPolicy); assertEquals(STR, retrievedPrivacyPolicy, addedPrivacyPolicy); assertTrue(STR, deleteResult); } | /**
* Test method for {@link org.societies.privacytrust.privacyprotection.privacypolicy.PrivacyPolicyManager#getPrivacyPolicy(java.lang.String)}.
*/ | Test method for <code>org.societies.privacytrust.privacyprotection.privacypolicy.PrivacyPolicyManager#getPrivacyPolicy(java.lang.String)</code> | testGetServicePrivacyPolicy | {
"repo_name": "EPapadopoulou/PersoNIS",
"path": "privacy-policy-management/src/test/java/org/societies/privacytrust/privacyprotection/test/privacypolicy/PrivacyPolicyManagerDeprecatedTest.java",
"license": "bsd-2-clause",
"size": 41713
} | [
"org.junit.Assert",
"org.societies.api.privacytrust.privacy.model.PrivacyException",
"org.societies.api.privacytrust.privacy.model.privacypolicy.RequestPolicy"
] | import org.junit.Assert; import org.societies.api.privacytrust.privacy.model.PrivacyException; import org.societies.api.privacytrust.privacy.model.privacypolicy.RequestPolicy; | import org.junit.*; import org.societies.api.privacytrust.privacy.model.*; import org.societies.api.privacytrust.privacy.model.privacypolicy.*; | [
"org.junit",
"org.societies.api"
] | org.junit; org.societies.api; | 230,822 |
void updateSubscriptions(APIIdentifier identifier, String userId, int applicationId) throws APIManagementException; | void updateSubscriptions(APIIdentifier identifier, String userId, int applicationId) throws APIManagementException; | /**
* This method is to update the subscriber.
*
* @param identifier APIIdentifier
* @param userId user id
* @param applicationId Application Id
* @throws APIManagementException if failed to update subscription
*/ | This method is to update the subscriber | updateSubscriptions | {
"repo_name": "jaadds/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.api/src/main/java/org/wso2/carbon/apimgt/api/APIConsumer.java",
"license": "apache-2.0",
"size": 41368
} | [
"org.wso2.carbon.apimgt.api.model.APIIdentifier"
] | import org.wso2.carbon.apimgt.api.model.APIIdentifier; | import org.wso2.carbon.apimgt.api.model.*; | [
"org.wso2.carbon"
] | org.wso2.carbon; | 1,988,292 |
private static Principal getPrimaryPrincipal(final PrincipalElectionStrategy principalElectionStrategy,
final Set<Authentication> authentications,
final Map<String, List<Object>> principalAttributes) {
return principalElectionStrategy.nominate(new LinkedHashSet<>(authentications), principalAttributes);
} | static Principal function(final PrincipalElectionStrategy principalElectionStrategy, final Set<Authentication> authentications, final Map<String, List<Object>> principalAttributes) { return principalElectionStrategy.nominate(new LinkedHashSet<>(authentications), principalAttributes); } | /**
* Principal id is and must be enforced to be the same for all authentications.
* Based on that restriction, it's safe to grab the first principal id in the chain
* when composing the authentication chain for the caller.
*/ | Principal id is and must be enforced to be the same for all authentications. Based on that restriction, it's safe to grab the first principal id in the chain when composing the authentication chain for the caller | getPrimaryPrincipal | {
"repo_name": "apereo/cas",
"path": "core/cas-server-core-authentication-api/src/main/java/org/apereo/cas/authentication/DefaultAuthenticationResultBuilder.java",
"license": "apache-2.0",
"size": 8447
} | [
"java.util.LinkedHashSet",
"java.util.List",
"java.util.Map",
"java.util.Set",
"org.apereo.cas.authentication.principal.Principal"
] | import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apereo.cas.authentication.principal.Principal; | import java.util.*; import org.apereo.cas.authentication.principal.*; | [
"java.util",
"org.apereo.cas"
] | java.util; org.apereo.cas; | 2,749,551 |
public void dropStatisticsBefore(final Instant dropBefore) {
final Iterator<CalculationNodeStatistics> it = _nodeStatistics.values().iterator();
while (it.hasNext()) {
final CalculationNodeStatistics nodeStatistics = it.next();
if (nodeStatistics.getLastJobTime().isBefore(dropBefore)) {
it.remove();
}
}
} | void function(final Instant dropBefore) { final Iterator<CalculationNodeStatistics> it = _nodeStatistics.values().iterator(); while (it.hasNext()) { final CalculationNodeStatistics nodeStatistics = it.next(); if (nodeStatistics.getLastJobTime().isBefore(dropBefore)) { it.remove(); } } } | /**
* Cleanup the statistics deleting all information before a fixed instant.
*
* @param dropBefore the instant to delete before, not null
*/ | Cleanup the statistics deleting all information before a fixed instant | dropStatisticsBefore | {
"repo_name": "McLeodMoores/starling",
"path": "projects/engine/src/main/java/com/opengamma/engine/calcnode/stats/TotallingNodeStatisticsGatherer.java",
"license": "apache-2.0",
"size": 2439
} | [
"java.util.Iterator",
"org.threeten.bp.Instant"
] | import java.util.Iterator; import org.threeten.bp.Instant; | import java.util.*; import org.threeten.bp.*; | [
"java.util",
"org.threeten.bp"
] | java.util; org.threeten.bp; | 47,156 |
public void testBackToContext() throws Exception {
Realm realm = new RealmDefaultImpl("test", "test", "/", null);
String backToContextPath = PathUtil.backToContext(realm);
assertEquals("Incorrect backToContextPath for realm mount point: " + realm.getMountPoint(), "", backToContextPath);
realm = new RealmDefaultImpl("test", "test", "/test/usecase/", null);
backToContextPath = PathUtil.backToContext(realm);
assertEquals("Incorrect backToContextPath for realm mount point: " + realm.getMountPoint(), "../../", backToContextPath);
realm = new RealmDefaultImpl("test", "test", "/yanel-website/", null);
backToContextPath = PathUtil.backToContext(realm);
assertEquals("Incorrect backToContextPath for realm mount point: " + realm.getMountPoint(), "../", backToContextPath);
} | void function() throws Exception { Realm realm = new RealmDefaultImpl("testSTRtestSTR/", null); String backToContextPath = PathUtil.backToContext(realm); assertEquals(STR + realm.getMountPoint(), STRtestSTRtestSTR/test/usecase/", null); backToContextPath = PathUtil.backToContext(realm); assertEquals(STR + realm.getMountPoint(), "../../STRtestSTRtestSTR/yanel-website/", null); backToContextPath = PathUtil.backToContext(realm); assertEquals(STR + realm.getMountPoint(), "../", backToContextPath); } | /**
* Tests if the PathUtil.backToContext returns the correct amount of "../" for a given path.
*/ | Tests if the PathUtil.backToContext returns the correct amount of "../" for a given path | testBackToContext | {
"repo_name": "baszero/yanel",
"path": "src/test/junit/org/wyona/yanel/core/util/PathUtilTest.java",
"license": "apache-2.0",
"size": 4152
} | [
"org.wyona.yanel.core.map.Realm",
"org.wyona.yanel.core.map.RealmDefaultImpl",
"org.wyona.yanel.core.util.PathUtil"
] | import org.wyona.yanel.core.map.Realm; import org.wyona.yanel.core.map.RealmDefaultImpl; import org.wyona.yanel.core.util.PathUtil; | import org.wyona.yanel.core.map.*; import org.wyona.yanel.core.util.*; | [
"org.wyona.yanel"
] | org.wyona.yanel; | 607,292 |