method stringlengths 13-441k | clean_method stringlengths 7-313k | doc stringlengths 17-17.3k | comment stringlengths 3-1.42k | method_name stringlengths 1-273 | extra dict | imports sequence | imports_info stringlengths 19-34.8k | cluster_imports_info stringlengths 15-3.66k | libraries sequence | libraries_info stringlengths 6-661 | id int64 0-2.92M |
---|---|---|---|---|---|---|---|---|---|---|---|
public IndexShard getShard(int shardId) {
IndexShard indexShard = getShardOrNull(shardId);
if (indexShard == null) {
throw new ShardNotFoundException(new ShardId(index(), shardId));
}
return indexShard;
} | IndexShard function(int shardId) { IndexShard indexShard = getShardOrNull(shardId); if (indexShard == null) { throw new ShardNotFoundException(new ShardId(index(), shardId)); } return indexShard; } | /**
* Return the shard with the provided id, or throw an exception if it doesn't exist.
*/ | Return the shard with the provided id, or throw an exception if it doesn't exist | getShard | {
"repo_name": "gmarz/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/index/IndexService.java",
"license": "apache-2.0",
"size": 34850
} | [
"org.elasticsearch.index.shard.IndexShard",
"org.elasticsearch.index.shard.ShardId",
"org.elasticsearch.index.shard.ShardNotFoundException"
] | import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; | import org.elasticsearch.index.shard.*; | [
"org.elasticsearch.index"
] | org.elasticsearch.index; | 837,040 |
private void setupNonRootFragments(final Collection<PlanFragment> fragments) throws ForemanException {
if (fragments.isEmpty()) {
// nothing to do here
return;
}
final Multimap<DrillbitEndpoint, PlanFragment> leafFragmentMap = ArrayListMultimap.create();
final Multimap<DrillbitEndpoint, PlanFragment> intFragmentMap = ArrayListMultimap.create();
// record all fragments for status purposes.
for (final PlanFragment planFragment : fragments) {
logger.trace("Tracking intermediate remote node {} with data {}",
planFragment.getAssignment(), planFragment.getFragmentJson());
queryManager.addFragmentStatusTracker(planFragment, false);
if (planFragment.getLeafFragment()) {
leafFragmentMap.put(planFragment.getAssignment(), planFragment);
} else {
intFragmentMap.put(planFragment.getAssignment(), planFragment);
}
}
final int numIntFragments = intFragmentMap.keySet().size();
final ExtendedLatch endpointLatch = new ExtendedLatch(numIntFragments);
final FragmentSubmitFailures fragmentSubmitFailures = new FragmentSubmitFailures();
// send remote intermediate fragments
for (final DrillbitEndpoint ep : intFragmentMap.keySet()) {
sendRemoteFragments(ep, intFragmentMap.get(ep), endpointLatch, fragmentSubmitFailures);
}
final long timeout = RPC_WAIT_IN_MSECS_PER_FRAGMENT * numIntFragments;
if(numIntFragments > 0 && !endpointLatch.awaitUninterruptibly(timeout)){
long numberRemaining = endpointLatch.getCount();
throw UserException.connectionError()
.message(
"Exceeded timeout (%d) while waiting send intermediate work fragments to remote nodes. " +
"Sent %d and only heard response back from %d nodes.",
timeout, numIntFragments, numIntFragments - numberRemaining)
.build(logger);
}
// if any of the intermediate fragment submissions failed, fail the query
final List<FragmentSubmitFailures.SubmissionException> submissionExceptions = fragmentSubmitFailures.submissionExceptions;
if (submissionExceptions.size() > 0) {
Set<DrillbitEndpoint> endpoints = Sets.newHashSet();
StringBuilder sb = new StringBuilder();
boolean first = true;
for (FragmentSubmitFailures.SubmissionException e : fragmentSubmitFailures.submissionExceptions) {
DrillbitEndpoint endpoint = e.drillbitEndpoint;
if (endpoints.add(endpoint)) {
if (first) {
first = false;
} else {
sb.append(", ");
}
sb.append(endpoint.getAddress());
}
}
throw UserException.connectionError(submissionExceptions.get(0).rpcException)
.message("Error setting up remote intermediate fragment execution")
.addContext("Nodes with failures", sb.toString())
.build(logger);
}
injector.injectChecked(queryContext.getExecutionControls(), "send-fragments", ForemanException.class);
for (final DrillbitEndpoint ep : leafFragmentMap.keySet()) {
sendRemoteFragments(ep, leafFragmentMap.get(ep), null, null);
}
} | void function(final Collection<PlanFragment> fragments) throws ForemanException { if (fragments.isEmpty()) { return; } final Multimap<DrillbitEndpoint, PlanFragment> leafFragmentMap = ArrayListMultimap.create(); final Multimap<DrillbitEndpoint, PlanFragment> intFragmentMap = ArrayListMultimap.create(); for (final PlanFragment planFragment : fragments) { logger.trace(STR, planFragment.getAssignment(), planFragment.getFragmentJson()); queryManager.addFragmentStatusTracker(planFragment, false); if (planFragment.getLeafFragment()) { leafFragmentMap.put(planFragment.getAssignment(), planFragment); } else { intFragmentMap.put(planFragment.getAssignment(), planFragment); } } final int numIntFragments = intFragmentMap.keySet().size(); final ExtendedLatch endpointLatch = new ExtendedLatch(numIntFragments); final FragmentSubmitFailures fragmentSubmitFailures = new FragmentSubmitFailures(); for (final DrillbitEndpoint ep : intFragmentMap.keySet()) { sendRemoteFragments(ep, intFragmentMap.get(ep), endpointLatch, fragmentSubmitFailures); } final long timeout = RPC_WAIT_IN_MSECS_PER_FRAGMENT * numIntFragments; if(numIntFragments > 0 && !endpointLatch.awaitUninterruptibly(timeout)){ long numberRemaining = endpointLatch.getCount(); throw UserException.connectionError() .message( STR + STR, timeout, numIntFragments, numIntFragments - numberRemaining) .build(logger); } final List<FragmentSubmitFailures.SubmissionException> submissionExceptions = fragmentSubmitFailures.submissionExceptions; if (submissionExceptions.size() > 0) { Set<DrillbitEndpoint> endpoints = Sets.newHashSet(); StringBuilder sb = new StringBuilder(); boolean first = true; for (FragmentSubmitFailures.SubmissionException e : fragmentSubmitFailures.submissionExceptions) { DrillbitEndpoint endpoint = e.drillbitEndpoint; if (endpoints.add(endpoint)) { if (first) { first = false; } else { sb.append(STR); } sb.append(endpoint.getAddress()); } } throw UserException.connectionError(submissionExceptions.get(0).rpcException) .message(STR) .addContext(STR, sb.toString()) .build(logger); } injector.injectChecked(queryContext.getExecutionControls(), STR, ForemanException.class); for (final DrillbitEndpoint ep : leafFragmentMap.keySet()) { sendRemoteFragments(ep, leafFragmentMap.get(ep), null, null); } } | /**
* Set up the non-root fragments for execution. Some may be local, and some may be remote.
* Messages are sent immediately, so they may start returning data even before we complete this.
*
* @param fragments the fragments
* @throws ForemanException
*/ | Set up the non-root fragments for execution. Some may be local, and some may be remote. Messages are sent immediately, so they may start returning data even before we complete this | setupNonRootFragments | {
"repo_name": "ssriniva123/drill",
"path": "exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java",
"license": "apache-2.0",
"size": 49193
} | [
"com.google.common.collect.ArrayListMultimap",
"com.google.common.collect.Multimap",
"com.google.common.collect.Sets",
"java.util.Collection",
"java.util.List",
"java.util.Set",
"org.apache.drill.common.concurrent.ExtendedLatch",
"org.apache.drill.common.exceptions.UserException",
"org.apache.drill.exec.proto.BitControl",
"org.apache.drill.exec.proto.CoordinationProtos"
] | import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.google.common.collect.Sets; import java.util.Collection; import java.util.List; import java.util.Set; import org.apache.drill.common.concurrent.ExtendedLatch; import org.apache.drill.common.exceptions.UserException; import org.apache.drill.exec.proto.BitControl; import org.apache.drill.exec.proto.CoordinationProtos; | import com.google.common.collect.*; import java.util.*; import org.apache.drill.common.concurrent.*; import org.apache.drill.common.exceptions.*; import org.apache.drill.exec.proto.*; | [
"com.google.common",
"java.util",
"org.apache.drill"
] | com.google.common; java.util; org.apache.drill; | 277,996 |
@Test public void connectViaHttpsToUntrustedServer() throws IOException, InterruptedException {
server.useHttps(sslContext.getSocketFactory(), false);
server.enqueue(new MockResponse()); // unused
server.play();
HttpURLConnection connection = client.open(server.getUrl("/foo"));
try {
connection.getInputStream();
fail();
} catch (SSLHandshakeException expected) {
assertTrue(expected.getCause() instanceof CertificateException);
}
assertEquals(0, server.getRequestCount());
} | @Test void function() throws IOException, InterruptedException { server.useHttps(sslContext.getSocketFactory(), false); server.enqueue(new MockResponse()); server.play(); HttpURLConnection connection = client.open(server.getUrl("/foo")); try { connection.getInputStream(); fail(); } catch (SSLHandshakeException expected) { assertTrue(expected.getCause() instanceof CertificateException); } assertEquals(0, server.getRequestCount()); } | /**
* Verify that we don't retry connections on certificate verification errors.
*
* http://code.google.com/p/android/issues/detail?id=13178
*/ | Verify that we don't retry connections on certificate verification errors. HREF | connectViaHttpsToUntrustedServer | {
"repo_name": "c-ong/mirrored-okhttp",
"path": "src/test/java/com/squareup/okhttp/internal/http/URLConnectionTest.java",
"license": "apache-2.0",
"size": 105008
} | [
"com.google.mockwebserver.MockResponse",
"java.io.IOException",
"java.net.HttpURLConnection",
"java.security.cert.CertificateException",
"javax.net.ssl.SSLHandshakeException",
"org.junit.Assert",
"org.junit.Test"
] | import com.google.mockwebserver.MockResponse; import java.io.IOException; import java.net.HttpURLConnection; import java.security.cert.CertificateException; import javax.net.ssl.SSLHandshakeException; import org.junit.Assert; import org.junit.Test; | import com.google.mockwebserver.*; import java.io.*; import java.net.*; import java.security.cert.*; import javax.net.ssl.*; import org.junit.*; | [
"com.google.mockwebserver",
"java.io",
"java.net",
"java.security",
"javax.net",
"org.junit"
] | com.google.mockwebserver; java.io; java.net; java.security; javax.net; org.junit; | 637,147 |
public synchronized void removeActionListener(ActionListener listener) {
if (registeredActionListeners.contains(listener)){
registeredActionListeners.remove(listener);
}
} | synchronized void function(ActionListener listener) { if (registeredActionListeners.contains(listener)){ registeredActionListeners.remove(listener); } } | /**
* Removes the action listener.
*
* @param listener the listener
*/ | Removes the action listener | removeActionListener | {
"repo_name": "acm-uiuc/Tacchi",
"path": "src/org/mt4j/components/visibleComponents/widgets/menus/MTGLButton.java",
"license": "gpl-2.0",
"size": 2475
} | [
"java.awt.event.ActionListener"
] | import java.awt.event.ActionListener; | import java.awt.event.*; | [
"java.awt"
] | java.awt; | 2,674,502 |
public void addRecipient(String email, String name) {
addRecipient(new Contact(email, name));
} | void function(String email, String name) { addRecipient(new Contact(email, name)); } | /**
* Adds a {@link Contact} with the specified {@code email} address and {@code name} to the list of recipients for the message.
*
* @param email
* the email address of the {@code Contact} to receive the message
* @param name
* the name of the {@code Contact} to receive the message
* @throws IllegalArgumentException
* If {@code email} is {@code null} or empty.
*/ | Adds a <code>Contact</code> with the specified email address and name to the list of recipients for the message | addRecipient | {
"repo_name": "neocotic/mail-manager",
"path": "mail-manager/src/com/appspot/mailmanager/send/SendRequest.java",
"license": "mit",
"size": 11636
} | [
"com.appspot.mailmanager.Contact"
] | import com.appspot.mailmanager.Contact; | import com.appspot.mailmanager.*; | [
"com.appspot.mailmanager"
] | com.appspot.mailmanager; | 225,029 |
private void setFolderTitle(CmsResource res) {
try {
CmsProperty titleProperty = m_cms.readPropertyObject(res, CmsPropertyDefinition.PROPERTY_TITLE, false);
if (!titleProperty.isNullProperty()) {
titleProperty.setValue(m_site.getTitle(), CmsProperty.TYPE_INDIVIDUAL);
m_cms.writePropertyObject(res.getRootPath(), titleProperty);
} else {
LOG.error("Editing title property of site root resource was not possible");
getReport().println(
Messages.get().container(Messages.RPT_SITE_ERROR_TITLE_0),
I_CmsReport.FORMAT_ERROR);
}
} catch (CmsException e) {
LOG.error("Editing title property of site root resource was not possible", e);
getReport().println(Messages.get().container(Messages.RPT_SITE_ERROR_TITLE_0), I_CmsReport.FORMAT_ERROR);
getReport().println(e);
}
} | void function(CmsResource res) { try { CmsProperty titleProperty = m_cms.readPropertyObject(res, CmsPropertyDefinition.PROPERTY_TITLE, false); if (!titleProperty.isNullProperty()) { titleProperty.setValue(m_site.getTitle(), CmsProperty.TYPE_INDIVIDUAL); m_cms.writePropertyObject(res.getRootPath(), titleProperty); } else { LOG.error(STR); getReport().println( Messages.get().container(Messages.RPT_SITE_ERROR_TITLE_0), I_CmsReport.FORMAT_ERROR); } } catch (CmsException e) { LOG.error(STR, e); getReport().println(Messages.get().container(Messages.RPT_SITE_ERROR_TITLE_0), I_CmsReport.FORMAT_ERROR); getReport().println(e); } } | /**
* Updates title property of site root resource in case of copy from template.<p>
*
* @param res root resource to set title for
*/ | Updates title property of site root resource in case of copy from template | setFolderTitle | {
"repo_name": "alkacon/opencms-core",
"path": "src/org/opencms/ui/apps/sitemanager/CmsCreateSiteThread.java",
"license": "lgpl-2.1",
"size": 24285
} | [
"org.opencms.file.CmsProperty",
"org.opencms.file.CmsPropertyDefinition",
"org.opencms.file.CmsResource",
"org.opencms.main.CmsException",
"org.opencms.ui.apps.Messages"
] | import org.opencms.file.CmsProperty; import org.opencms.file.CmsPropertyDefinition; import org.opencms.file.CmsResource; import org.opencms.main.CmsException; import org.opencms.ui.apps.Messages; | import org.opencms.file.*; import org.opencms.main.*; import org.opencms.ui.apps.*; | [
"org.opencms.file",
"org.opencms.main",
"org.opencms.ui"
] | org.opencms.file; org.opencms.main; org.opencms.ui; | 463,790 |
void runSequenceAfter(HttpMessage msg, AbstractPlugin plugin); | void runSequenceAfter(HttpMessage msg, AbstractPlugin plugin); | /**
* A method that will be run after a message in a sequence has been scanned.
*
* @param msg The message that was scanned.
* @param plugin The current plugin.
*/ | A method that will be run after a message in a sequence has been scanned | runSequenceAfter | {
"repo_name": "psiinon/zaproxy",
"path": "zap/src/main/java/org/zaproxy/zap/extension/script/SequenceScript.java",
"license": "apache-2.0",
"size": 2000
} | [
"org.parosproxy.paros.core.scanner.AbstractPlugin",
"org.parosproxy.paros.network.HttpMessage"
] | import org.parosproxy.paros.core.scanner.AbstractPlugin; import org.parosproxy.paros.network.HttpMessage; | import org.parosproxy.paros.core.scanner.*; import org.parosproxy.paros.network.*; | [
"org.parosproxy.paros"
] | org.parosproxy.paros; | 904,856 |
public void createPartControl(Composite parent) {
createGraphicalViewer(parent);
}
| void function(Composite parent) { createGraphicalViewer(parent); } | /**
* Realizes the Editor by creating its Control.
* <P>
* WARNING: This method may or may not be called by the workbench prior to
* {@link #dispose()}.
*
* @param parent
* the parent composite
*/ | Realizes the Editor by creating its Control. <code>#dispose()</code> | createPartControl | {
"repo_name": "opensagres/xdocreport.eclipse",
"path": "rap/org.eclipse.gef/src/org/eclipse/gef/ui/parts/GraphicalEditor.java",
"license": "lgpl-2.1",
"size": 14380
} | [
"org.eclipse.swt.widgets.Composite"
] | import org.eclipse.swt.widgets.Composite; | import org.eclipse.swt.widgets.*; | [
"org.eclipse.swt"
] | org.eclipse.swt; | 367,168 |
private void updatePreemptionVariables() {
long now = clock.getTime();
lastPreemptionUpdateTime = now;
for (TaskType type: MAP_AND_REDUCE) {
for (PoolSchedulable sched: getPoolSchedulables(type)) {
if (!isStarvedForMinShare(sched)) {
sched.setLastTimeAtMinShare(now);
}
if (!isStarvedForFairShare(sched)) {
sched.setLastTimeAtHalfFairShare(now);
}
eventLog.log("PREEMPT_VARS", sched.getName(), type,
now - sched.getLastTimeAtMinShare(),
now - sched.getLastTimeAtHalfFairShare());
}
}
} | void function() { long now = clock.getTime(); lastPreemptionUpdateTime = now; for (TaskType type: MAP_AND_REDUCE) { for (PoolSchedulable sched: getPoolSchedulables(type)) { if (!isStarvedForMinShare(sched)) { sched.setLastTimeAtMinShare(now); } if (!isStarvedForFairShare(sched)) { sched.setLastTimeAtHalfFairShare(now); } eventLog.log(STR, sched.getName(), type, now - sched.getLastTimeAtMinShare(), now - sched.getLastTimeAtHalfFairShare()); } } } | /**
* Update the preemption fields for all PoolSchedulables, i.e. the times since
* each pool last was at its guaranteed share and at > 1/2 of its fair share
* for each type of task.
*
* Requires locks on both the JobTracker and the FairScheduler to be held.
*/ | Update the preemption fields for all PoolSchedulables, i.e. the times since each pool last was at its guaranteed share and at > 1/2 of its fair share for each type of task. Requires locks on both the JobTracker and the FairScheduler to be held | updatePreemptionVariables | {
"repo_name": "baggioss/hadoop-cdh3u5",
"path": "src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java",
"license": "apache-2.0",
"size": 41826
} | [
"org.apache.hadoop.mapreduce.TaskType"
] | import org.apache.hadoop.mapreduce.TaskType; | import org.apache.hadoop.mapreduce.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 106,407 |
@Test
public void testNextLong() {
final long result = RandomUtils.nextLong(33L, 42L);
assertTrue(result >= 33L && result < 42L);
}
| void function() { final long result = RandomUtils.nextLong(33L, 42L); assertTrue(result >= 33L && result < 42L); } | /**
* Tests next long range.
*/ | Tests next long range | testNextLong | {
"repo_name": "vanta/commons-lang",
"path": "src/test/java/org/apache/commons/lang3/RandomUtilsTest.java",
"license": "apache-2.0",
"size": 5607
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 1,548,607 |
public static OauthConfigEntity toOauthConfigEntity(final OauthTwoConfigDto oauthTwoConfigDto,
final OauthConfigEntity oauthConfigEntity) {
oauthConfigEntity.setUsername(oauthTwoConfigDto.getUsername());
try {
oauthConfigEntity.setPassword(GeneratePassword.encrypt(oauthTwoConfigDto.getPassword()));
} catch (Exception e) {
LOGGER.error("Password encryption failed for user: " + oauthTwoConfigDto.getUsername(), e);
}
oauthConfigEntity.setGenerateToken(UrlConverter.toUrlEntity(Url.OAUTH_GENERATE_TOKEN.getName(),
oauthTwoConfigDto.getOauthGenerateTokenUrl(), oauthConfigEntity.getGenerateToken()));
oauthConfigEntity.setRefreshToken(UrlConverter.toUrlEntity(Url.OAUTH_REFRESH_TOKEN.getName(),
oauthTwoConfigDto.getOauthRefreshTokenUrl(), oauthConfigEntity.getRefreshToken()));
oauthConfigEntity.setAuthType(AuthType.OAUTH_TWO.getAuthTypeEntity());
return oauthConfigEntity;
} | static OauthConfigEntity function(final OauthTwoConfigDto oauthTwoConfigDto, final OauthConfigEntity oauthConfigEntity) { oauthConfigEntity.setUsername(oauthTwoConfigDto.getUsername()); try { oauthConfigEntity.setPassword(GeneratePassword.encrypt(oauthTwoConfigDto.getPassword())); } catch (Exception e) { LOGGER.error(STR + oauthTwoConfigDto.getUsername(), e); } oauthConfigEntity.setGenerateToken(UrlConverter.toUrlEntity(Url.OAUTH_GENERATE_TOKEN.getName(), oauthTwoConfigDto.getOauthGenerateTokenUrl(), oauthConfigEntity.getGenerateToken())); oauthConfigEntity.setRefreshToken(UrlConverter.toUrlEntity(Url.OAUTH_REFRESH_TOKEN.getName(), oauthTwoConfigDto.getOauthRefreshTokenUrl(), oauthConfigEntity.getRefreshToken())); oauthConfigEntity.setAuthType(AuthType.OAUTH_TWO.getAuthTypeEntity()); return oauthConfigEntity; } | /**
* To oauth config entity.
*
* @param oauthTwoConfigDto the oauth two config dto
* @param oauthConfigEntity the oauth config entity
* @return the oauth config entity
*/ | To oauth config entity | toOauthConfigEntity | {
"repo_name": "jonvestal/open-kilda",
"path": "src-gui/src/main/java/org/openkilda/store/service/converter/OauthConfigConverter.java",
"license": "apache-2.0",
"size": 3324
} | [
"org.openkilda.store.auth.constants.AuthType",
"org.openkilda.store.auth.dao.entity.OauthConfigEntity",
"org.openkilda.store.common.constants.Url",
"org.openkilda.store.model.OauthTwoConfigDto",
"org.openkilda.utility.GeneratePassword"
] | import org.openkilda.store.auth.constants.AuthType; import org.openkilda.store.auth.dao.entity.OauthConfigEntity; import org.openkilda.store.common.constants.Url; import org.openkilda.store.model.OauthTwoConfigDto; import org.openkilda.utility.GeneratePassword; | import org.openkilda.store.auth.constants.*; import org.openkilda.store.auth.dao.entity.*; import org.openkilda.store.common.constants.*; import org.openkilda.store.model.*; import org.openkilda.utility.*; | [
"org.openkilda.store",
"org.openkilda.utility"
] | org.openkilda.store; org.openkilda.utility; | 298,431 |
public static void setStart(IteratorSetting is, long start, boolean startInclusive) {
is.addOption(START, LONG_PREFIX + Long.toString(start));
is.addOption(START_INCL, Boolean.toString(startInclusive));
} | static void function(IteratorSetting is, long start, boolean startInclusive) { is.addOption(START, LONG_PREFIX + Long.toString(start)); is.addOption(START_INCL, Boolean.toString(startInclusive)); } | /**
* A convenience method for setting the start timestamp accepted by the timestamp filter.
*
* @param is
* the iterator setting object to configure
* @param start
* the start timestamp
* @param startInclusive
* boolean indicating whether the start is inclusive
*/ | A convenience method for setting the start timestamp accepted by the timestamp filter | setStart | {
"repo_name": "wjsl/jaredcumulo",
"path": "core/src/main/java/org/apache/accumulo/core/iterators/user/TimestampFilter.java",
"license": "apache-2.0",
"size": 10758
} | [
"org.apache.accumulo.core.client.IteratorSetting"
] | import org.apache.accumulo.core.client.IteratorSetting; | import org.apache.accumulo.core.client.*; | [
"org.apache.accumulo"
] | org.apache.accumulo; | 2,519,699 |
EReference getvariantSection_TypeRef(); | EReference getvariantSection_TypeRef(); | /**
* Returns the meta object for the containment reference '{@link org.xtext.example.delphi.delphi.variantSection#getTypeRef <em>Type Ref</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference '<em>Type Ref</em>'.
* @see org.xtext.example.delphi.delphi.variantSection#getTypeRef()
* @see #getvariantSection()
* @generated
*/ | Returns the meta object for the containment reference '<code>org.xtext.example.delphi.delphi.variantSection#getTypeRef Type Ref</code>'. | getvariantSection_TypeRef | {
"repo_name": "adolfosbh/cs2as",
"path": "org.xtext.example.delphi/src-gen/org/xtext/example/delphi/delphi/DelphiPackage.java",
"license": "epl-1.0",
"size": 434880
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 416,186 |
public static void addToResult(IGObject node, Map<String, Collection<String>> result) {
String key = node.getClass().toString();
String value = node.getName();
Collection<String> labels = new ArrayList<String>();
if (result.containsKey(key)) {
labels = result.get(key);
}
labels.add(value);
result.put(key, labels);
} | static void function(IGObject node, Map<String, Collection<String>> result) { String key = node.getClass().toString(); String value = node.getName(); Collection<String> labels = new ArrayList<String>(); if (result.containsKey(key)) { labels = result.get(key); } labels.add(value); result.put(key, labels); } | /**
* Adds a graph object (which is in this case the label) to the result list, which is
* ordered by entity type (thus, all Event labels, all Activity labels are collected).
* @param node to add to result
* @param result the label map
*/ | Adds a graph object (which is in this case the label) to the result list, which is ordered by entity type (thus, all Event labels, all Activity labels are collected) | addToResult | {
"repo_name": "tobiashoppe/promnicat",
"path": "src/de/uni_potsdam/hpi/bpt/promnicat/utilityUnits/extractor/ProcessModelLabelExtractorUnit.java",
"license": "gpl-3.0",
"size": 4472
} | [
"java.util.ArrayList",
"java.util.Collection",
"java.util.Map",
"org.jbpt.hypergraph.abs.IGObject"
] | import java.util.ArrayList; import java.util.Collection; import java.util.Map; import org.jbpt.hypergraph.abs.IGObject; | import java.util.*; import org.jbpt.hypergraph.abs.*; | [
"java.util",
"org.jbpt.hypergraph"
] | java.util; org.jbpt.hypergraph; | 2,599,592 |
Scanner scn = new Scanner(System.in);
System.out.print("Enter word: ");
String word = scn.nextLine().trim();
while (!word.equals("")) {
System.out.printf("\t%s %s\n", EnglishUtils.pickIndefinite(word), word);
System.out.print("Enter word: ");
word = scn.nextLine().trim();
}
scn.close();
} | Scanner scn = new Scanner(System.in); System.out.print(STR); String word = scn.nextLine().trim(); while (!word.equals(STR\t%s %s\n", EnglishUtils.pickIndefinite(word), word); System.out.print(STR); word = scn.nextLine().trim(); } scn.close(); } | /**
* Main method.
*
* @param args
* Unused CLI args.
*/ | Main method | main | {
"repo_name": "bculkin2442/inflexion",
"path": "src/examples/java/bjc/inflexion/examples/IndefTester.java",
"license": "apache-2.0",
"size": 646
} | [
"java.util.Scanner"
] | import java.util.Scanner; | import java.util.*; | [
"java.util"
] | java.util; | 74,610 |
public de.darwinspl.preferences.resource.dwprofile.mopp.DwprofileInputStreamProcessor getInputStreamProcessor(InputStream inputStream); | de.darwinspl.preferences.resource.dwprofile.mopp.DwprofileInputStreamProcessor function(InputStream inputStream); | /**
* <p>
* Returns a processor for the given input stream.
* </p>
*
* @param inputStream the actual stream that provides the content of a resource
*
* @return a processor that pre-processes the input stream
*/ | Returns a processor for the given input stream. | getInputStreamProcessor | {
"repo_name": "HyVar/DarwinSPL",
"path": "plugins/de.darwinspl.preferences.resource.dwprofile/src-gen/de/darwinspl/preferences/resource/dwprofile/IDwprofileInputStreamProcessorProvider.java",
"license": "apache-2.0",
"size": 815
} | [
"java.io.InputStream"
] | import java.io.InputStream; | import java.io.*; | [
"java.io"
] | java.io; | 68,098 |
private void createLocation(DescribedObjectType dot, SmlLocation location) throws OwsExceptionReport {
dot.addNewLocation().addNewAbstractGeometry()
.set(CodingHelper.encodeObjectToXml(GmlConstants.NS_GML, location.getPoint()));
} | void function(DescribedObjectType dot, SmlLocation location) throws OwsExceptionReport { dot.addNewLocation().addNewAbstractGeometry() .set(CodingHelper.encodeObjectToXml(GmlConstants.NS_GML, location.getPoint())); } | /**
* Creates the location section of the SensorML description.
*
* @param location
* SOS location representation.
* @return XML SmlLocation2 element
* @throws OwsExceptionReport
* if an error occurs
*/ | Creates the location section of the SensorML description | createLocation | {
"repo_name": "shane-axiom/SOS",
"path": "coding/sensorML-v20/src/main/java/org/n52/sos/encode/SensorMLEncoderv20.java",
"license": "gpl-2.0",
"size": 68226
} | [
"net.opengis.sensorml.x20.DescribedObjectType",
"org.n52.sos.ogc.gml.GmlConstants",
"org.n52.sos.ogc.ows.OwsExceptionReport",
"org.n52.sos.ogc.sensorML.elements.SmlLocation",
"org.n52.sos.util.CodingHelper"
] | import net.opengis.sensorml.x20.DescribedObjectType; import org.n52.sos.ogc.gml.GmlConstants; import org.n52.sos.ogc.ows.OwsExceptionReport; import org.n52.sos.ogc.sensorML.elements.SmlLocation; import org.n52.sos.util.CodingHelper; | import net.opengis.sensorml.x20.*; import org.n52.sos.ogc.*; import org.n52.sos.ogc.gml.*; import org.n52.sos.ogc.ows.*; import org.n52.sos.util.*; | [
"net.opengis.sensorml",
"org.n52.sos"
] | net.opengis.sensorml; org.n52.sos; | 2,137,129 |
public RelDataType createNewRowType(RelDataTypeFactory factory) {
return factory.createStructType(types, fieldNames);
}
} | RelDataType function(RelDataTypeFactory factory) { return factory.createStructType(types, fieldNames); } } | /**
* Creates new row type based on stored types and field names.
*
* @param factory factory for data type descriptors.
* @return new row type
*/ | Creates new row type based on stored types and field names | createNewRowType | {
"repo_name": "superbstreak/drill",
"path": "exec/java-exec/src/main/java/org/apache/drill/exec/planner/common/DrillRelOptUtil.java",
"license": "apache-2.0",
"size": 20162
} | [
"org.apache.calcite.rel.type.RelDataType",
"org.apache.calcite.rel.type.RelDataTypeFactory"
] | import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; | import org.apache.calcite.rel.type.*; | [
"org.apache.calcite"
] | org.apache.calcite; | 228,342 |
public void addSlide(@NonNull Fragment fragment) {
if (isRtl())
fragments.add(0, fragment);
else
fragments.add(fragment);
if (isWizardMode) {
setOffScreenPageLimit(fragments.size());
}
mPagerAdapter.notifyDataSetChanged();
} | void function(@NonNull Fragment fragment) { if (isRtl()) fragments.add(0, fragment); else fragments.add(fragment); if (isWizardMode) { setOffScreenPageLimit(fragments.size()); } mPagerAdapter.notifyDataSetChanged(); } | /**
* Adds a new slide
*
* @param fragment Instance of Fragment which should be added as slide
*/ | Adds a new slide | addSlide | {
"repo_name": "PaoloRotolo/AppIntro",
"path": "appintro/src/main/java/com/github/paolorotolo/appintro/AppIntroBase.java",
"license": "apache-2.0",
"size": 41293
} | [
"androidx.annotation.NonNull",
"androidx.fragment.app.Fragment"
] | import androidx.annotation.NonNull; import androidx.fragment.app.Fragment; | import androidx.annotation.*; import androidx.fragment.app.*; | [
"androidx.annotation",
"androidx.fragment"
] | androidx.annotation; androidx.fragment; | 1,860,505 |
public Builder initializeAsFromCloseToOpen(IndexMetaData indexMetaData) {
return initializeEmpty(indexMetaData, new UnassignedInfo(UnassignedInfo.Reason.INDEX_REOPENED, null));
} | Builder function(IndexMetaData indexMetaData) { return initializeEmpty(indexMetaData, new UnassignedInfo(UnassignedInfo.Reason.INDEX_REOPENED, null)); } | /**
* Initializes a new empty index, as a result of opening a closed index.
*/ | Initializes a new empty index, as a result of opening a closed index | initializeAsFromCloseToOpen | {
"repo_name": "henakamaMSFT/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java",
"license": "apache-2.0",
"size": 25434
} | [
"org.elasticsearch.cluster.metadata.IndexMetaData"
] | import org.elasticsearch.cluster.metadata.IndexMetaData; | import org.elasticsearch.cluster.metadata.*; | [
"org.elasticsearch.cluster"
] | org.elasticsearch.cluster; | 799,333 |
List<String> getSkus(); | List<String> getSkus(); | /**
* List of SKUs
*
* @return list of SKUs
*/ | List of SKUs | getSkus | {
"repo_name": "midhunhk/ae-apps-library",
"path": "modules/billing-client/src/main/java/com/ae/apps/lib/billingclient/AeBillingClient.java",
"license": "apache-2.0",
"size": 6946
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,879,661 |
protected static boolean containsNoneSpecialChars(String string) {
return StringUtils.containsNone(string, specialChars);
} | static boolean function(String string) { return StringUtils.containsNone(string, specialChars); } | /**
* Checks that a string does not contain special characters (only
* alphanumeric ones).
*
* @param string
* the string to check
* @return <code>true</code> if the strings contains a special character
*/ | Checks that a string does not contain special characters (only alphanumeric ones) | containsNoneSpecialChars | {
"repo_name": "cfscosta/fenix",
"path": "src/main/java/org/fenixedu/academic/util/StringFormatter.java",
"license": "lgpl-3.0",
"size": 11302
} | [
"org.apache.commons.lang.StringUtils"
] | import org.apache.commons.lang.StringUtils; | import org.apache.commons.lang.*; | [
"org.apache.commons"
] | org.apache.commons; | 117,993 |
public boolean nodeExists(Path path) {
try {
sanityCheck();
NodeId id = hierMgr.resolveNodePath(path);
return (id != null) && itemExists(id, path);
} catch (RepositoryException re) {
return false;
}
} | boolean function(Path path) { try { sanityCheck(); NodeId id = hierMgr.resolveNodePath(path); return (id != null) && itemExists(id, path); } catch (RepositoryException re) { return false; } } | /**
* Checks whether a node exists at the specified path.
*
* @param path path to the node to be checked
* @return true if a node exists at the specified path
*/ | Checks whether a node exists at the specified path | nodeExists | {
"repo_name": "afilimonov/jackrabbit",
"path": "jackrabbit-core/src/main/java/org/apache/jackrabbit/core/ItemManager.java",
"license": "apache-2.0",
"size": 48909
} | [
"javax.jcr.RepositoryException",
"org.apache.jackrabbit.core.id.NodeId",
"org.apache.jackrabbit.spi.Path"
] | import javax.jcr.RepositoryException; import org.apache.jackrabbit.core.id.NodeId; import org.apache.jackrabbit.spi.Path; | import javax.jcr.*; import org.apache.jackrabbit.core.id.*; import org.apache.jackrabbit.spi.*; | [
"javax.jcr",
"org.apache.jackrabbit"
] | javax.jcr; org.apache.jackrabbit; | 2,346,478 |
public void testCheckSharingConstant() {
// positive
assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_OWNER) );
assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_PRIVATE) );
assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_PUBLIC) );
assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_SHARED) );
assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_VISIBLE) );
// negative
// exception
try {
EvalUtils.validateSharingConstant("INVALID");
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e);
}
try {
EvalUtils.validateSharingConstant("");
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e);
}
try {
EvalUtils.validateSharingConstant(null);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e);
}
} | void function() { assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_OWNER) ); assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_PRIVATE) ); assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_PUBLIC) ); assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_SHARED) ); assertTrue( EvalUtils.validateSharingConstant(EvalConstants.SHARING_VISIBLE) ); try { EvalUtils.validateSharingConstant(STR); fail(STR); } catch (IllegalArgumentException e) { assertNotNull(e); } try { EvalUtils.validateSharingConstant(""); fail(STR); } catch (IllegalArgumentException e) { assertNotNull(e); } try { EvalUtils.validateSharingConstant(null); fail(STR); } catch (IllegalArgumentException e) { assertNotNull(e); } } | /**
* Test method for {@link org.sakaiproject.evaluation.utils.EvalUtils#validateSharingConstant(java.lang.String)}.
*/ | Test method for <code>org.sakaiproject.evaluation.utils.EvalUtils#validateSharingConstant(java.lang.String)</code> | testCheckSharingConstant | {
"repo_name": "buckett/evaluation",
"path": "impl/src/test/org/sakaiproject/evaluation/utils/EvalUtilsTest.java",
"license": "apache-2.0",
"size": 29802
} | [
"org.sakaiproject.evaluation.constant.EvalConstants",
"org.sakaiproject.evaluation.utils.EvalUtils"
] | import org.sakaiproject.evaluation.constant.EvalConstants; import org.sakaiproject.evaluation.utils.EvalUtils; | import org.sakaiproject.evaluation.constant.*; import org.sakaiproject.evaluation.utils.*; | [
"org.sakaiproject.evaluation"
] | org.sakaiproject.evaluation; | 1,142,543 |
private JPanel createSelectionPane()
{
JPanel buttonPanel = new JPanel();
buttonPanel.setLayout(new BoxLayout(buttonPanel, BoxLayout.Y_AXIS));
buttonPanel.add(Box.createVerticalStrut(30));
buttonPanel.add(addButton);
buttonPanel.add(Box.createVerticalStrut(10));
buttonPanel.add(removeButton);
buttonPanel.add(Box.createVerticalStrut(10));
buttonPanel.add(addAllButton);
buttonPanel.add(Box.createVerticalStrut(10));
buttonPanel.add(removeAllButton);
buttonPanel.add(Box.createVerticalStrut(10));
return buttonPanel;
}
| JPanel function() { JPanel buttonPanel = new JPanel(); buttonPanel.setLayout(new BoxLayout(buttonPanel, BoxLayout.Y_AXIS)); buttonPanel.add(Box.createVerticalStrut(30)); buttonPanel.add(addButton); buttonPanel.add(Box.createVerticalStrut(10)); buttonPanel.add(removeButton); buttonPanel.add(Box.createVerticalStrut(10)); buttonPanel.add(addAllButton); buttonPanel.add(Box.createVerticalStrut(10)); buttonPanel.add(removeAllButton); buttonPanel.add(Box.createVerticalStrut(10)); return buttonPanel; } | /**
* Builds and lays out the buttons used to select tags.
*
* @return See above.
*/ | Builds and lays out the buttons used to select tags | createSelectionPane | {
"repo_name": "joshmoore/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/agents/util/SelectionWizardUI.java",
"license": "gpl-2.0",
"size": 18116
} | [
"javax.swing.Box",
"javax.swing.BoxLayout",
"javax.swing.JPanel"
] | import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.JPanel; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 710,816 |
@Test
public void testBaselineCollectCrd() throws Exception {
Ignite ignite = startGrids(2);
assertFalse(ignite.cluster().active());
ignite.cluster().active(true);
injectTestSystemOut();
assertEquals(EXIT_CODE_OK, execute("--baseline", "--port", "11212"));
String crdStr = findCrdInfo();
assertEquals("(Coordinator: ConsistentId=" +
grid(0).cluster().localNode().consistentId() + ", Order=1)", crdStr);
stopGrid(0);
assertEquals(EXIT_CODE_OK, execute("--baseline", "--port", "11212"));
crdStr = findCrdInfo();
assertEquals("(Coordinator: ConsistentId=" +
grid(1).cluster().localNode().consistentId() + ", Order=2)", crdStr);
startGrid(0);
assertEquals(EXIT_CODE_OK, execute("--baseline", "--port", "11212"));
crdStr = findCrdInfo();
assertEquals("(Coordinator: ConsistentId=" +
grid(1).cluster().localNode().consistentId() + ", Order=2)", crdStr);
stopGrid(1);
assertEquals(EXIT_CODE_OK, execute("--baseline", "--port", "11211"));
crdStr = findCrdInfo();
assertEquals("(Coordinator: ConsistentId=" +
grid(0).cluster().localNode().consistentId() + ", Order=4)", crdStr);
} | void function() throws Exception { Ignite ignite = startGrids(2); assertFalse(ignite.cluster().active()); ignite.cluster().active(true); injectTestSystemOut(); assertEquals(EXIT_CODE_OK, execute(STR, STR, "11212")); String crdStr = findCrdInfo(); assertEquals(STR + grid(0).cluster().localNode().consistentId() + STR, crdStr); stopGrid(0); assertEquals(EXIT_CODE_OK, execute(STR, STR, "11212")); crdStr = findCrdInfo(); assertEquals(STR + grid(1).cluster().localNode().consistentId() + STR, crdStr); startGrid(0); assertEquals(EXIT_CODE_OK, execute(STR, STR, "11212")); crdStr = findCrdInfo(); assertEquals(STR + grid(1).cluster().localNode().consistentId() + STR, crdStr); stopGrid(1); assertEquals(EXIT_CODE_OK, execute(STR, STR, "11211")); crdStr = findCrdInfo(); assertEquals(STR + grid(0).cluster().localNode().consistentId() + STR, crdStr); } | /**
* Test baseline collect works via control.sh
*
* @throws Exception If failed.
*/ | Test baseline collect works via control.sh | testBaselineCollectCrd | {
"repo_name": "andrey-kuznetsov/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/util/GridCommandHandlerTest.java",
"license": "apache-2.0",
"size": 67072
} | [
"org.apache.ignite.Ignite"
] | import org.apache.ignite.Ignite; | import org.apache.ignite.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 2,718,175 |
private boolean hasEndPointWithPort(final PoolImpl pool, final int port) {
EndpointManager endpointManager = pool.getEndpointManager();
final Set<ServerLocationAndMemberId> slAndMemberIds = endpointManager.getEndpointMap().keySet();
return slAndMemberIds.stream()
.anyMatch(slAndMemberId -> slAndMemberId.getServerLocation().getPort() == port);
} | boolean function(final PoolImpl pool, final int port) { EndpointManager endpointManager = pool.getEndpointManager(); final Set<ServerLocationAndMemberId> slAndMemberIds = endpointManager.getEndpointMap().keySet(); return slAndMemberIds.stream() .anyMatch(slAndMemberId -> slAndMemberId.getServerLocation().getPort() == port); } | /**
* Check to see if a client is connected to an endpoint with a specific port
*/ | Check to see if a client is connected to an endpoint with a specific port | hasEndPointWithPort | {
"repo_name": "smgoller/geode",
"path": "geode-core/src/distributedTest/java/org/apache/geode/internal/cache/tier/sockets/UpdatePropagationDUnitTest.java",
"license": "apache-2.0",
"size": 12609
} | [
"java.util.Set",
"org.apache.geode.cache.client.internal.EndpointManager",
"org.apache.geode.cache.client.internal.PoolImpl",
"org.apache.geode.distributed.internal.ServerLocationAndMemberId"
] | import java.util.Set; import org.apache.geode.cache.client.internal.EndpointManager; import org.apache.geode.cache.client.internal.PoolImpl; import org.apache.geode.distributed.internal.ServerLocationAndMemberId; | import java.util.*; import org.apache.geode.cache.client.internal.*; import org.apache.geode.distributed.internal.*; | [
"java.util",
"org.apache.geode"
] | java.util; org.apache.geode; | 1,190,080 |
public static String toPDB(Chain chain){
StringBuffer w = new StringBuffer();
int nrGroups = chain.getAtomLength();
for ( int h=0; h<nrGroups;h++){
Group g= chain.getAtomGroup(h);
toPDB(g,w);
}
return w.toString();
} | static String function(Chain chain){ StringBuffer w = new StringBuffer(); int nrGroups = chain.getAtomLength(); for ( int h=0; h<nrGroups;h++){ Group g= chain.getAtomGroup(h); toPDB(g,w); } return w.toString(); } | /**
* Convert a Chain object to PDB representation
*
* @param chain
* @return
*/ | Convert a Chain object to PDB representation | toPDB | {
"repo_name": "pwrose/biojava",
"path": "biojava-structure/src/main/java/org/biojava/nbio/structure/io/FileConvert.java",
"license": "lgpl-2.1",
"size": 17941
} | [
"org.biojava.nbio.structure.Chain",
"org.biojava.nbio.structure.Group"
] | import org.biojava.nbio.structure.Chain; import org.biojava.nbio.structure.Group; | import org.biojava.nbio.structure.*; | [
"org.biojava.nbio"
] | org.biojava.nbio; | 787,808 |
public static QuoteId of(StandardId standardId, FieldName fieldName) {
return new QuoteId(standardId, fieldName, ObservableSource.NONE);
} | static QuoteId function(StandardId standardId, FieldName fieldName) { return new QuoteId(standardId, fieldName, ObservableSource.NONE); } | /**
* Obtains an instance used to obtain an observable value.
* <p>
* The market data source is {@link ObservableSource#NONE}.
*
* @param standardId the standard identifier of the data in the underlying data provider
* @param fieldName the name of the field in the market data record holding the data
* @return the identifier
*/ | Obtains an instance used to obtain an observable value. The market data source is <code>ObservableSource#NONE</code> | of | {
"repo_name": "jmptrader/Strata",
"path": "modules/market/src/main/java/com/opengamma/strata/market/observable/QuoteId.java",
"license": "apache-2.0",
"size": 15494
} | [
"com.opengamma.strata.basics.StandardId",
"com.opengamma.strata.data.FieldName",
"com.opengamma.strata.data.ObservableSource"
] | import com.opengamma.strata.basics.StandardId; import com.opengamma.strata.data.FieldName; import com.opengamma.strata.data.ObservableSource; | import com.opengamma.strata.basics.*; import com.opengamma.strata.data.*; | [
"com.opengamma.strata"
] | com.opengamma.strata; | 2,676,274 |
Accessor result;
if (AbstractStruct.class.isAssignableFrom(method.getReturnType())) {
if (!accessorType.isGetter(method.getName())) {
throw new AssertionError("Unsupported composite accessor: " + accessorType);
}
result = new GetterComposite(method);
} else {
String name = method.getName();
if (accessorType.isGetter(name)) {
result = new GetterDirect(method);
} else if (accessorType.isSetter(name)) {
result = new SetterDirect(method);
} else if (accessorType == AccessorType.GETTER_INDEXED) {
result = new GetterIndexed(method);
} else if (accessorType == AccessorType.SETTER_INDEXED) {
result = new SetterIndexed(method);
} else {
throw new AssertionError(String.format(
"Unsupported accessor type %s for method %s", accessorType, name));
}
}
return result;
} | Accessor result; if (AbstractStruct.class.isAssignableFrom(method.getReturnType())) { if (!accessorType.isGetter(method.getName())) { throw new AssertionError(STR + accessorType); } result = new GetterComposite(method); } else { String name = method.getName(); if (accessorType.isGetter(name)) { result = new GetterDirect(method); } else if (accessorType.isSetter(name)) { result = new SetterDirect(method); } else if (accessorType == AccessorType.GETTER_INDEXED) { result = new GetterIndexed(method); } else if (accessorType == AccessorType.SETTER_INDEXED) { result = new SetterIndexed(method); } else { throw new AssertionError(String.format( STR, accessorType, name)); } } return result; } | /**
* Creates {@link AbstractAccessor} instance based on accessor type.
*
* @param accessorType Accessor type - getter or setter, indexed or non-indexed.
* @param method Accessor method declaration.
* @return Accessor type instance.
*/ | Creates <code>AbstractAccessor</code> instance based on accessor type | of | {
"repo_name": "uliashkevich/native-struct",
"path": "src/main/java/net/nativestruct/implementation/field/AbstractAccessor.java",
"license": "mit",
"size": 10993
} | [
"net.nativestruct.AbstractStruct",
"net.nativestruct.AccessorType"
] | import net.nativestruct.AbstractStruct; import net.nativestruct.AccessorType; | import net.nativestruct.*; | [
"net.nativestruct"
] | net.nativestruct; | 2,807,375 |
return false;
}
public void beforeDetach(@NotNull Module module) {} | return false; } public void beforeDetach(@NotNull Module module) {} | /**
* Called to attach the directory projectDir as a module to the specified project.
*
* @param project the project to attach the directory to.
* @param projectDir the directory to attach.
* @param callback the callback to call on successful attachment
* @return true if the attach succeeded, false if the project should be opened in a new window.
*/ | Called to attach the directory projectDir as a module to the specified project | attachToProject | {
"repo_name": "siosio/intellij-community",
"path": "platform/platform-api/src/com/intellij/projectImport/ProjectAttachProcessor.java",
"license": "apache-2.0",
"size": 1298
} | [
"com.intellij.openapi.module.Module",
"org.jetbrains.annotations.NotNull"
] | import com.intellij.openapi.module.Module; import org.jetbrains.annotations.NotNull; | import com.intellij.openapi.module.*; import org.jetbrains.annotations.*; | [
"com.intellij.openapi",
"org.jetbrains.annotations"
] | com.intellij.openapi; org.jetbrains.annotations; | 18,139 |
@Test
public void testClassImplements() throws Exception {
ResourceSet setA = TestUtil.extractModel(BASE_PATH + "classimplements/" + "a");
ResourceSet setB = TestUtil.extractModel(BASE_PATH + "classimplements/" + "b");
JaMoPPDiffer differ = new JaMoPPDiffer();
Map<String, String> diffOptions = TestUtil.getDiffOptions();
diffOptions.put(JaMoPPDiffer.OPTION_JAMOPP_IGNORE_FILES, "");
Comparison comparison = differ.doDiff(setA, setB, diffOptions);
EList<Diff> differences = comparison.getDifferences();
assertThat("Wrong number of differences", differences.size(), is(4));
for (Diff diffElement : differences) {
if (diffElement instanceof ImplementsChange) {
ImplementsChange implementsChange = ((ImplementsChange) diffElement);
NamedElement target = (NamedElement) implementsChange.getChangedReference().getTarget();
if (implementsChange.getKind() == DifferenceKind.ADD) {
assertThat(target.getName(), equalTo("InterfaceA"));
} else if (implementsChange.getKind() == DifferenceKind.DELETE) {
assertThat(target.getName(), equalTo("InterfaceB"));
} else {
fail("Unexpected Difference Kind: " + implementsChange.getKind());
}
} else {
fail("No other diff elements than ExtendsChange should have been detected: " + diffElement);
}
}
} | void function() throws Exception { ResourceSet setA = TestUtil.extractModel(BASE_PATH + STR + "a"); ResourceSet setB = TestUtil.extractModel(BASE_PATH + STR + "b"); JaMoPPDiffer differ = new JaMoPPDiffer(); Map<String, String> diffOptions = TestUtil.getDiffOptions(); diffOptions.put(JaMoPPDiffer.OPTION_JAMOPP_IGNORE_FILES, STRWrong number of differencesSTRInterfaceASTRInterfaceBSTRUnexpected Difference Kind: STRNo other diff elements than ExtendsChange should have been detected: " + diffElement); } } } | /**
* Test method to detect changes in the class and package declarations.
*
* @throws Exception
* Identifies a failed diffing.
*/ | Test method to detect changes in the class and package declarations | testClassImplements | {
"repo_name": "kopl/SPLevo",
"path": "JaMoPPCartridge/org.splevo.jamopp.diffing.tests/src/org/splevo/jamopp/diffing/ImplementsTest.java",
"license": "epl-1.0",
"size": 3369
} | [
"java.util.Map",
"org.eclipse.emf.ecore.resource.ResourceSet"
] | import java.util.Map; import org.eclipse.emf.ecore.resource.ResourceSet; | import java.util.*; import org.eclipse.emf.ecore.resource.*; | [
"java.util",
"org.eclipse.emf"
] | java.util; org.eclipse.emf; | 1,556,620 |
protected List<AttributeMetaData> getUnionAttributesMetaData(ExampleSetMetaData emd1, ExampleSetMetaData emd2) {
if (!leftInput.isConnected() || !rightInput.isConnected()) {
return new LinkedList<>();
}
if (this.isIdNeeded()) {
AttributeMetaData id1 = emd1.getSpecial(Attributes.ID_NAME);
AttributeMetaData id2 = emd2.getSpecial(Attributes.ID_NAME);
// sanity checks
// if (id1 == null) leftInput.addError(new SimpleMetaDataError(Severity.ERROR,
// leftInput, "missing_id"));
// if (id2 == null) rightInput.addError(new SimpleMetaDataError(Severity.ERROR,
// rightInput, "missing_id"));
if (id1 == null || id2 == null) {
return new LinkedList<>();
}
if (!Ontology.ATTRIBUTE_VALUE_TYPE.isA(id1.getValueType(), id2.getValueType())
&& !Ontology.ATTRIBUTE_VALUE_TYPE.isA(id2.getValueType(), id1.getValueType())) {
// this.addError(new SimpleProcessSetupError(Severity.ERROR, getPortOwner(),
// "attributes_type_mismatch", id1.getName(), "left",
// id2.getName(), "right"));
return new LinkedList<>();
}
}
Set<Pair<Integer, AttributeMetaData>> excludedAttributes = new HashSet<>();
try {
excludedAttributes = getExcludedAttributesMD(emd1, emd2);
} catch (OperatorException e) {
excludedAttributes = Collections.emptySet();
}
// adding attributes
List<AttributeMetaData> unionAttributeList = new LinkedList<>();
List<String> unionSpecialRoleList = new LinkedList<>();
for (AttributeMetaData attributeMD : emd1.getAllAttributes()) {
if (!excludedAttributes.contains(new Pair<>(AttributeSource.FIRST_SOURCE, attributeMD))) {
unionAttributeList.add(attributeMD.clone());
if (attributeMD.isSpecial()) {
unionSpecialRoleList.add(attributeMD.getRole());
}
}
}
for (AttributeMetaData attributeMD : emd2.getAllAttributes()) {
if (!excludedAttributes.contains(new Pair<>(AttributeSource.SECOND_SOURCE, attributeMD))) {
AttributeMetaData cloneAttribute = attributeMD.clone();
if (containsAttributeMD(unionAttributeList, attributeMD)) { // in list...
if (!getParameterAsBoolean(PARAMETER_REMOVE_DOUBLE_ATTRIBUTES)) { // ... but
// should
// not be
// removed
// -->
// rename
if (attributeMD.isSpecial() && unionSpecialRoleList.contains(attributeMD.getRole())) {
// this special attribute's role already exists
rightInput.addError(new SimpleMetaDataError(Severity.WARNING, rightInput,
"already_contains_role", attributeMD.getRole()));
continue;
}
cloneAttribute.setName(cloneAttribute.getName() + "_from_ES2");
if (containsAttributeMD(unionAttributeList, cloneAttribute)) {
cloneAttribute.setName(cloneAttribute.getName() + "_from_ES2");
}
unionAttributeList.add(cloneAttribute);
} // else do nothing, i.e. remove
} else { // not in list --> add
if (attributeMD.isSpecial() && unionSpecialRoleList.contains(attributeMD.getRole())) {
// this special attribute's role already exists
rightInput.addError(new SimpleMetaDataError(Severity.WARNING, rightInput, "already_contains_role",
attributeMD.getRole()));
continue;
}
unionAttributeList.add(cloneAttribute);
}
}
}
// special attributes check
for (AttributeMetaData attributeMD : unionAttributeList) {
if (attributeMD.isSpecial()) {
}
}
return unionAttributeList;
}
| List<AttributeMetaData> function(ExampleSetMetaData emd1, ExampleSetMetaData emd2) { if (!leftInput.isConnected() !rightInput.isConnected()) { return new LinkedList<>(); } if (this.isIdNeeded()) { AttributeMetaData id1 = emd1.getSpecial(Attributes.ID_NAME); AttributeMetaData id2 = emd2.getSpecial(Attributes.ID_NAME); if (id1 == null id2 == null) { return new LinkedList<>(); } if (!Ontology.ATTRIBUTE_VALUE_TYPE.isA(id1.getValueType(), id2.getValueType()) && !Ontology.ATTRIBUTE_VALUE_TYPE.isA(id2.getValueType(), id1.getValueType())) { return new LinkedList<>(); } } Set<Pair<Integer, AttributeMetaData>> excludedAttributes = new HashSet<>(); try { excludedAttributes = getExcludedAttributesMD(emd1, emd2); } catch (OperatorException e) { excludedAttributes = Collections.emptySet(); } List<AttributeMetaData> unionAttributeList = new LinkedList<>(); List<String> unionSpecialRoleList = new LinkedList<>(); for (AttributeMetaData attributeMD : emd1.getAllAttributes()) { if (!excludedAttributes.contains(new Pair<>(AttributeSource.FIRST_SOURCE, attributeMD))) { unionAttributeList.add(attributeMD.clone()); if (attributeMD.isSpecial()) { unionSpecialRoleList.add(attributeMD.getRole()); } } } for (AttributeMetaData attributeMD : emd2.getAllAttributes()) { if (!excludedAttributes.contains(new Pair<>(AttributeSource.SECOND_SOURCE, attributeMD))) { AttributeMetaData cloneAttribute = attributeMD.clone(); if (containsAttributeMD(unionAttributeList, attributeMD)) { if (!getParameterAsBoolean(PARAMETER_REMOVE_DOUBLE_ATTRIBUTES)) { if (attributeMD.isSpecial() && unionSpecialRoleList.contains(attributeMD.getRole())) { rightInput.addError(new SimpleMetaDataError(Severity.WARNING, rightInput, STR, attributeMD.getRole())); continue; } cloneAttribute.setName(cloneAttribute.getName() + STR); if (containsAttributeMD(unionAttributeList, cloneAttribute)) { cloneAttribute.setName(cloneAttribute.getName() + STR); } unionAttributeList.add(cloneAttribute); } } else { if (attributeMD.isSpecial() && unionSpecialRoleList.contains(attributeMD.getRole())) { rightInput.addError(new SimpleMetaDataError(Severity.WARNING, rightInput, STR, attributeMD.getRole())); continue; } unionAttributeList.add(cloneAttribute); } } } for (AttributeMetaData attributeMD : unionAttributeList) { if (attributeMD.isSpecial()) { } } return unionAttributeList; } | /**
* Returns a list of AttributeMetaData which contains the correctly joined MetaData arising from
* both input ports.
*/ | Returns a list of AttributeMetaData which contains the correctly joined MetaData arising from both input ports | getUnionAttributesMetaData | {
"repo_name": "brtonnies/rapidminer-studio",
"path": "src/main/java/com/rapidminer/operator/preprocessing/join/AbstractExampleSetJoin.java",
"license": "agpl-3.0",
"size": 17262
} | [
"com.rapidminer.example.Attributes",
"com.rapidminer.operator.OperatorException",
"com.rapidminer.operator.ProcessSetupError",
"com.rapidminer.operator.ports.metadata.AttributeMetaData",
"com.rapidminer.operator.ports.metadata.ExampleSetMetaData",
"com.rapidminer.operator.ports.metadata.SimpleMetaDataError",
"com.rapidminer.tools.Ontology",
"com.rapidminer.tools.container.Pair",
"java.util.Collections",
"java.util.HashSet",
"java.util.LinkedList",
"java.util.List",
"java.util.Set"
] | import com.rapidminer.example.Attributes; import com.rapidminer.operator.OperatorException; import com.rapidminer.operator.ProcessSetupError; import com.rapidminer.operator.ports.metadata.AttributeMetaData; import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; import com.rapidminer.operator.ports.metadata.SimpleMetaDataError; import com.rapidminer.tools.Ontology; import com.rapidminer.tools.container.Pair; import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; | import com.rapidminer.example.*; import com.rapidminer.operator.*; import com.rapidminer.operator.ports.metadata.*; import com.rapidminer.tools.*; import com.rapidminer.tools.container.*; import java.util.*; | [
"com.rapidminer.example",
"com.rapidminer.operator",
"com.rapidminer.tools",
"java.util"
] | com.rapidminer.example; com.rapidminer.operator; com.rapidminer.tools; java.util; | 1,301,009 |
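The record above revolves around unioning two attribute lists and renaming name clashes coming from the second input; the real rename suffix is masked as STR in the cleaned method, so the sketch below is a toy illustration of the idea only, with a purely hypothetical "_2" suffix.

```java
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class UnionNamesSketch {
    // Unions two attribute-name lists; a name from the second list that clashes
    // with an existing one gets a placeholder suffix until it is unique.
    static List<String> union(List<String> first, List<String> second) {
        Set<String> seen = new LinkedHashSet<>(first);
        List<String> result = new ArrayList<>(first);
        for (String name : second) {
            String candidate = name;
            while (seen.contains(candidate)) {
                candidate = candidate + "_2"; // hypothetical suffix, not the real one
            }
            seen.add(candidate);
            result.add(candidate);
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(union(List.of("age", "income"), List.of("income", "city")));
        // prints: [age, income, income_2, city]
    }
}
```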
private static String getVideoFileName(final Project project) {
return project.getId() + "-" + project.getName().trim()
.toLowerCase()
.replaceAll(" ", "-")
.replaceAll("[^a-z0-9-]", "") + ".mp4";
} | static String function(final Project project) { return project.getId() + "-" + project.getName().trim() .toLowerCase() .replaceAll(" ", "-") .replaceAll(STR, STR.mp4"; } | /**
* Returns a ... {id}-{canonicalized name}.mp4
*
* @param project the project
* @return the project's video file name
*/ | Returns a ... {id}-{canonicalized name}.mp4 | getVideoFileName | {
"repo_name": "neurovelho/slidemup",
"path": "src/main/java/com/slidemup/service/LocalEncodingService.java",
"license": "gpl-3.0",
"size": 7980
} | [
"com.slidemup.domain.Project"
] | import com.slidemup.domain.Project; | import com.slidemup.domain.*; | [
"com.slidemup.domain"
] | com.slidemup.domain; | 66,828 |
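A minimal sketch of the same canonicalization chain (trim, lowercase, hyphenate spaces, strip everything outside a-z, 0-9 and '-') applied to literal values, so no Project class is needed; the sample id and name are made up.

```java
public class VideoFileNameSketch {
    // Mirrors the trim/lowercase/hyphenate/strip steps of the method above.
    static String fileName(long id, String name) {
        return id + "-" + name.trim()
                .toLowerCase()
                .replaceAll(" ", "-")
                .replaceAll("[^a-z0-9-]", "") + ".mp4";
    }

    public static void main(String[] args) {
        // " My Cool Video! " with id 42 becomes "42-my-cool-video.mp4"
        System.out.println(fileName(42, " My Cool Video! "));
    }
}
```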
public FixedSizeConverter objectIdConverter() {
return LongConverter.DEFAULT_INSTANCE;
} | FixedSizeConverter function() { return LongConverter.DEFAULT_INSTANCE; } | /**
* Returns a converter for the ids which are translated by the
* manager into TIds.
* @return a converter for serializing the identifiers.
*/ | Returns a converter for the ids which are translated by the manager into TIds | objectIdConverter | {
"repo_name": "hannoman/xxl",
"path": "src/xxl/core/collections/containers/recordManager/MapTIdManager.java",
"license": "lgpl-3.0",
"size": 6503
} | [
"xxl.core.io.converters.FixedSizeConverter",
"xxl.core.io.converters.LongConverter"
] | import xxl.core.io.converters.FixedSizeConverter; import xxl.core.io.converters.LongConverter; | import xxl.core.io.converters.*; | [
"xxl.core.io"
] | xxl.core.io; | 28,527 |
protected void tearDown() {
TimeZone.setDefault(defaultTimeZone);
} | void function() { TimeZone.setDefault(defaultTimeZone); } | /**
* Tears down the fixture, for example, close a network connection. This
* method is called after a test is executed.
*/ | Tears down the fixture, for example, close a network connection. This method is called after a test is executed | tearDown | {
"repo_name": "groschovskiy/j2objc",
"path": "jre_emul/apache_harmony/classlib/modules/luni/src/test/api/common/org/apache/harmony/luni/tests/java/util/DateTest.java",
"license": "apache-2.0",
"size": 15447
} | [
"java.util.TimeZone"
] | import java.util.TimeZone; | import java.util.*; | [
"java.util"
] | java.util; | 2,812,748 |
private static final void writeIntoStream(final ByteBuffer bytebuf, final FileChannel fc, final byte[] contents)
throws IOException {
final int chopSize = 6 * 1024;
if (contents.length >= bytebuf.capacity()) {
List<byte[]> chops = PnmlExport.chopBytes(contents, chopSize);
for (byte[] buf : chops) {
bytebuf.put(buf);
bytebuf.flip();
fc.write(bytebuf);
bytebuf.clear();
}
} else {
bytebuf.put(contents);
bytebuf.flip();
fc.write(bytebuf);
bytebuf.clear();
}
} | static final void function(final ByteBuffer bytebuf, final FileChannel fc, final byte[] contents) throws IOException { final int chopSize = 6 * 1024; if (contents.length >= bytebuf.capacity()) { List<byte[]> chops = PnmlExport.chopBytes(contents, chopSize); for (byte[] buf : chops) { bytebuf.put(buf); bytebuf.flip(); fc.write(bytebuf); bytebuf.clear(); } } else { bytebuf.put(contents); bytebuf.flip(); fc.write(bytebuf); bytebuf.clear(); } } | /**
* Writes buffer of a given max size into file channel.
*/ | Writes buffer of a given max size into file channel | writeIntoStream | {
"repo_name": "lhillah/pnmlframework",
"path": "pnmlFw-SNNet/src/fr/lip6/move/pnml/symmetricnet/hlcorestructure/impl/FontImpl.java",
"license": "epl-1.0",
"size": 26114
} | [
"fr.lip6.move.pnml.framework.general.PnmlExport",
"java.io.IOException",
"java.nio.ByteBuffer",
"java.nio.channels.FileChannel",
"java.util.List"
] | import fr.lip6.move.pnml.framework.general.PnmlExport; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.util.List; | import fr.lip6.move.pnml.framework.general.*; import java.io.*; import java.nio.*; import java.nio.channels.*; import java.util.*; | [
"fr.lip6.move",
"java.io",
"java.nio",
"java.util"
] | fr.lip6.move; java.io; java.nio; java.util; | 851,875 |
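A self-contained sketch of the chunked-write pattern shown above; PnmlExport.chopBytes is not available here, so the chopping is inlined under the assumption that it simply splits the array into fixed-size slices.

```java
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class ChunkedWriteSketch {
    // Pushes contents through a reusable buffer, one capacity-sized slice at a time.
    static void write(FileChannel fc, ByteBuffer buf, byte[] contents) throws IOException {
        int chunk = buf.capacity();
        for (int off = 0; off < contents.length; off += chunk) {
            int len = Math.min(chunk, contents.length - off);
            buf.put(contents, off, len);
            buf.flip();
            while (buf.hasRemaining()) {
                fc.write(buf); // the channel may need several passes to drain the buffer
            }
            buf.clear();
        }
    }

    public static void main(String[] args) throws IOException {
        Path tmp = Files.createTempFile("chunked", ".bin");
        try (FileChannel fc = FileChannel.open(tmp, StandardOpenOption.WRITE)) {
            write(fc, ByteBuffer.allocate(16), new byte[100]);
        }
        System.out.println("wrote " + Files.size(tmp) + " bytes to " + tmp);
    }
}
```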
public void addInclude(URI targetPath, ZipFile zipFile, ZipEntry zipEntry) throws IOException {
resourceContext.addInclude(targetPath, zipFile, zipEntry);
} | void function(URI targetPath, ZipFile zipFile, ZipEntry zipEntry) throws IOException { resourceContext.addInclude(targetPath, zipFile, zipEntry); } | /**
* Add a ZIP file entry into the deployment context and place it into the
* path specified in the target path. The newly added entry is added
* to the classpath of the configuration.
*
* @param targetPath where the ZIP file entry should be placed
* @param zipFile the ZIP file
* @param zipEntry the ZIP file entry
* @throws IOException if there's a problem copying the ZIP entry
*/ | Add a ZIP file entry into the deployment context and place it into the path specified in the target path. The newly added entry is added to the classpath of the configuration | addInclude | {
"repo_name": "apache/geronimo",
"path": "framework/modules/geronimo-deployment/src/main/java/org/apache/geronimo/deployment/DeploymentContext.java",
"license": "apache-2.0",
"size": 31246
} | [
"java.io.IOException",
"java.util.zip.ZipEntry",
"java.util.zip.ZipFile"
] | import java.io.IOException; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; | import java.io.*; import java.util.zip.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 1,340,308 |
public void setPreferenceFloatValue(String key, float value) {
if(sharedEditor == null) {
Editor editor = prefs.edit();
editor.putFloat(key, value);
editor.commit();
}else {
sharedEditor.putFloat(key, value);
}
}
| void function(String key, float value) { if(sharedEditor == null) { Editor editor = prefs.edit(); editor.putFloat(key, value); editor.commit(); }else { sharedEditor.putFloat(key, value); } } | /**
* Set a preference float value
* @param key the preference key to set
* @param value the value for this key
*/ | Set a preference float value | setPreferenceFloatValue | {
"repo_name": "NewCell/Call-Text-v1",
"path": "src/com/newcell/calltext/utils/PreferencesWrapper.java",
"license": "gpl-3.0",
"size": 26594
} | [
"android.content.SharedPreferences"
] | import android.content.SharedPreferences; | import android.content.*; | [
"android.content"
] | android.content; | 1,783,132 |
ServiceFuture<List<LocalDate>> getDateInvalidNullAsync(final ServiceCallback<List<LocalDate>> serviceCallback); | ServiceFuture<List<LocalDate>> getDateInvalidNullAsync(final ServiceCallback<List<LocalDate>> serviceCallback); | /**
* Get date array value ['2012-01-01', null, '1776-07-04'].
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/ | Get date array value ['2012-01-01', null, '1776-07-04'] | getDateInvalidNullAsync | {
"repo_name": "balajikris/autorest",
"path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/bodyarray/Arrays.java",
"license": "mit",
"size": 104816
} | [
"com.microsoft.rest.ServiceCallback",
"com.microsoft.rest.ServiceFuture",
"java.util.List",
"org.joda.time.LocalDate"
] | import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture; import java.util.List; import org.joda.time.LocalDate; | import com.microsoft.rest.*; import java.util.*; import org.joda.time.*; | [
"com.microsoft.rest",
"java.util",
"org.joda.time"
] | com.microsoft.rest; java.util; org.joda.time; | 120,548 |
protected void refreshBendpoints() {
EList modelConstraint = getLinkRef().getBendpoints();
List figureConstraint = new ArrayList();
for (int i = 0; i < modelConstraint.size(); i++) {
LinkRefBendpoint bendpoint = (LinkRefBendpoint) modelConstraint.get(i);
AbsoluteBendpoint abp = new AbsoluteBendpoint(bendpoint.getX(), bendpoint.getY());
figureConstraint.add(abp);
}
getConnectionFigure().setRoutingConstraint(figureConstraint);
} | void function() { EList modelConstraint = getLinkRef().getBendpoints(); List figureConstraint = new ArrayList(); for (int i = 0; i < modelConstraint.size(); i++) { LinkRefBendpoint bendpoint = (LinkRefBendpoint) modelConstraint.get(i); AbsoluteBendpoint abp = new AbsoluteBendpoint(bendpoint.getX(), bendpoint.getY()); figureConstraint.add(abp); } getConnectionFigure().setRoutingConstraint(figureConstraint); } | /**
* Updates the bendpoints, based on the model.
*/ | Updates the bendpoints, based on the model | refreshBendpoints | {
"repo_name": "gmussbacher/seg.jUCMNav",
"path": "src/seg/jUCMNav/editparts/LinkRefEditPart.java",
"license": "epl-1.0",
"size": 13849
} | [
"java.util.ArrayList",
"java.util.List",
"org.eclipse.draw2d.AbsoluteBendpoint",
"org.eclipse.emf.common.util.EList"
] | import java.util.ArrayList; import java.util.List; import org.eclipse.draw2d.AbsoluteBendpoint; import org.eclipse.emf.common.util.EList; | import java.util.*; import org.eclipse.draw2d.*; import org.eclipse.emf.common.util.*; | [
"java.util",
"org.eclipse.draw2d",
"org.eclipse.emf"
] | java.util; org.eclipse.draw2d; org.eclipse.emf; | 686,904 |
@Test
public void testResourceChangeMessage() {
ResourceChangeRequest msg = new ResourceChangeRequest("hardware", Status.ENABLED);
Assert.assertEquals(msg.getResource(), "hardware");
Assert.assertEquals(msg.getStatus(), Status.ENABLED);
msg.hashCode();
Assert.assertFalse(msg.equals(null));
Assert.assertTrue(msg.equals(msg));
Assert.assertNotNull(msg.elevate());
}
| void function() { ResourceChangeRequest msg = new ResourceChangeRequest(STR, Status.ENABLED); Assert.assertEquals(msg.getResource(), STR); Assert.assertEquals(msg.getStatus(), Status.ENABLED); msg.hashCode(); Assert.assertFalse(msg.equals(null)); Assert.assertTrue(msg.equals(msg)); Assert.assertNotNull(msg.elevate()); } | /**
* Tests the resource change message (contents).
*/ | Tests the resource change message (contents) | testResourceChangeMessage | {
"repo_name": "QualiMaster/Infrastructure",
"path": "AdaptationLayer/src/tests/eu/qualimaster/adaptation/ExternalTests.java",
"license": "apache-2.0",
"size": 33340
} | [
"eu.qualimaster.adaptation.external.ResourceChangeRequest",
"org.junit.Assert"
] | import eu.qualimaster.adaptation.external.ResourceChangeRequest; import org.junit.Assert; | import eu.qualimaster.adaptation.external.*; import org.junit.*; | [
"eu.qualimaster.adaptation",
"org.junit"
] | eu.qualimaster.adaptation; org.junit; | 2,617,808 |
public BytesWritable getEndKey() {
return endKey;
} | BytesWritable function() { return endKey; } | /**
* Get the end key. If the end key is unknown, null will be returned.
*
* @return end key.
*/ | Get the end key. If the end key is unknown, null will be returned | getEndKey | {
"repo_name": "kaituo/sedge",
"path": "trunk/contrib/zebra/src/java/org/apache/hadoop/zebra/io/BasicTableStatus.java",
"license": "mit",
"size": 2870
} | [
"org.apache.hadoop.io.BytesWritable"
] | import org.apache.hadoop.io.BytesWritable; | import org.apache.hadoop.io.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 1,178,235 |
public ConnectorConfiguration setIdleTimeout(long idleTimeout) {
this.idleTimeout = idleTimeout;
return this;
}
/**
* Whether secure socket layer should be enabled on binary rest server.
* <p>
* Note that if this flag is set to {@code true}, an instance of {@link GridSslContextFactory} | ConnectorConfiguration function(long idleTimeout) { this.idleTimeout = idleTimeout; return this; } /** * Whether secure socket layer should be enabled on binary rest server. * <p> * Note that if this flag is set to {@code true}, an instance of {@link GridSslContextFactory} | /**
* Sets idle timeout for REST server.
*
* @param idleTimeout Idle timeout in milliseconds.
* @see #getIdleTimeout()
* @return {@code this} for chaining.
*/ | Sets idle timeout for REST server | setIdleTimeout | {
"repo_name": "alexzaitzev/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/configuration/ConnectorConfiguration.java",
"license": "apache-2.0",
"size": 21885
} | [
"org.apache.ignite.internal.client.ssl.GridSslContextFactory"
] | import org.apache.ignite.internal.client.ssl.GridSslContextFactory; | import org.apache.ignite.internal.client.ssl.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 2,809,286 |
@Aspect(advice = org.support.project.ormapping.transaction.Transaction.class)
public void activation(Integer user, KnowledgesEntity entity) {
activation(user, entity.getKnowledgeId());
} | @Aspect(advice = org.support.project.ormapping.transaction.Transaction.class) void function(Integer user, KnowledgesEntity entity) { activation(user, entity.getKnowledgeId()); } | /**
* Activation.
* If the delete flag exists and is set to true, it is set back to false to activate the record.
* Sets the saved user id.
* @param user saved userid
* @param entity entity
*/ | Activation. If the delete flag exists and is set to true, it is set back to false to activate the record. Sets the saved user id | activation | {
"repo_name": "support-project/knowledge",
"path": "src/main/java/org/support/project/knowledge/dao/gen/GenKnowledgesDao.java",
"license": "apache-2.0",
"size": 18044
} | [
"org.support.project.aop.Aspect",
"org.support.project.knowledge.entity.KnowledgesEntity"
] | import org.support.project.aop.Aspect; import org.support.project.knowledge.entity.KnowledgesEntity; | import org.support.project.aop.*; import org.support.project.knowledge.entity.*; | [
"org.support.project"
] | org.support.project; | 155,257 |
public final static RTMPMessage build(IRTMPEvent body, int eventTime) {
return new RTMPMessage(body, eventTime);
}
| final static RTMPMessage function(IRTMPEvent body, int eventTime) { return new RTMPMessage(body, eventTime); } | /**
* Builder for RTMPMessage.
*
* @param body event data
* @param eventTime time value to set on the event body
* @return Immutable RTMPMessage
*/ | Builder for RTMPMessage | build | {
"repo_name": "cwpenhale/red5-mobileconsole",
"path": "red5_server/src/main/java/org/red5/server/stream/message/RTMPMessage.java",
"license": "apache-2.0",
"size": 2570
} | [
"org.red5.server.net.rtmp.event.IRTMPEvent"
] | import org.red5.server.net.rtmp.event.IRTMPEvent; | import org.red5.server.net.rtmp.event.*; | [
"org.red5.server"
] | org.red5.server; | 1,404,939 |
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jSeparator1 = new javax.swing.JSeparator();
filler1 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 4), new java.awt.Dimension(0, 4), new java.awt.Dimension(0, 4));
jPanel1 = new javax.swing.JPanel();
applyButton = new javax.swing.JButton();
cancelButton = new javax.swing.JButton();
filler2 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 4), new java.awt.Dimension(0, 4), new java.awt.Dimension(0, 4));
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
getContentPane().setLayout(new javax.swing.BoxLayout(getContentPane(), javax.swing.BoxLayout.PAGE_AXIS));
getContentPane().add(jSeparator1);
getContentPane().add(filler1);
jPanel1.setMaximumSize(new java.awt.Dimension(4000, 27));
jPanel1.setMinimumSize(new java.awt.Dimension(100, 27));
jPanel1.setPreferredSize(new java.awt.Dimension(400, 27));
applyButton.setText(org.openide.util.NbBundle.getMessage(AdvancedConfigurationDialog.class, "AdvancedConfigurationDialog.applyButton.text")); // NOI18N | @SuppressWarnings(STR) void function() { jSeparator1 = new javax.swing.JSeparator(); filler1 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 4), new java.awt.Dimension(0, 4), new java.awt.Dimension(0, 4)); jPanel1 = new javax.swing.JPanel(); applyButton = new javax.swing.JButton(); cancelButton = new javax.swing.JButton(); filler2 = new javax.swing.Box.Filler(new java.awt.Dimension(0, 4), new java.awt.Dimension(0, 4), new java.awt.Dimension(0, 4)); setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); getContentPane().setLayout(new javax.swing.BoxLayout(getContentPane(), javax.swing.BoxLayout.PAGE_AXIS)); getContentPane().add(jSeparator1); getContentPane().add(filler1); jPanel1.setMaximumSize(new java.awt.Dimension(4000, 27)); jPanel1.setMinimumSize(new java.awt.Dimension(100, 27)); jPanel1.setPreferredSize(new java.awt.Dimension(400, 27)); applyButton.setText(org.openide.util.NbBundle.getMessage(AdvancedConfigurationDialog.class, STR)); | /**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/ | This method is called from within the constructor to initialize the form. The content of this method is always regenerated by the Form Editor | initComponents | {
"repo_name": "APriestman/autopsy",
"path": "Core/src/org/sleuthkit/autopsy/corecomponents/AdvancedConfigurationDialog.java",
"license": "apache-2.0",
"size": 6006
} | [
"java.awt.Dimension",
"javax.swing.JPanel"
] | import java.awt.Dimension; import javax.swing.JPanel; | import java.awt.*; import javax.swing.*; | [
"java.awt",
"javax.swing"
] | java.awt; javax.swing; | 171,258 |
protected TableColumnModelListener getColumnModelListener() {
if (columnModelListener == null) {
columnModelListener = createColumnModelListener();
}
return columnModelListener;
} | TableColumnModelListener function() { if (columnModelListener == null) { columnModelListener = createColumnModelListener(); } return columnModelListener; } | /**
* Returns the listener to the table's column model. The listener is
* lazily created if necessary.
* @return the <code>TableColumnModelListener</code> for use with the
* table's column model, guaranteed to be not <code>null</code>.
*/ | Returns the listener to the table's column model. The listener is lazily created if necessary | getColumnModelListener | {
"repo_name": "Mindtoeye/Hoop",
"path": "src/org/jdesktop/swingx/table/ColumnControlButton.java",
"license": "lgpl-3.0",
"size": 32314
} | [
"javax.swing.event.TableColumnModelListener"
] | import javax.swing.event.TableColumnModelListener; | import javax.swing.event.*; | [
"javax.swing"
] | javax.swing; | 2,715,335 |
//--------//
// modify //
//--------//
public void modify (Shape shape,
TimeRational timeRational)
{
if (shape == null) {
shape = predefinedShape(timeRational);
if (shape == null) {
shape = Shape.CUSTOM_TIME;
}
}
logger.debug("{} assigned to {}", shape, this);
this.shape = shape;
this.timeRational = timeRational;
}
| void function (Shape shape, TimeRational timeRational) { if (shape == null) { shape = predefinedShape(timeRational); if (shape == null) { shape = Shape.CUSTOM_TIME; } } logger.debug(STR, shape, this); this.shape = shape; this.timeRational = timeRational; } | /**
* Modify this time signature in situ using the provided shape and rational value.
*
* @param shape the shape (perhaps null) of correct signature
* @param timeRational the new sig rational value
*/ | Modify this time signature in situ using the provided shape and rational value | modify | {
"repo_name": "Audiveris/audiveris",
"path": "src/main/org/audiveris/omr/sig/inter/AbstractTimeInter.java",
"license": "agpl-3.0",
"size": 16562
} | [
"org.audiveris.omr.glyph.Shape",
"org.audiveris.omr.score.TimeRational"
] | import org.audiveris.omr.glyph.Shape; import org.audiveris.omr.score.TimeRational; | import org.audiveris.omr.glyph.*; import org.audiveris.omr.score.*; | [
"org.audiveris.omr"
] | org.audiveris.omr; | 1,808,299 |
private void initValuesFrom(CmsImageScaler source) {
m_color = source.m_color;
m_cropHeight = source.m_cropHeight;
m_cropWidth = source.m_cropWidth;
m_cropX = source.m_cropX;
m_cropY = source.m_cropY;
m_filters = new ArrayList<String>(source.m_filters);
m_focalPoint = source.m_focalPoint;
m_height = source.m_height;
m_isOriginalScaler = source.m_isOriginalScaler;
m_maxBlurSize = source.m_maxBlurSize;
m_position = source.m_position;
m_quality = source.m_quality;
m_renderMode = source.m_renderMode;
m_type = source.m_type;
m_width = source.m_width;
} | void function(CmsImageScaler source) { m_color = source.m_color; m_cropHeight = source.m_cropHeight; m_cropWidth = source.m_cropWidth; m_cropX = source.m_cropX; m_cropY = source.m_cropY; m_filters = new ArrayList<String>(source.m_filters); m_focalPoint = source.m_focalPoint; m_height = source.m_height; m_isOriginalScaler = source.m_isOriginalScaler; m_maxBlurSize = source.m_maxBlurSize; m_position = source.m_position; m_quality = source.m_quality; m_renderMode = source.m_renderMode; m_type = source.m_type; m_width = source.m_width; } | /**
* Copies all values from the given scaler into this scaler.<p>
*
* @param source the source scaler
*/ | Copies all values from the given scaler into this scaler | initValuesFrom | {
"repo_name": "alkacon/opencms-core",
"path": "src/org/opencms/loader/CmsImageScaler.java",
"license": "lgpl-2.1",
"size": 65718
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 2,834,118 |
private void initialize() {
frame = new JFrame();
frame.setBounds(100, 100, 450, 300);
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
} | void function() { frame = new JFrame(); frame.setBounds(100, 100, 450, 300); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); } | /**
* Initialize the contents of the frame.
*/ | Initialize the contents of the frame | initialize | {
"repo_name": "gnomex/javando",
"path": "src/br/github/gnomex/two_th_exercises/products_inventory/views/ProductsCRUD.java",
"license": "mit",
"size": 782
} | [
"javax.swing.JFrame"
] | import javax.swing.JFrame; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 1,483,206 |
void onSavePost(Post post);
interface View extends BaseView { | void onSavePost(Post post); interface View extends BaseView { | /**
* Saves the given Post locally on the device
* @param post, Post: item to save
*/ | Saves the given Post locally on the device | onSavePost | {
"repo_name": "PascalDierich/Watchdog",
"path": "app/src/main/java/de/pascaldierich/watchdog/presenter/fragments/posts/PostPresenter.java",
"license": "apache-2.0",
"size": 1770
} | [
"de.pascaldierich.model.domainmodels.Post",
"de.pascaldierich.watchdog.presenter.base.BaseView"
] | import de.pascaldierich.model.domainmodels.Post; import de.pascaldierich.watchdog.presenter.base.BaseView; | import de.pascaldierich.model.domainmodels.*; import de.pascaldierich.watchdog.presenter.base.*; | [
"de.pascaldierich.model",
"de.pascaldierich.watchdog"
] | de.pascaldierich.model; de.pascaldierich.watchdog; | 2,043,498 |
public Messenger getMessenger() {
enforceAccessPermission();
enforceChangePermission();
return new Messenger(mP2pStateMachine.getHandler());
} | Messenger function() { enforceAccessPermission(); enforceChangePermission(); return new Messenger(mP2pStateMachine.getHandler()); } | /**
* Get a reference to the handler. This is used by a client to establish
* an AsyncChannel communication with WifiP2pService
*/ | Get a reference to the handler. This is used by a client to establish an AsyncChannel communication with WifiP2pService | getMessenger | {
"repo_name": "haikuowuya/android_system_code",
"path": "src/android/net/wifi/p2p/WifiP2pService.java",
"license": "apache-2.0",
"size": 119922
} | [
"android.os.Messenger"
] | import android.os.Messenger; | import android.os.*; | [
"android.os"
] | android.os; | 727,573 |
@Test
public void testGetExtraActionInfoOnAspects() throws Exception {
scratch.file(
"a/BUILD",
"load('//a:def.bzl', 'testrule')",
"testrule(name='a', deps=[':b'])",
"testrule(name='b')");
scratch.file(
"a/def.bzl",
"def _aspect_impl(target, ctx):",
" f = ctx.new_file('foo.txt')",
" ctx.action(outputs = [f], command = 'echo foo > \"$1\"')",
" return struct(output=f)",
"def _rule_impl(ctx):",
" return struct(files=depset([artifact.output for artifact in ctx.attr.deps]))",
"aspect1 = aspect(_aspect_impl, attr_aspects=['deps'], ",
" attrs = {'parameter': attr.string(values = ['param_value'])})",
"testrule = rule(_rule_impl, attrs = { ",
" 'deps' : attr.label_list(aspects = [aspect1]), ",
" 'parameter': attr.string(default='param_value') })");
update(
ImmutableList.of("//a:a"),
false ,
1 ,
true ,
new EventBus());
Artifact artifact = getOnlyElement(getFilesToBuild(getConfiguredTarget("//a:a")));
ExtraActionInfo.Builder extraActionInfo = getGeneratingAction(artifact).getExtraActionInfo();
assertThat(extraActionInfo.getAspectName()).isEqualTo("//a:def.bzl%aspect1");
assertThat(extraActionInfo.getAspectParametersMap())
.containsExactly(
"parameter", ExtraActionInfo.StringList.newBuilder().addValue("param_value").build());
} | void function() throws Exception { scratch.file( STR, STRtestrule(name='a', deps=[':b'])STRtestrule(name='b')STRa/def.bzlSTRdef _aspect_impl(target, ctx):STR f = ctx.new_file('foo.txt')STR ctx.action(outputs = [f], command = 'echo foo > \"$1\"')STR return struct(output=f)STRdef _rule_impl(ctx):STR return struct(files=depset([artifact.output for artifact in ctx.attr.deps]))STRaspect1 = aspect(_aspect_impl, attr_aspects=['deps'], STR attrs = {'parameter': attr.string(values = ['param_value'])})STRtestrule = rule(_rule_impl, attrs = { STR 'deps' : attr.label_list(aspects = [aspect1]), STR 'parameter': attr.string(default='param_value') })STR false , 1 , true , new EventBus()); Artifact artifact = getOnlyElement(getFilesToBuild(getConfiguredTarget(STR assertThat(extraActionInfo.getAspectParametersMap()) .containsExactly( STR, ExtraActionInfo.StringList.newBuilder().addValue(STR).build()); } | /**
* Tests that the ExtraActionInfo proto that's generated from an action, contains Aspect-related
* information.
*/ | Tests that the ExtraActionInfo proto that's generated from an action, contains Aspect-related information | testGetExtraActionInfoOnAspects | {
"repo_name": "mikelikespie/bazel",
"path": "src/test/java/com/google/devtools/build/lib/analysis/actions/SpawnActionTest.java",
"license": "apache-2.0",
"size": 20502
} | [
"com.google.common.collect.Iterables",
"com.google.common.eventbus.EventBus",
"com.google.common.truth.Truth",
"com.google.devtools.build.lib.actions.Artifact",
"com.google.devtools.build.lib.actions.extra.ExtraActionInfo"
] | import com.google.common.collect.Iterables; import com.google.common.eventbus.EventBus; import com.google.common.truth.Truth; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.extra.ExtraActionInfo; | import com.google.common.collect.*; import com.google.common.eventbus.*; import com.google.common.truth.*; import com.google.devtools.build.lib.actions.*; import com.google.devtools.build.lib.actions.extra.*; | [
"com.google.common",
"com.google.devtools"
] | com.google.common; com.google.devtools; | 27,527 |
void loadEndpointContent(String resourceGroupName, String profileName, String endpointName, List<String> contentPaths); | void loadEndpointContent(String resourceGroupName, String profileName, String endpointName, List<String> contentPaths); | /**
* Forcibly pre-loads CDN endpoint content. Available for Verizon profiles.
*
* @param resourceGroupName name of the resource group within the Azure subscription.
* @param profileName name of the CDN profile which is unique within the resource group.
* @param endpointName name of the endpoint under the profile which is unique globally.
* @param contentPaths the path to the content to be loaded. Should describe a file path.
*/ | Forcibly pre-loads CDN endpoint content. Available for Verizon profiles | loadEndpointContent | {
"repo_name": "pomortaz/azure-sdk-for-java",
"path": "azure-mgmt-cdn/src/main/java/com/microsoft/azure/management/cdn/CdnProfiles.java",
"license": "mit",
"size": 5000
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,147,219 |
public void setValue(String
value) {
this.value = value;
}
private gov.nih.nci.calims2.domain.administration.ContactInformation contactInformation;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CONTACTINFORMATION_FK")
@org.hibernate.annotations.ForeignKey(name = "TELEPHCONTAC_FK") | void function(String value) { this.value = value; } private gov.nih.nci.calims2.domain.administration.ContactInformation contactInformation; @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = STR) @org.hibernate.annotations.ForeignKey(name = STR) | /**
* Sets the value of the value attribute.
* @param value .
**/ | Sets the value of the value attribute | setValue | {
"repo_name": "NCIP/calims",
"path": "calims2-model/src/java/gov/nih/nci/calims2/domain/administration/TelephoneNumber.java",
"license": "bsd-3-clause",
"size": 6120
} | [
"javax.persistence.FetchType",
"javax.persistence.JoinColumn",
"javax.persistence.ManyToOne",
"org.hibernate.annotations.ForeignKey"
] | import javax.persistence.FetchType; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import org.hibernate.annotations.ForeignKey; | import javax.persistence.*; import org.hibernate.annotations.*; | [
"javax.persistence",
"org.hibernate.annotations"
] | javax.persistence; org.hibernate.annotations; | 1,623,623 |
@Test
public final void testParameters() throws Exception {
final URL url = XMLAPIURLBuilder.withBaseURL(BASE_URL)
.endpoint(Endpoint.DEVICELIST)
.parameter("param1", 3)
.parameter("param2", "some-value")
.parameter("param3", 0.21)
.build();
assertThat(url).isEqualTo(new URL(new StringBuilder(BASE_URL).append("/")
.append(Endpoint.DEVICELIST.getCGI())
.append("?param1=3¶m2=some-value¶m3=0.21").toString()));
}
| final void function() throws Exception { final URL url = XMLAPIURLBuilder.withBaseURL(BASE_URL) .endpoint(Endpoint.DEVICELIST) .parameter(STR, 3) .parameter(STR, STR) .parameter(STR, 0.21) .build(); assertThat(url).isEqualTo(new URL(new StringBuilder(BASE_URL).append("/") .append(Endpoint.DEVICELIST.getCGI()) .append(STR).toString())); } | /**
* Tests the endpoints.
*/ | Tests the endpoints | testParameters | {
"repo_name": "abollaert/homematic",
"path": "src/test/java/be/techniquez/homeautomation/homematic/impl/channel/XMLAPIURLBuilderTest.java",
"license": "apache-2.0",
"size": 2786
} | [
"be.techniquez.homeautomation.homematic.impl.channel.XMLAPIURLBuilder",
"org.assertj.core.api.Assertions"
] | import be.techniquez.homeautomation.homematic.impl.channel.XMLAPIURLBuilder; import org.assertj.core.api.Assertions; | import be.techniquez.homeautomation.homematic.impl.channel.*; import org.assertj.core.api.*; | [
"be.techniquez.homeautomation",
"org.assertj.core"
] | be.techniquez.homeautomation; org.assertj.core; | 947,116 |
private Node parseAndRecordTypeNameNode(JsDocToken token, int lineno,
int startCharno, boolean matchingLC) {
return parseAndRecordTypeNode(token, lineno, startCharno, matchingLC, true);
} | Node function(JsDocToken token, int lineno, int startCharno, boolean matchingLC) { return parseAndRecordTypeNode(token, lineno, startCharno, matchingLC, true); } | /**
* Looks for a type expression at the current token and if found,
* returns it. Note that this method consumes input.
*
* @param token The current token.
* @param lineno The line of the type expression.
* @param startCharno The starting character position of the type expression.
* @param matchingLC Whether the type expression starts with a "{".
* @return The type expression found or null if none.
*/ | Looks for a type expression at the current token and if found, returns it. Note that this method consumes input | parseAndRecordTypeNameNode | {
"repo_name": "lgeorgieff/closure-compiler",
"path": "src/com/google/javascript/jscomp/parsing/JsDocInfoParser.java",
"license": "apache-2.0",
"size": 83891
} | [
"com.google.javascript.rhino.Node"
] | import com.google.javascript.rhino.Node; | import com.google.javascript.rhino.*; | [
"com.google.javascript"
] | com.google.javascript; | 2,893,991 |
private void tryAndUpdatePredictedApps() {
boolean mRemoteDrawerEnabled = SettingsProvider.getBoolean(this,
SettingsProvider.SETTINGS_UI_DRAWER_REMOTE_APPS,
R.bool.preferences_interface_drawer_remote_apps_default);
if (!mRemoteDrawerEnabled) {
if (mLauncherCallbacks != null) {
List<ComponentKey> apps = mLauncherCallbacks.getPredictedApps();
if (apps != null) {
mAppsView.setPredictedAppComponents(apps);
}
}
}
} | void function() { boolean mRemoteDrawerEnabled = SettingsProvider.getBoolean(this, SettingsProvider.SETTINGS_UI_DRAWER_REMOTE_APPS, R.bool.preferences_interface_drawer_remote_apps_default); if (!mRemoteDrawerEnabled) { if (mLauncherCallbacks != null) { List<ComponentKey> apps = mLauncherCallbacks.getPredictedApps(); if (apps != null) { mAppsView.setPredictedAppComponents(apps); } } } } | /**
* Updates the set of predicted apps if it hasn't been updated since the last time Launcher was
* resumed.
*/ | Updates the set of predicted apps if it hasn't been updated since the last time Launcher was resumed | tryAndUpdatePredictedApps | {
"repo_name": "bojanvu23/android_packages_apps_Trebuchet_Gradle",
"path": "Trebuchet/src/main/java/com/lite/android/launcher3/Launcher.java",
"license": "apache-2.0",
"size": 211390
} | [
"com.lite.android.launcher3.settings.SettingsProvider",
"com.lite.android.launcher3.util.ComponentKey",
"java.util.List"
] | import com.lite.android.launcher3.settings.SettingsProvider; import com.lite.android.launcher3.util.ComponentKey; import java.util.List; | import com.lite.android.launcher3.settings.*; import com.lite.android.launcher3.util.*; import java.util.*; | [
"com.lite.android",
"java.util"
] | com.lite.android; java.util; | 2,658,472 |
public Timestamp getDateEntered() {
return (Timestamp) get(2);
} | Timestamp function() { return (Timestamp) get(2); } | /**
* Getter for <code>sugarcrm_4_12.ss_outgoing_checks.date_entered</code>.
*/ | Getter for <code>sugarcrm_4_12.ss_outgoing_checks.date_entered</code> | getDateEntered | {
"repo_name": "SmartMedicalServices/SpringJOOQ",
"path": "src/main/java/com/sms/sis/db/tables/records/SsOutgoingChecksRecord.java",
"license": "gpl-3.0",
"size": 15568
} | [
"java.sql.Timestamp"
] | import java.sql.Timestamp; | import java.sql.*; | [
"java.sql"
] | java.sql; | 1,778,515 |
public int addServerGroups(User loggedInUser, String key, List serverGroupIds) {
ActivationKeyManager manager = ActivationKeyManager.getInstance();
ActivationKey activationKey = lookupKey(key, loggedInUser);
for (Iterator it = serverGroupIds.iterator(); it.hasNext();) {
Number serverGroupId = (Number)it.next();
ManagedServerGroup group = null;
try {
group = ServerGroupManager.getInstance().lookup(
new Long(serverGroupId.longValue()), loggedInUser);
}
catch (LookupException e) {
throw new InvalidServerGroupException(e);
}
manager.addServerGroup(activationKey, group);
}
return 1;
} | int function(User loggedInUser, String key, List serverGroupIds) { ActivationKeyManager manager = ActivationKeyManager.getInstance(); ActivationKey activationKey = lookupKey(key, loggedInUser); for (Iterator it = serverGroupIds.iterator(); it.hasNext();) { Number serverGroupId = (Number)it.next(); ManagedServerGroup group = null; try { group = ServerGroupManager.getInstance().lookup( new Long(serverGroupId.longValue()), loggedInUser); } catch (LookupException e) { throw new InvalidServerGroupException(e); } manager.addServerGroup(activationKey, group); } return 1; } | /**
* Add server groups to an activation key.
*
* @param loggedInUser The current user
* @param key The activation key to act upon.
* @param serverGroupIds List of server group IDs to be added to this activation key.
* @return 1 on success, exception thrown otherwise.
*
* @xmlrpc.doc Add server groups to an activation key.
* @xmlrpc.param #param("string", "sessionKey")
* @xmlrpc.param #param("string", "key")
* @xmlrpc.param #array_single("int", "serverGroupId")
* @xmlrpc.returntype #return_int_success()
*/ | Add server groups to an activation key | addServerGroups | {
"repo_name": "aronparsons/spacewalk",
"path": "java/code/src/com/redhat/rhn/frontend/xmlrpc/activationkey/ActivationKeyHandler.java",
"license": "gpl-2.0",
"size": 46034
} | [
"com.redhat.rhn.common.hibernate.LookupException",
"com.redhat.rhn.domain.server.ManagedServerGroup",
"com.redhat.rhn.domain.token.ActivationKey",
"com.redhat.rhn.domain.user.User",
"com.redhat.rhn.frontend.xmlrpc.InvalidServerGroupException",
"com.redhat.rhn.manager.system.ServerGroupManager",
"com.redhat.rhn.manager.token.ActivationKeyManager",
"java.util.Iterator",
"java.util.List"
] | import com.redhat.rhn.common.hibernate.LookupException; import com.redhat.rhn.domain.server.ManagedServerGroup; import com.redhat.rhn.domain.token.ActivationKey; import com.redhat.rhn.domain.user.User; import com.redhat.rhn.frontend.xmlrpc.InvalidServerGroupException; import com.redhat.rhn.manager.system.ServerGroupManager; import com.redhat.rhn.manager.token.ActivationKeyManager; import java.util.Iterator; import java.util.List; | import com.redhat.rhn.common.hibernate.*; import com.redhat.rhn.domain.server.*; import com.redhat.rhn.domain.token.*; import com.redhat.rhn.domain.user.*; import com.redhat.rhn.frontend.xmlrpc.*; import com.redhat.rhn.manager.system.*; import com.redhat.rhn.manager.token.*; import java.util.*; | [
"com.redhat.rhn",
"java.util"
] | com.redhat.rhn; java.util; | 371,464 |
public Location getPreviousBlock() {
return prevPos.toLocation(world);
} | Location function() { return prevPos.toLocation(world); } | /**
* Returns the previous block in the aimed path
*
* @return block position
*/ | Returns the previous block in the aimed path | getPreviousBlock | {
"repo_name": "karlthepagan/Glowstone",
"path": "src/main/java/net/glowstone/util/TargetBlock.java",
"license": "mit",
"size": 7085
} | [
"org.bukkit.Location"
] | import org.bukkit.Location; | import org.bukkit.*; | [
"org.bukkit"
] | org.bukkit; | 156,769 |
public Optional<Path> extractAapt2(Path tempDir) {
String osDir = getOsSpecificJarDirectory();
// Attempt at locating the directory in question inside the jar.
URL osDirUrl = SdkToolsLocator.class.getResource(osDir);
if (osDirUrl == null) {
return Optional.empty();
}
Path aapt2;
try {
Path outputDir = tempDir.resolve("output");
// If we are in a jar, we are running from the executable.
// Extract aapt2 from the jar.
if ("jar".equals(osDirUrl.getProtocol())) {
extractFilesFromJar(outputDir, osDirUrl, osDir);
try (Stream<Path> aapt2Binaries = Files.find(outputDir, 3, AAPT2_MATCHER)) {
aapt2 = aapt2Binaries.collect(onlyElement());
}
} else {
// If we are not in a jar, this might be a test.
// Try to locate the aapt2 inside the directory.
try (Stream<Path> aapt2Binaries =
Files.find(Paths.get(osDirUrl.toURI()), 3, AAPT2_MATCHER)) {
Optional<Path> aapt2Path = aapt2Binaries.findFirst();
if (!aapt2Path.isPresent()) {
return Optional.empty();
}
aapt2 = aapt2Path.get();
}
}
} catch (NoSuchElementException e) {
throw CommandExecutionException.builder()
.withInternalMessage("Unable to locate aapt2 inside jar.")
.build();
} catch (IOException | URISyntaxException e) {
throw CommandExecutionException.builder()
.withInternalMessage("Unable to extract aapt2 from jar.")
.withCause(e)
.build();
}
// Sanity check.
checkState(Files.exists(aapt2));
// Ensure aapt2 is executable.
try {
aapt2.toFile().setExecutable(true);
} catch (SecurityException e) {
throw CommandExecutionException.builder()
.withInternalMessage(
"Unable to make aapt2 executable. This may be a permission issue. If it persists, "
+ "consider passing the path to aapt2 using the flag --aapt2.")
.withCause(e)
.build();
}
return Optional.of(aapt2);
} | Optional<Path> function(Path tempDir) { String osDir = getOsSpecificJarDirectory(); URL osDirUrl = SdkToolsLocator.class.getResource(osDir); if (osDirUrl == null) { return Optional.empty(); } Path aapt2; try { Path outputDir = tempDir.resolve(STR); if ("jar".equals(osDirUrl.getProtocol())) { extractFilesFromJar(outputDir, osDirUrl, osDir); try (Stream<Path> aapt2Binaries = Files.find(outputDir, 3, AAPT2_MATCHER)) { aapt2 = aapt2Binaries.collect(onlyElement()); } } else { try (Stream<Path> aapt2Binaries = Files.find(Paths.get(osDirUrl.toURI()), 3, AAPT2_MATCHER)) { Optional<Path> aapt2Path = aapt2Binaries.findFirst(); if (!aapt2Path.isPresent()) { return Optional.empty(); } aapt2 = aapt2Path.get(); } } } catch (NoSuchElementException e) { throw CommandExecutionException.builder() .withInternalMessage(STR) .build(); } catch (IOException URISyntaxException e) { throw CommandExecutionException.builder() .withInternalMessage(STR) .withCause(e) .build(); } checkState(Files.exists(aapt2)); try { aapt2.toFile().setExecutable(true); } catch (SecurityException e) { throw CommandExecutionException.builder() .withInternalMessage( STR + STR) .withCause(e) .build(); } return Optional.of(aapt2); } | /**
* Tries to extract aapt2 from the executable if found. The Gradle tests extract aapt2 into a
* corresponding folder; in that case the folder is searched.
*
* <p>Returns an empty instance if no aapt2 binary is found inside the folder.
*
* @throws CommandExecutionException if aapt2 was not in or cannot be extracted from the
* executable.
*/ | Tries to extract aapt2 from the executable if found. The Gradle tests extract aapt2 into a corresponding folder; in that case the folder is searched. Returns an empty instance if no aapt2 binary is found inside the folder | extractAapt2 | {
"repo_name": "google/bundletool",
"path": "src/main/java/com/android/tools/build/bundletool/model/utils/SdkToolsLocator.java",
"license": "apache-2.0",
"size": 10106
} | [
"com.android.tools.build.bundletool.model.exceptions.CommandExecutionException",
"com.google.common.base.Preconditions",
"java.io.IOException",
"java.net.URISyntaxException",
"java.nio.file.Files",
"java.nio.file.Path",
"java.nio.file.Paths",
"java.util.NoSuchElementException",
"java.util.Optional",
"java.util.stream.Stream"
] | import com.android.tools.build.bundletool.model.exceptions.CommandExecutionException; import com.google.common.base.Preconditions; import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.NoSuchElementException; import java.util.Optional; import java.util.stream.Stream; | import com.android.tools.build.bundletool.model.exceptions.*; import com.google.common.base.*; import java.io.*; import java.net.*; import java.nio.file.*; import java.util.*; import java.util.stream.*; | [
"com.android.tools",
"com.google.common",
"java.io",
"java.net",
"java.nio",
"java.util"
] | com.android.tools; com.google.common; java.io; java.net; java.nio; java.util; | 2,592,017 |
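The directory branch of extractAapt2 leans on Files.find with a depth limit and a matcher; below is a stripped-down sketch of just that lookup, with a hypothetical name-based predicate standing in for AAPT2_MATCHER.

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import java.util.stream.Stream;

public class BinaryLocatorSketch {
    // Finds the first regular file whose name starts with "aapt2", up to three levels deep.
    static Optional<Path> findAapt2(Path root) throws IOException {
        try (Stream<Path> candidates = Files.find(root, 3,
                (path, attrs) -> attrs.isRegularFile()
                        && path.getFileName().toString().startsWith("aapt2"))) {
            return candidates.findFirst();
        }
    }

    public static void main(String[] args) throws IOException {
        // Searches the working directory; a real caller would pass the extracted tools dir.
        findAapt2(Paths.get(".")).ifPresent(p -> System.out.println("found " + p));
    }
}
```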
private void obtainPhotoIdsAndUrisToLoad(Set<Long> photoIds,
Set<String> photoIdsAsStrings, Set<Request> uris) {
photoIds.clear();
photoIdsAsStrings.clear();
uris.clear();
boolean jpegsDecoded = false;
Iterator<Request> iterator = mPendingRequests.values().iterator();
while (iterator.hasNext()) {
Request request = iterator.next();
final BitmapHolder holder = mBitmapHolderCache.get(request.getKey());
if (holder != null && holder.bytes != null && holder.fresh &&
(holder.bitmapRef == null || holder.bitmapRef.get() == null)) {
// This was previously loaded but we don't currently have the inflated Bitmap
inflateBitmap(holder, request.getRequestedExtent());
jpegsDecoded = true;
} else {
if (holder == null || !holder.fresh) {
if (request.isUriRequest()) {
uris.add(request);
} else {
photoIds.add(request.getId());
photoIdsAsStrings.add(String.valueOf(request.mId));
}
}
}
}
if (jpegsDecoded) mMainThreadHandler.sendEmptyMessage(MESSAGE_PHOTOS_LOADED);
}
private class LoaderThread extends HandlerThread implements Callback {
private static final int BUFFER_SIZE = 1024*16;
private static final int MESSAGE_PRELOAD_PHOTOS = 0;
private static final int MESSAGE_LOAD_PHOTOS = 1;
private static final int PHOTO_PRELOAD_DELAY = 1000;
private static final int PRELOAD_BATCH = 25;
private static final int MAX_PHOTOS_TO_PRELOAD = 100;
private final ContentResolver mResolver;
private final StringBuilder mStringBuilder = new StringBuilder();
private final Set<Long> mPhotoIds = Sets.newHashSet();
private final Set<String> mPhotoIdsAsStrings = Sets.newHashSet();
private final Set<Request> mPhotoUris = Sets.newHashSet();
private final List<Long> mPreloadPhotoIds = Lists.newArrayList();
private Handler mLoaderThreadHandler;
private byte mBuffer[];
private static final int PRELOAD_STATUS_NOT_STARTED = 0;
private static final int PRELOAD_STATUS_IN_PROGRESS = 1;
private static final int PRELOAD_STATUS_DONE = 2;
private int mPreloadStatus = PRELOAD_STATUS_NOT_STARTED;
public LoaderThread(ContentResolver resolver) {
super(LOADER_THREAD_NAME);
mResolver = resolver;
} | void function(Set<Long> photoIds, Set<String> photoIdsAsStrings, Set<Request> uris) { photoIds.clear(); photoIdsAsStrings.clear(); uris.clear(); boolean jpegsDecoded = false; Iterator<Request> iterator = mPendingRequests.values().iterator(); while (iterator.hasNext()) { Request request = iterator.next(); final BitmapHolder holder = mBitmapHolderCache.get(request.getKey()); if (holder != null && holder.bytes != null && holder.fresh && (holder.bitmapRef == null holder.bitmapRef.get() == null)) { inflateBitmap(holder, request.getRequestedExtent()); jpegsDecoded = true; } else { if (holder == null !holder.fresh) { if (request.isUriRequest()) { uris.add(request); } else { photoIds.add(request.getId()); photoIdsAsStrings.add(String.valueOf(request.mId)); } } } } if (jpegsDecoded) mMainThreadHandler.sendEmptyMessage(MESSAGE_PHOTOS_LOADED); } private class LoaderThread extends HandlerThread implements Callback { private static final int BUFFER_SIZE = 1024*16; private static final int MESSAGE_PRELOAD_PHOTOS = 0; private static final int MESSAGE_LOAD_PHOTOS = 1; private static final int PHOTO_PRELOAD_DELAY = 1000; private static final int PRELOAD_BATCH = 25; private static final int MAX_PHOTOS_TO_PRELOAD = 100; private final ContentResolver mResolver; private final StringBuilder mStringBuilder = new StringBuilder(); private final Set<Long> mPhotoIds = Sets.newHashSet(); private final Set<String> mPhotoIdsAsStrings = Sets.newHashSet(); private final Set<Request> mPhotoUris = Sets.newHashSet(); private final List<Long> mPreloadPhotoIds = Lists.newArrayList(); private Handler mLoaderThreadHandler; private byte mBuffer[]; private static final int PRELOAD_STATUS_NOT_STARTED = 0; private static final int PRELOAD_STATUS_IN_PROGRESS = 1; private static final int PRELOAD_STATUS_DONE = 2; private int mPreloadStatus = PRELOAD_STATUS_NOT_STARTED; public LoaderThread(ContentResolver resolver) { super(LOADER_THREAD_NAME); mResolver = resolver; } | /**
* Populates an array of photo IDs that need to be loaded. Also decodes bitmaps that we have
* already loaded
*/ | Populates an array of photo IDs that need to be loaded. Also decodes bitmaps that we have already loaded | obtainPhotoIdsAndUrisToLoad | {
"repo_name": "rex-xxx/mt6572_x201",
"path": "packages/apps/Email/src/com/mediatek/email/emailvip/activity/ListPhotoManager.java",
"license": "gpl-2.0",
"size": 42421
} | [
"android.content.ContentResolver",
"android.os.Handler",
"android.os.HandlerThread",
"com.google.android.collect.Lists",
"com.google.android.collect.Sets",
"java.util.Iterator",
"java.util.List",
"java.util.Set"
] | import android.content.ContentResolver; import android.os.Handler; import android.os.HandlerThread; import com.google.android.collect.Lists; import com.google.android.collect.Sets; import java.util.Iterator; import java.util.List; import java.util.Set; | import android.content.*; import android.os.*; import com.google.android.collect.*; import java.util.*; | [
"android.content",
"android.os",
"com.google.android",
"java.util"
] | android.content; android.os; com.google.android; java.util; | 2,795,099 |
private void insertBatch() {
if (bufferCount == 0) {
return;
}
Arrays.sort(buffer, 0, bufferCount);
// Base case: no samples
int start = 0;
if (samples.size() == 0) {
SampleItem newItem = new SampleItem(buffer[0], 1, 0);
samples.add(newItem);
start++;
}
ListIterator<SampleItem> it = samples.listIterator();
SampleItem item = it.next();
for (int i = start; i < bufferCount; i++) {
long v = buffer[i];
while (it.nextIndex() < samples.size() && item.value < v) {
item = it.next();
}
// If we found that bigger item, back up so we insert ourselves before it
if (item.value > v) {
it.previous();
}
// We use different indexes for the edge comparisons, because of the above
// if statement that adjusts the iterator
int delta;
if (it.previousIndex() == 0 || it.nextIndex() == samples.size()) {
delta = 0;
} else {
delta = ((int) Math.floor(allowableError(it.nextIndex()))) - 1;
}
SampleItem newItem = new SampleItem(v, 1, delta);
it.add(newItem);
item = newItem;
}
bufferCount = 0;
} | void function() { if (bufferCount == 0) { return; } Arrays.sort(buffer, 0, bufferCount); int start = 0; if (samples.size() == 0) { SampleItem newItem = new SampleItem(buffer[0], 1, 0); samples.add(newItem); start++; } ListIterator<SampleItem> it = samples.listIterator(); SampleItem item = it.next(); for (int i = start; i < bufferCount; i++) { long v = buffer[i]; while (it.nextIndex() < samples.size() && item.value < v) { item = it.next(); } if (item.value > v) { it.previous(); } int delta; if (it.previousIndex() == 0 it.nextIndex() == samples.size()) { delta = 0; } else { delta = ((int) Math.floor(allowableError(it.nextIndex()))) - 1; } SampleItem newItem = new SampleItem(v, 1, delta); it.add(newItem); item = newItem; } bufferCount = 0; } | /**
* Merges items from buffer into the samples array in one pass.
* This is more efficient than doing an insert on every item.
*/ | Merges items from buffer into the samples array in one pass. This is more efficient than doing an insert on every item | insertBatch | {
"repo_name": "Guavus/hbase",
"path": "hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java",
"license": "apache-2.0",
"size": 8727
} | [
"java.util.Arrays",
"java.util.ListIterator"
] | import java.util.Arrays; import java.util.ListIterator; | import java.util.*; | [
"java.util"
] | java.util; | 2,764,174 |
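A toy sketch of the single-pass merge idea described above (a sorted batch walked into an already-sorted list with one ListIterator), leaving out the quantile delta bookkeeping that the real method maintains.

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;

public class OnePassMergeSketch {
    // Merges a sorted batch into an already-sorted list in a single forward pass.
    static void mergeInto(List<Long> samples, long[] batch) {
        Arrays.sort(batch);
        ListIterator<Long> it = samples.listIterator();
        for (long v : batch) {
            while (it.hasNext()) {
                long next = it.next();
                if (next >= v) {   // overshot: step back so the insert lands before it
                    it.previous();
                    break;
                }
            }
            it.add(v);             // cursor stays just after the inserted value
        }
    }

    public static void main(String[] args) {
        List<Long> samples = new ArrayList<>(List.of(2L, 5L, 9L));
        mergeInto(samples, new long[]{1, 6, 10});
        System.out.println(samples); // prints: [1, 2, 5, 6, 9, 10]
    }
}
```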
private Map<String, TypeInfo> buildTypeInfo(JavaClass[] allClasses) {
for (JavaClass javaClass : allClasses) {
if (javaClass == null) {
continue;
}
TypeInfo info = typeInfos.get(javaClass.getQualifiedName());
if (info == null || info.isPostBuilt()) {
continue;
}
info.setPostBuilt(true);
// handle factory methods
processFactoryMethods(javaClass, info);
PackageInfo packageInfo = getPackageInfoForPackage(javaClass);
XMLNameTransformer transformer = info.getXmlNameTransformer();
if(transformer == TypeInfo.DEFAULT_NAME_TRANSFORMER){
XMLNameTransformer nsInfoXmlNameTransformer = packageInfo.getXmlNameTransformer();
if (nsInfoXmlNameTransformer != null) {
info.setXmlNameTransformer(nsInfoXmlNameTransformer);
} else if (helper.isAnnotationPresent(javaClass, XmlNameTransformer.class)) {
XmlNameTransformer xmlNameTransformer = (XmlNameTransformer) helper.getAnnotation(javaClass, XmlNameTransformer.class);
Class nameTransformerClass = xmlNameTransformer.value();
try {
info.setXmlNameTransformer((XMLNameTransformer) nameTransformerClass.newInstance());
} catch (InstantiationException ex) {
throw JAXBException.exceptionWithNameTransformerClass(nameTransformerClass.getName(), ex);
} catch (IllegalAccessException ex) {
throw JAXBException.exceptionWithNameTransformerClass(nameTransformerClass.getName(), ex);
}
} else if (helper.isAnnotationPresent(javaClass.getPackage(), XmlNameTransformer.class)) {
XmlNameTransformer xmlNameTransformer = (XmlNameTransformer) helper.getAnnotation(javaClass.getPackage(), XmlNameTransformer.class);
Class nameTransformerClass = xmlNameTransformer.value();
try {
info.setXmlNameTransformer((XMLNameTransformer) nameTransformerClass.newInstance());
} catch (InstantiationException ex) {
throw JAXBException.exceptionWithNameTransformerClass(nameTransformerClass.getName(), ex);
} catch (IllegalAccessException ex) {
throw JAXBException.exceptionWithNameTransformerClass(nameTransformerClass.getName(), ex);
}
}
}
// handle @XmlAccessorType
postProcessXmlAccessorType(info, packageInfo);
// handle @XmlType
postProcessXmlType(javaClass, info, packageInfo);
// handle @XmlEnum
if (info.isEnumerationType()) {
addEnumTypeInfo(javaClass, ((EnumTypeInfo) info));
continue;
}
// process schema type name
processTypeQName(javaClass, info, packageInfo.getNamespaceInfo());
// handle superclass if necessary
JavaClass superClass = javaClass.getSuperclass();
processReferencedClass(superClass);
processPropertiesSuperClass(javaClass, info);
// add properties
info.setProperties(getPropertiesForClass(javaClass, info));
// process properties
processTypeInfoProperties(javaClass, info);
// handle @XmlAccessorOrder
postProcessXmlAccessorOrder(info, packageInfo);
validatePropOrderForInfo(info);
}
return typeInfos;
} | Map<String, TypeInfo> function(JavaClass[] allClasses) { for (JavaClass javaClass : allClasses) { if (javaClass == null) { continue; } TypeInfo info = typeInfos.get(javaClass.getQualifiedName()); if (info == null info.isPostBuilt()) { continue; } info.setPostBuilt(true); processFactoryMethods(javaClass, info); PackageInfo packageInfo = getPackageInfoForPackage(javaClass); XMLNameTransformer transformer = info.getXmlNameTransformer(); if(transformer == TypeInfo.DEFAULT_NAME_TRANSFORMER){ XMLNameTransformer nsInfoXmlNameTransformer = packageInfo.getXmlNameTransformer(); if (nsInfoXmlNameTransformer != null) { info.setXmlNameTransformer(nsInfoXmlNameTransformer); } else if (helper.isAnnotationPresent(javaClass, XmlNameTransformer.class)) { XmlNameTransformer xmlNameTransformer = (XmlNameTransformer) helper.getAnnotation(javaClass, XmlNameTransformer.class); Class nameTransformerClass = xmlNameTransformer.value(); try { info.setXmlNameTransformer((XMLNameTransformer) nameTransformerClass.newInstance()); } catch (InstantiationException ex) { throw JAXBException.exceptionWithNameTransformerClass(nameTransformerClass.getName(), ex); } catch (IllegalAccessException ex) { throw JAXBException.exceptionWithNameTransformerClass(nameTransformerClass.getName(), ex); } } else if (helper.isAnnotationPresent(javaClass.getPackage(), XmlNameTransformer.class)) { XmlNameTransformer xmlNameTransformer = (XmlNameTransformer) helper.getAnnotation(javaClass.getPackage(), XmlNameTransformer.class); Class nameTransformerClass = xmlNameTransformer.value(); try { info.setXmlNameTransformer((XMLNameTransformer) nameTransformerClass.newInstance()); } catch (InstantiationException ex) { throw JAXBException.exceptionWithNameTransformerClass(nameTransformerClass.getName(), ex); } catch (IllegalAccessException ex) { throw JAXBException.exceptionWithNameTransformerClass(nameTransformerClass.getName(), ex); } } } addEnumTypeInfo(javaClass, ((EnumTypeInfo) info)); continue; } processTypeQName(javaClass, info, packageInfo.getNamespaceInfo()); JavaClass superClass = javaClass.getSuperclass(); processReferencedClass(superClass); processPropertiesSuperClass(javaClass, info); info.setProperties(getPropertiesForClass(javaClass, info)); processTypeInfoProperties(javaClass, info); validatePropOrderForInfo(info); } return typeInfos; } | /**
* INTERNAL:
*
* Complete building TypeInfo objects for a given set of JavaClass
* instances. This method assumes that init, preBuildTypeInfo, and
* postBuildTypeInfo have been called.
*
* @param allClasses
* @return
*/ | Complete building TypeInfo objects for a given set of JavaClass instances. This method assumes that init, preBuildTypeInfo, and postBuildTypeInfo have been called | buildTypeInfo | {
"repo_name": "RallySoftware/eclipselink.runtime",
"path": "moxy/org.eclipse.persistence.moxy/src/org/eclipse/persistence/jaxb/compiler/AnnotationsProcessor.java",
"license": "epl-1.0",
"size": 252702
} | [
"java.util.Map",
"org.eclipse.persistence.exceptions.JAXBException",
"org.eclipse.persistence.jaxb.javamodel.JavaClass",
"org.eclipse.persistence.oxm.XMLNameTransformer",
"org.eclipse.persistence.oxm.annotations.XmlNameTransformer"
] | import java.util.Map; import org.eclipse.persistence.exceptions.JAXBException; import org.eclipse.persistence.jaxb.javamodel.JavaClass; import org.eclipse.persistence.oxm.XMLNameTransformer; import org.eclipse.persistence.oxm.annotations.XmlNameTransformer; | import java.util.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.jaxb.javamodel.*; import org.eclipse.persistence.oxm.*; import org.eclipse.persistence.oxm.annotations.*; | [
"java.util",
"org.eclipse.persistence"
] | java.util; org.eclipse.persistence; | 1,265,771 |
@Test
public void shouldFallBackWhenDatabaseInterfaceIsOfAnOldType() {
String statement = databaseMeta.getDropTableIfExistsStatement( TABLE_NAME );
assertEquals( DROP_STATEMENT_FALLBACK, statement );
} | void function() { String statement = databaseMeta.getDropTableIfExistsStatement( TABLE_NAME ); assertEquals( DROP_STATEMENT_FALLBACK, statement ); } | /**
* Given that the {@link DatabaseInterface} object is of an old type.
* <br/>
* When {@link DatabaseMeta#getDropTableIfExistsStatement(String)} is called,
* then a fallback statement should be returned.
*/ | Given that the <code>DatabaseInterface</code> object is of an old type. When <code>DatabaseMeta#getDropTableIfExistsStatement(String)</code> is called, then a fallback statement should be returned | shouldFallBackWhenDatabaseInterfaceIsOfAnOldType | {
"repo_name": "pavel-sakun/pentaho-kettle",
"path": "core/src/test/java/org/pentaho/di/core/database/DatabaseMetaTest.java",
"license": "apache-2.0",
"size": 17115
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 158,894 |
public static void removeRequiredCapability(File manifestFile, String capability) throws IOException {
Manifest mf = getManifest(manifestFile);
// update the Require-Capability
Attributes attr = mf.getMainAttributes();
String requiredCapability = getManifestRequiredCapability(mf);
Pattern p1 = Pattern.compile(BW6Constants.capabilityPattern);
List<String> capabilities = BW6PackagingConvertor.getCapabilites(requiredCapability);
for (Iterator<String> it = capabilities.iterator(); it.hasNext();) {
String c = (String) it.next();
Matcher m1 = p1.matcher(c);
if (m1.matches()) {
String capabilityType = m1.group(1);
String capabilityFilter = m1.group(2);
Pattern p2 = Pattern.compile(BW6Constants.capabilityFilterPattern);
Matcher m2 = p2.matcher(capabilityFilter);
if (capabilityType.equals("com.tibco.bw.module") && m2.matches()) {
String capabilityName = m2.group(1);
// String capabilityVersion = m2.group(2);
if (capabilityName.equals(capability)) {
it.remove();
}
}
}
}
requiredCapability = StringUtils.join(capabilities, ",");
attr.putValue("Require-Capability", requiredCapability);
//Write the updated file and return the same.
FileOutputStream os = new FileOutputStream(manifestFile);
mf.write(os);
os.close();
} | static void function(File manifestFile, String capability) throws IOException { Manifest mf = getManifest(manifestFile); Attributes attr = mf.getMainAttributes(); String requiredCapability = getManifestRequiredCapability(mf); Pattern p1 = Pattern.compile(BW6Constants.capabilityPattern); List<String> capabilities = BW6PackagingConvertor.getCapabilites(requiredCapability); for (Iterator<String> it = capabilities.iterator(); it.hasNext();) { String c = (String) it.next(); Matcher m1 = p1.matcher(c); if (m1.matches()) { String capabilityType = m1.group(1); String capabilityFilter = m1.group(2); Pattern p2 = Pattern.compile(BW6Constants.capabilityFilterPattern); Matcher m2 = p2.matcher(capabilityFilter); if (capabilityType.equals(STR) && m2.matches()) { String capabilityName = m2.group(1); if (capabilityName.equals(capability)) { it.remove(); } } } } requiredCapability = StringUtils.join(capabilities, ","); attr.putValue(STR, requiredCapability); FileOutputStream os = new FileOutputStream(manifestFile); mf.write(os); os.close(); } | /**
* <p>
* Change the Require-Capability field in a MANIFEST.MF file.
* </p>
 * @param manifestFile the MANIFEST.MF file to update
 * @param capability the name of the com.tibco.bw.module capability to remove
 * @throws IOException if the manifest cannot be read or written
*/ | Change the Require-Capability field in a MANIFEST.MF file. | removeRequiredCapability | {
"repo_name": "teecube/tic-bw6",
"path": "src/main/java/t3/tic/bw6/util/ManifestManager.java",
"license": "apache-2.0",
"size": 6168
} | [
"java.io.File",
"java.io.FileOutputStream",
"java.io.IOException",
"java.util.Iterator",
"java.util.List",
"java.util.jar.Attributes",
"java.util.jar.Manifest",
"java.util.regex.Matcher",
"java.util.regex.Pattern",
"org.apache.commons.lang3.StringUtils"
] | import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.jar.Attributes; import java.util.jar.Manifest; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; | import java.io.*; import java.util.*; import java.util.jar.*; import java.util.regex.*; import org.apache.commons.lang3.*; | [
"java.io",
"java.util",
"org.apache.commons"
] | java.io; java.util; org.apache.commons; | 2,364,744 |
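The removeRequiredCapability entry above ultimately relies on the JDK's java.util.jar round-trip: parse the manifest, edit a main attribute, write it back. A minimal sketch of that round-trip, with a placeholder manifest path and none of the BW6-specific filtering:

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.jar.Attributes;
import java.util.jar.Manifest;

public class ManifestRoundTrip {
    public static void main(String[] args) throws Exception {
        File file = new File("META-INF/MANIFEST.MF"); // placeholder path
        Manifest mf;
        try (FileInputStream in = new FileInputStream(file)) {
            mf = new Manifest(in); // parse the existing manifest
        }
        Attributes main = mf.getMainAttributes();
        // The OSGi header the helper rewrites; may be null if the bundle has none.
        System.out.println("Require-Capability: " + main.getValue("Require-Capability"));
        try (FileOutputStream out = new FileOutputStream(file)) {
            mf.write(out); // write the (possibly modified) manifest back
        }
    }
}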
public Observable<ServiceResponse<VirtualHubInner>> getByResourceGroupWithServiceResponseAsync(String resourceGroupName, String virtualHubName) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (virtualHubName == null) {
throw new IllegalArgumentException("Parameter virtualHubName is required and cannot be null.");
} | Observable<ServiceResponse<VirtualHubInner>> function(String resourceGroupName, String virtualHubName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (virtualHubName == null) { throw new IllegalArgumentException(STR); } | /**
* Retrieves the details of a VirtualHub.
*
* @param resourceGroupName The resource group name of the VirtualHub.
* @param virtualHubName The name of the VirtualHub.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the VirtualHubInner object
*/ | Retrieves the details of a VirtualHub | getByResourceGroupWithServiceResponseAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2018_12_01/src/main/java/com/microsoft/azure/management/network/v2018_12_01/implementation/VirtualHubsInner.java",
"license": "mit",
"size": 72294
} | [
"com.microsoft.rest.ServiceResponse"
] | import com.microsoft.rest.ServiceResponse; | import com.microsoft.rest.*; | [
"com.microsoft.rest"
] | com.microsoft.rest; | 2,850,493 |
@CalledByNative
private void dismiss() {
if (mNameFixFlowPrompt != null) {
mNameFixFlowPrompt.dismiss(DialogDismissalCause.DISMISSED_BY_NATIVE);
}
} | void function() { if (mNameFixFlowPrompt != null) { mNameFixFlowPrompt.dismiss(DialogDismissalCause.DISMISSED_BY_NATIVE); } } | /**
* Dismisses the prompt without returning any user response.
*/ | Dismisses the prompt without returning any user response | dismiss | {
"repo_name": "endlessm/chromium-browser",
"path": "chrome/android/java/src/org/chromium/chrome/browser/autofill/AutofillNameFixFlowBridge.java",
"license": "bsd-3-clause",
"size": 3903
} | [
"org.chromium.ui.modaldialog.DialogDismissalCause"
] | import org.chromium.ui.modaldialog.DialogDismissalCause; | import org.chromium.ui.modaldialog.*; | [
"org.chromium.ui"
] | org.chromium.ui; | 302,688 |
public void configureAttribute(Object bean, Node attribute)
throws LineConfigException
{
String attrName = attribute.getNodeName();
if (attrName.startsWith("xmlns"))
return;
String oldFile = _baseUri;
Thread thread = Thread.currentThread();
ClassLoader oldLoader = thread.getContextClassLoader();
XmlConfigContext oldBuilder = getCurrentBuilder();
try {
setCurrentBuilder(this);
_baseUri = attribute.getBaseURI();
ConfigType<?> type = TypeFactory.getType(bean);
QName qName = ((QAbstractNode) attribute).getQName();
type.beforeConfigure(this, bean, attribute);
configureChildNode(attribute, qName, bean, type, false);
type.afterConfigure(this, bean);
}
catch (LineConfigException e) {
throw e;
}
catch (Exception e) {
throw error(e, attribute);
} finally {
_baseUri = oldFile;
setCurrentBuilder(oldBuilder);
thread.setContextClassLoader(oldLoader);
}
} | void function(Object bean, Node attribute) throws LineConfigException { String attrName = attribute.getNodeName(); if (attrName.startsWith("xmlns")) return; String oldFile = _baseUri; Thread thread = Thread.currentThread(); ClassLoader oldLoader = thread.getContextClassLoader(); XmlConfigContext oldBuilder = getCurrentBuilder(); try { setCurrentBuilder(this); _baseUri = attribute.getBaseURI(); ConfigType<?> type = TypeFactory.getType(bean); QName qName = ((QAbstractNode) attribute).getQName(); type.beforeConfigure(this, bean, attribute); configureChildNode(attribute, qName, bean, type, false); type.afterConfigure(this, bean); } catch (LineConfigException e) { throw e; } catch (Exception e) { throw error(e, attribute); } finally { _baseUri = oldFile; setCurrentBuilder(oldBuilder); thread.setContextClassLoader(oldLoader); } } | /**
* External call to configure a bean's attribute.
*
* @param bean the bean to be configured
* @param attribute the node representing the configured attribute
* @throws LineConfigException
*/ | External call to configure a bean's attribute | configureAttribute | {
"repo_name": "christianchristensen/resin",
"path": "modules/kernel/src/com/caucho/config/xml/XmlConfigContext.java",
"license": "gpl-2.0",
"size": 31509
} | [
"com.caucho.config.LineConfigException",
"com.caucho.config.type.ConfigType",
"com.caucho.config.type.TypeFactory",
"com.caucho.xml.QAbstractNode",
"com.caucho.xml.QName",
"org.w3c.dom.Node"
] | import com.caucho.config.LineConfigException; import com.caucho.config.type.ConfigType; import com.caucho.config.type.TypeFactory; import com.caucho.xml.QAbstractNode; import com.caucho.xml.QName; import org.w3c.dom.Node; | import com.caucho.config.*; import com.caucho.config.type.*; import com.caucho.xml.*; import org.w3c.dom.*; | [
"com.caucho.config",
"com.caucho.xml",
"org.w3c.dom"
] | com.caucho.config; com.caucho.xml; org.w3c.dom; | 1,817,330 |
@Override
public String update() {
String param = this.getStrArgumentValue("PARAM_NAME");
if (!param.isEmpty()) {
String sVal = this.getStrArgumentValue("PARAM_STRVALUE");
int iVal = this.getIntArgumentValue("PARAM_INTVALUE");
// DB update
IEntity entity = this.getBOFactory().getEntity("APP_PARAMS");
entity.reset();
entity.field("PARAM_NAME").setKeyValue(param);
entity.field("PARAM_LABEL").setValue(this.getIntArgumentValue("PARAM_LABEL"));
entity.field("PARAM_DISPLAY").setValue(this.getStrArgumentValue("PARAM_DISPLAY"));
entity.field("PARAM_STRVALUE").setValue(sVal);
entity.field("PARAM_INTVALUE").setValue(iVal);
entity.update();
}
return this.list();
}
| String function() { String param = this.getStrArgumentValue(STR); if (!param.isEmpty()) { String sVal = this.getStrArgumentValue(STR); int iVal = this.getIntArgumentValue(STR); IEntity entity = this.getBOFactory().getEntity(STR); entity.reset(); entity.field(STR).setKeyValue(param); entity.field(STR).setValue(this.getIntArgumentValue(STR)); entity.field(STR).setValue(this.getStrArgumentValue(STR)); entity.field(STR).setValue(sVal); entity.field(STR).setValue(iVal); entity.update(); } return this.list(); } | /**
 * Updates a parameter in the DB.
 * @return the refreshed parameter list view (the result of {@code list()})
*/ | update a parameter into the DB | update | {
"repo_name": "caylabenoit/dgm",
"path": "src/java/com/dgm/form/admin/ParametersAction.java",
"license": "gpl-3.0",
"size": 3471
} | [
"com.joy.bo.IEntity"
] | import com.joy.bo.IEntity; | import com.joy.bo.*; | [
"com.joy.bo"
] | com.joy.bo; | 33,030 |
@Override public void exitMulDivAddSub(@NotNull InfixParser.MulDivAddSubContext ctx) { } | @Override public void exitMulDivAddSub(@NotNull InfixParser.MulDivAddSubContext ctx) { } | /**
* {@inheritDoc}
* <p/>
* The default implementation does nothing.
*/ | The default implementation does nothing | enterMulDivAddSub | {
"repo_name": "PulfordJ/small-compiler",
"path": "src/generated/java/InfixBaseListener.java",
"license": "gpl-2.0",
"size": 11441
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 2,286,901 |
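The empty exitMulDivAddSub above is the hook point of an ANTLR-generated base listener: the default implementation does nothing precisely so that subclasses only override the rules they care about. A hedged sketch of a concrete listener, assuming the generated InfixParser and InfixBaseListener from the same grammar are on the classpath:

import org.antlr.v4.runtime.misc.NotNull;

// Hypothetical subclass of the generated base listener.
public class PrintingInfixListener extends InfixBaseListener {
    @Override
    public void exitMulDivAddSub(@NotNull InfixParser.MulDivAddSubContext ctx) {
        // Runs once the whole mul/div/add/sub rule has been matched;
        // an evaluator would combine the operand values of ctx here.
        System.out.println("matched: " + ctx.getText());
    }
}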
public NamedOperatorHLAPI getContainerNamedOperatorHLAPI() {
if (item.getContainerNamedOperator() == null)
return null;
return new NamedOperatorHLAPI(item.getContainerNamedOperator());
} | NamedOperatorHLAPI function() { if (item.getContainerNamedOperator() == null) return null; return new NamedOperatorHLAPI(item.getContainerNamedOperator()); } | /**
 * This accessor automatically encapsulates an element of the current object.
* WARNING : this creates a new object in memory.
*
* @return : null if the element is null
*/ | This accessor automatically encapsulate an element of the current object. WARNING : this creates a new object in memory | getContainerNamedOperatorHLAPI | {
"repo_name": "lhillah/pnmlframework",
"path": "pnmlFw-PT-HLPNG/src/fr/lip6/move/pnml/pthlpng/integers/hlapi/SubtractionHLAPI.java",
"license": "epl-1.0",
"size": 69869
} | [
"fr.lip6.move.pnml.pthlpng.terms.hlapi.NamedOperatorHLAPI"
] | import fr.lip6.move.pnml.pthlpng.terms.hlapi.NamedOperatorHLAPI; | import fr.lip6.move.pnml.pthlpng.terms.hlapi.*; | [
"fr.lip6.move"
] | fr.lip6.move; | 2,054,483 |
private static void activateOneNode() {
if(DO_HOBX_GROWTH) {
// Add the node with the most connections to active nodes
int maxConns = 0;
double decider = 0;
Node best = null;
for(int i=0;i<nodes.length;i++) {
Node n = nodes[i];
if(n.active) continue;
int conns = n.countConnectionsToActiveNodes();
double myDecider = r.nextDouble();
if(conns > maxConns ||
(conns == maxConns && myDecider > decider)) {
decider = myDecider;
maxConns = conns;
best = n;
}
}
best.activate();
return;
}
if(DO_RANDOMIZED_HOBX_GROWTH) {
Vector v = new Vector();
for(int i=0;i<nodes.length;i++) {
Node n = nodes[i];
if(n.active) continue;
int conns = n.countConnectionsToActiveNodes();
for(int j=0;j<conns;j++)
v.add(n);
}
Node n = (Node) (v.get(r.nextInt(v.size())));
n.activate();
return;
}
if(!DO_SILLY_GROWTH) {
if(Main.DO_QUEUE_GROWTH) {
while(true) {
// Pop bottom node
Node n = (Node) borderQueue.removeFirst();
if(n.active) continue;
n.activate();
break;
}
} else {
// Pick a random border node and activate it
Node[] borders = (Node[]) borderNodes.toArray(new Node[borderNodes.size()]);
Node randomNode = borders[r.nextInt(borders.length)];
if(randomNode.active) throw new IllegalStateException("Node active while activating one node");
randomNode.activate();
}
} else {
lastActivated++;
nodes[lastActivated].activate();
}
if(DO_VARIABLE_HTL) {
double l = Math.log(activeNodes.size())/Math.log(2.0);
INSERT_HTL = FETCH_HTL = (int) l;
//(int) (l*l/4);
System.err.println("Setting HTL to "+INSERT_HTL);
}
}
static int lastActivated = 0;
static long grandTotalHops = 0;
static long totalCyclingTime = 0; | static void function() { if(DO_HOBX_GROWTH) { int maxConns = 0; double decider = 0; Node best = null; for(int i=0;i<nodes.length;i++) { Node n = nodes[i]; if(n.active) continue; int conns = n.countConnectionsToActiveNodes(); double myDecider = r.nextDouble(); if(conns > maxConns (conns == maxConns && myDecider > decider)) { decider = myDecider; maxConns = conns; best = n; } } best.activate(); return; } if(DO_RANDOMIZED_HOBX_GROWTH) { Vector v = new Vector(); for(int i=0;i<nodes.length;i++) { Node n = nodes[i]; if(n.active) continue; int conns = n.countConnectionsToActiveNodes(); for(int j=0;j<conns;j++) v.add(n); } Node n = (Node) (v.get(r.nextInt(v.size()))); n.activate(); return; } if(!DO_SILLY_GROWTH) { if(Main.DO_QUEUE_GROWTH) { while(true) { Node n = (Node) borderQueue.removeFirst(); if(n.active) continue; n.activate(); break; } } else { Node[] borders = (Node[]) borderNodes.toArray(new Node[borderNodes.size()]); Node randomNode = borders[r.nextInt(borders.length)]; if(randomNode.active) throw new IllegalStateException(STR); randomNode.activate(); } } else { lastActivated++; nodes[lastActivated].activate(); } if(DO_VARIABLE_HTL) { double l = Math.log(activeNodes.size())/Math.log(2.0); INSERT_HTL = FETCH_HTL = (int) l; System.err.println(STR+INSERT_HTL); } } static int lastActivated = 0; static long grandTotalHops = 0; static long totalCyclingTime = 0; | /**
* Activate a single node.
*/ | Activate a single node | activateOneNode | {
"repo_name": "freenet/legacy",
"path": "src/freenet/node/simulator/whackysim/Main.java",
"license": "gpl-2.0",
"size": 50337
} | [
"java.util.Vector"
] | import java.util.Vector; | import java.util.*; | [
"java.util"
] | java.util; | 2,187,400 |
@SuppressWarnings({"ResultOfMethodCallIgnored", "ConstantConditions"})
public void testBlockMetrics() throws Exception {
IgfsEx igfs = (IgfsEx)igfsPrimary[0];
IgfsPath fileRemote = new IgfsPath("/fileRemote");
IgfsPath file1 = new IgfsPath("/primary/file1");
IgfsPath file2 = new IgfsPath("/primary/file2");
// Create remote file and write some data to it.
IgfsOutputStream out = igfsSecondary.create(fileRemote, 256, true, null, 1, 256, null);
int rmtBlockSize = igfsSecondary.info(fileRemote).blockSize();
out.write(new byte[rmtBlockSize]);
out.close();
// Start metrics measuring.
IgfsMetrics initMetrics = igfs.metrics();
// Create empty file.
igfs.create(file1, 256, true, null, 1, 256, null).close();
int blockSize = igfs.info(file1).blockSize();
checkBlockMetrics(initMetrics, igfs.metrics(), 0, 0, 0, 0, 0, 0);
// Write two blocks to the file.
IgfsOutputStream os = igfs.append(file1, false);
os.write(new byte[blockSize * 2]);
os.close();
checkBlockMetrics(initMetrics, igfs.metrics(), 0, 0, 0, 2, 0, blockSize * 2);
// Write one more file (one block).
os = igfs.create(file2, 256, true, null, 1, 256, null);
os.write(new byte[blockSize]);
os.close();
checkBlockMetrics(initMetrics, igfs.metrics(), 0, 0, 0, 3, 0, blockSize * 3);
// Read data from the first file.
IgfsInputStream is = igfs.open(file1);
is.readFully(0, new byte[blockSize * 2]);
is.close();
checkBlockMetrics(initMetrics, igfs.metrics(), 2, 0, blockSize * 2, 3, 0, blockSize * 3);
// Read data from the second file with hits.
is = igfs.open(file2);
is.read(new byte[blockSize]);
is.close();
checkBlockMetrics(initMetrics, igfs.metrics(), 3, 0, blockSize * 3, 3, 0, blockSize * 3);
// Clear the first file.
igfs.create(file1, true).close();
checkBlockMetrics(initMetrics, igfs.metrics(), 3, 0, blockSize * 3, 3, 0, blockSize * 3);
// Delete the second file.
igfs.delete(file2, false);
checkBlockMetrics(initMetrics, igfs.metrics(), 3, 0, blockSize * 3, 3, 0, blockSize * 3);
// Read remote file.
is = igfs.open(fileRemote);
is.read(new byte[rmtBlockSize]);
is.close();
checkBlockMetrics(initMetrics, igfs.metrics(), 4, 1, blockSize * 3 + rmtBlockSize, 3, 0, blockSize * 3);
// Lets wait for blocks will be placed to cache
U.sleep(300);
// Read remote file again.
is = igfs.open(fileRemote);
is.read(new byte[rmtBlockSize]);
is.close();
checkBlockMetrics(initMetrics, igfs.metrics(), 5, 1, blockSize * 3 + rmtBlockSize * 2, 3, 0, blockSize * 3);
IgfsMetrics metrics = igfs.metrics();
assert metrics.secondarySpaceSize() == rmtBlockSize;
// Write some data to the file working in DUAL mode.
os = igfs.append(fileRemote, false);
os.write(new byte[rmtBlockSize]);
os.close();
// Additional block read here due to file ending synchronization.
checkBlockMetrics(initMetrics, igfs.metrics(), 5, 1, blockSize * 3 + rmtBlockSize * 2, 4, 1,
blockSize * 3 + rmtBlockSize);
metrics = igfs.metrics();
assert metrics.secondarySpaceSize() == rmtBlockSize * 2;
igfs.delete(fileRemote, false);
U.sleep(300);
assert igfs.metrics().secondarySpaceSize() == 0;
// Write partial block to the first file.
os = igfs.append(file1, false);
os.write(new byte[blockSize / 2]);
os.close();
checkBlockMetrics(initMetrics, igfs.metrics(), 5, 1, blockSize * 3 + rmtBlockSize * 2, 5, 1,
blockSize * 7 / 2 + rmtBlockSize);
igfs.resetMetrics();
metrics = igfs.metrics();
assert metrics.blocksReadTotal() == 0;
assert metrics.blocksReadRemote() == 0;
assert metrics.blocksWrittenTotal() == 0;
assert metrics.blocksWrittenRemote() == 0;
assert metrics.bytesRead() == 0;
assert metrics.bytesReadTime() == 0;
assert metrics.bytesWritten() == 0;
assert metrics.bytesWriteTime() == 0;
} | @SuppressWarnings({STR, STR}) void function() throws Exception { IgfsEx igfs = (IgfsEx)igfsPrimary[0]; IgfsPath fileRemote = new IgfsPath(STR); IgfsPath file1 = new IgfsPath(STR); IgfsPath file2 = new IgfsPath(STR); IgfsOutputStream out = igfsSecondary.create(fileRemote, 256, true, null, 1, 256, null); int rmtBlockSize = igfsSecondary.info(fileRemote).blockSize(); out.write(new byte[rmtBlockSize]); out.close(); IgfsMetrics initMetrics = igfs.metrics(); igfs.create(file1, 256, true, null, 1, 256, null).close(); int blockSize = igfs.info(file1).blockSize(); checkBlockMetrics(initMetrics, igfs.metrics(), 0, 0, 0, 0, 0, 0); IgfsOutputStream os = igfs.append(file1, false); os.write(new byte[blockSize * 2]); os.close(); checkBlockMetrics(initMetrics, igfs.metrics(), 0, 0, 0, 2, 0, blockSize * 2); os = igfs.create(file2, 256, true, null, 1, 256, null); os.write(new byte[blockSize]); os.close(); checkBlockMetrics(initMetrics, igfs.metrics(), 0, 0, 0, 3, 0, blockSize * 3); IgfsInputStream is = igfs.open(file1); is.readFully(0, new byte[blockSize * 2]); is.close(); checkBlockMetrics(initMetrics, igfs.metrics(), 2, 0, blockSize * 2, 3, 0, blockSize * 3); is = igfs.open(file2); is.read(new byte[blockSize]); is.close(); checkBlockMetrics(initMetrics, igfs.metrics(), 3, 0, blockSize * 3, 3, 0, blockSize * 3); igfs.create(file1, true).close(); checkBlockMetrics(initMetrics, igfs.metrics(), 3, 0, blockSize * 3, 3, 0, blockSize * 3); igfs.delete(file2, false); checkBlockMetrics(initMetrics, igfs.metrics(), 3, 0, blockSize * 3, 3, 0, blockSize * 3); is = igfs.open(fileRemote); is.read(new byte[rmtBlockSize]); is.close(); checkBlockMetrics(initMetrics, igfs.metrics(), 4, 1, blockSize * 3 + rmtBlockSize, 3, 0, blockSize * 3); U.sleep(300); is = igfs.open(fileRemote); is.read(new byte[rmtBlockSize]); is.close(); checkBlockMetrics(initMetrics, igfs.metrics(), 5, 1, blockSize * 3 + rmtBlockSize * 2, 3, 0, blockSize * 3); IgfsMetrics metrics = igfs.metrics(); assert metrics.secondarySpaceSize() == rmtBlockSize; os = igfs.append(fileRemote, false); os.write(new byte[rmtBlockSize]); os.close(); checkBlockMetrics(initMetrics, igfs.metrics(), 5, 1, blockSize * 3 + rmtBlockSize * 2, 4, 1, blockSize * 3 + rmtBlockSize); metrics = igfs.metrics(); assert metrics.secondarySpaceSize() == rmtBlockSize * 2; igfs.delete(fileRemote, false); U.sleep(300); assert igfs.metrics().secondarySpaceSize() == 0; os = igfs.append(file1, false); os.write(new byte[blockSize / 2]); os.close(); checkBlockMetrics(initMetrics, igfs.metrics(), 5, 1, blockSize * 3 + rmtBlockSize * 2, 5, 1, blockSize * 7 / 2 + rmtBlockSize); igfs.resetMetrics(); metrics = igfs.metrics(); assert metrics.blocksReadTotal() == 0; assert metrics.blocksReadRemote() == 0; assert metrics.blocksWrittenTotal() == 0; assert metrics.blocksWrittenRemote() == 0; assert metrics.bytesRead() == 0; assert metrics.bytesReadTime() == 0; assert metrics.bytesWritten() == 0; assert metrics.bytesWriteTime() == 0; } | /**
* Test block metrics.
*
* @throws Exception If failed.
*/ | Test block metrics | testBlockMetrics | {
"repo_name": "alexzaitzev/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsMetricsSelfTest.java",
"license": "apache-2.0",
"size": 18216
} | [
"org.apache.ignite.igfs.IgfsInputStream",
"org.apache.ignite.igfs.IgfsMetrics",
"org.apache.ignite.igfs.IgfsOutputStream",
"org.apache.ignite.igfs.IgfsPath",
"org.apache.ignite.internal.util.typedef.internal.U"
] | import org.apache.ignite.igfs.IgfsInputStream; import org.apache.ignite.igfs.IgfsMetrics; import org.apache.ignite.igfs.IgfsOutputStream; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.util.typedef.internal.U; | import org.apache.ignite.igfs.*; import org.apache.ignite.internal.util.typedef.internal.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 681,680 |
public SystemOptions getUniversityFiscal() {
return universityFiscal;
}
| SystemOptions function() { return universityFiscal; } | /**
* Gets the universityFiscal attribute.
*
* @return Returns the universityFiscal
*/ | Gets the universityFiscal attribute | getUniversityFiscal | {
"repo_name": "ua-eas/ua-kfs-5.3",
"path": "work/src/org/kuali/kfs/coa/businessobject/ObjectCodeGlobal.java",
"license": "agpl-3.0",
"size": 25050
} | [
"org.kuali.kfs.sys.businessobject.SystemOptions"
] | import org.kuali.kfs.sys.businessobject.SystemOptions; | import org.kuali.kfs.sys.businessobject.*; | [
"org.kuali.kfs"
] | org.kuali.kfs; | 210,978 |
@Override
public PurgeState cancelPurge(EzSecurityToken userToken, long purgeId) throws TException {
HashMap<String, String> auditArgs = Maps.newHashMap();
auditArgs.put("action", "cancelPurge");
auditArgs.put("purgeId", Long.toString(purgeId));
auditLog(userToken, AuditEventType.FileObjectDelete, auditArgs);
validateEzCentralPurgeSecurityId(userToken);
PurgeState state = purgeStatus(userToken, purgeId); // TODO
state.setCancelStatus(CancelStatus.CANNOT_CANCEL);
state.setTimeStamp(TimeUtil.convertToThriftDateTime(System.currentTimeMillis()));
insertPurgeStatus(state, new Visibility().setFormalVisibility(userToken.getAuthorizationLevel()), userToken);
return state;
} | PurgeState function(EzSecurityToken userToken, long purgeId) throws TException { HashMap<String, String> auditArgs = Maps.newHashMap(); auditArgs.put(STR, STR); auditArgs.put(STR, Long.toString(purgeId)); auditLog(userToken, AuditEventType.FileObjectDelete, auditArgs); validateEzCentralPurgeSecurityId(userToken); PurgeState state = purgeStatus(userToken, purgeId); state.setCancelStatus(CancelStatus.CANNOT_CANCEL); state.setTimeStamp(TimeUtil.convertToThriftDateTime(System.currentTimeMillis())); insertPurgeStatus(state, new Visibility().setFormalVisibility(userToken.getAuthorizationLevel()), userToken); return state; } | /**
* Always returns a {@link CancelStatus#CANNOT_CANCEL} status and does
* not cancel previously started purges from ezElastic.
*/ | Always returns a <code>CancelStatus#CANNOT_CANCEL</code> status and does not cancel previously started purges from ezElastic | cancelPurge | {
"repo_name": "infochimps-forks/ezbake-platform-services",
"path": "search-ssr/src/main/java/ezbake/services/search/SSRServiceHandler.java",
"license": "apache-2.0",
"size": 95670
} | [
"com.google.common.collect.Maps",
"java.util.HashMap",
"org.apache.thrift.TException"
] | import com.google.common.collect.Maps; import java.util.HashMap; import org.apache.thrift.TException; | import com.google.common.collect.*; import java.util.*; import org.apache.thrift.*; | [
"com.google.common",
"java.util",
"org.apache.thrift"
] | com.google.common; java.util; org.apache.thrift; | 1,345,620 |
public final Iterator<V> iterator() {
return new ValueIterator<K,V>(map);
} | final Iterator<V> function() { return new ValueIterator<K,V>(map); } | /**
* Returns a "weakly consistent" iterator that will never
* throw {@link java.util.ConcurrentModificationException}, and
* guarantees to traverse elements as they existed upon
* construction of the iterator, and may (but is not
* guaranteed to) reflect any modifications subsequent to
* construction.
*
* @return an iterator over the values of this map
*/ | Returns a "weakly consistent" iterator that will never throw <code>java.util.ConcurrentModificationException</code>, and guarantees to traverse elements as they existed upon construction of the iterator, and may (but is not guaranteed to) reflect any modifications subsequent to construction | iterator | {
"repo_name": "menacher/netty",
"path": "common/src/main/java/io/netty/util/internal/chmv8/ConcurrentHashMapV8.java",
"license": "apache-2.0",
"size": 284179
} | [
"java.util.Iterator"
] | import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 1,694,567 |
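The weak-consistency contract documented above is the same one the JDK's ConcurrentHashMap view iterators make, which this backported V8 class mirrors. A small sketch against the standard JDK class (an assumption, since the shaded io.netty copy is not normally used directly):

import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;

public class WeaklyConsistentDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
        map.put("a", 1);
        map.put("b", 2);
        Iterator<Integer> it = map.values().iterator();
        // Mutating the map mid-iteration is legal: no
        // ConcurrentModificationException is thrown, and the new entry
        // may or may not be visible to this iterator.
        map.put("c", 3);
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}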
static void internalSetInstance(final String config)
{
final XmlObjectReader reader = getReader();
final ConfigXml cfg = (ConfigXml) reader.read(config);
instance = new ConfigXml();
copyDeclaredFields(null, instance.getClass(), cfg, instance);
} | static void internalSetInstance(final String config) { final XmlObjectReader reader = getReader(); final ConfigXml cfg = (ConfigXml) reader.read(config); instance = new ConfigXml(); copyDeclaredFields(null, instance.getClass(), cfg, instance); } | /**
* For test cases.
* @param config
*/ | For test cases | internalSetInstance | {
"repo_name": "developerleo/ProjectForge-2nd",
"path": "src/main/java/org/projectforge/core/ConfigXml.java",
"license": "gpl-3.0",
"size": 36437
} | [
"org.projectforge.xml.stream.XmlObjectReader"
] | import org.projectforge.xml.stream.XmlObjectReader; | import org.projectforge.xml.stream.*; | [
"org.projectforge.xml"
] | org.projectforge.xml; | 2,021,727 |
public boolean hasLogImages() {
return CollectionUtils.isNotEmpty(logImages);
} | boolean function() { return CollectionUtils.isNotEmpty(logImages); } | /**
 * Check if the current {@link LogType} has any {@link Image}s attached.
*
* @return
* {@code true} if {@link LogType} has images
*/ | Check if current LogType has Images. Check if current <code>LogType</code> has <code>Image</code> | hasLogImages | {
"repo_name": "Bananeweizen/cgeo",
"path": "main/src/cgeo/geocaching/log/LogEntry.java",
"license": "apache-2.0",
"size": 14198
} | [
"org.apache.commons.collections4.CollectionUtils"
] | import org.apache.commons.collections4.CollectionUtils; | import org.apache.commons.collections4.*; | [
"org.apache.commons"
] | org.apache.commons; | 879,202 |
@Override
public void show(){
if(!isLocked) {
if (title.getText().length() == 0) title.setVisibility(View.GONE);
if (this.imageBitmap == null) image.setVisibility(View.GONE);
if(allowAnimation) {
postiveButton.hide(false);
negativeButton.hide(false);
cameraButton.hide(false);
int delay = startControlAnimationDelay;
Handler handler = new Handler();
for (int i = -1; i < 2; i++) {
final int finalI = i; | void function(){ if(!isLocked) { if (title.getText().length() == 0) title.setVisibility(View.GONE); if (this.imageBitmap == null) image.setVisibility(View.GONE); if(allowAnimation) { postiveButton.hide(false); negativeButton.hide(false); cameraButton.hide(false); int delay = startControlAnimationDelay; Handler handler = new Handler(); for (int i = -1; i < 2; i++) { final int finalI = i; | /**
* Show the dialog.
*/ | Show the dialog | show | {
"repo_name": "Minitour/crofis-android-uikit",
"path": "ui/src/main/java/net/crofis/ui/dialog/NewMessageDialog.java",
"license": "apache-2.0",
"size": 16445
} | [
"android.os.Handler",
"android.view.View"
] | import android.os.Handler; import android.view.View; | import android.os.*; import android.view.*; | [
"android.os",
"android.view"
] | android.os; android.view; | 2,625,631 |
private Future<Void> deleteIsARelations(final URI uri) {
return ExecUtils.nonUIAsyncExec((Callable<Void>) () -> {
List<String> relationIds = AxialCodingComposite.this.jointjs
.getConnectedPermanentLinks(uri.toString()).get();
for (String relationId : relationIds) {
AxialCodingComposite.this.jointjs.remove(relationId).get();
}
return null;
});
} | Future<Void> function(final URI uri) { return ExecUtils.nonUIAsyncExec((Callable<Void>) () -> { List<String> relationIds = AxialCodingComposite.this.jointjs .getConnectedPermanentLinks(uri.toString()).get(); for (String relationId : relationIds) { AxialCodingComposite.this.jointjs.remove(relationId).get(); } return null; }); } | /**
* Removes all incoming and outgoing "is a" permanent relations of the
* specified {@link URI}.
*
* @return
*
* @NonUIThread
*/ | Removes all incoming and outgoing "is a" permanent relations of the specified <code>URI</code> | deleteIsARelations | {
"repo_name": "bkahlert/api-usability-analyzer",
"path": "de.fu_berlin.imp.apiua.groundedtheory/src/de/fu_berlin/imp/apiua/groundedtheory/views/AxialCodingComposite.java",
"license": "mit",
"size": 27856
} | [
"com.bkahlert.nebula.utils.ExecUtils",
"java.util.List",
"java.util.concurrent.Callable",
"java.util.concurrent.Future"
] | import com.bkahlert.nebula.utils.ExecUtils; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.Future; | import com.bkahlert.nebula.utils.*; import java.util.*; import java.util.concurrent.*; | [
"com.bkahlert.nebula",
"java.util"
] | com.bkahlert.nebula; java.util; | 1,208,952 |
public String[] getOptions() {
Vector result;
String[] options;
int i;
result = new Vector();
options = super.getOptions();
for (i = 0; i < options.length; i++)
result.add(options[i]);
result.add("-a");
result.add("" + getNumAttributes());
result.add("-c");
result.add("" + getNumClasses());
result.add("-N");
result.add("" + getNumNumeric());
result.add("-I");
result.add("" + getNumIrrelevant());
result.add("-M");
result.add("" + getMinRuleSize());
result.add("-R");
result.add("" + getMaxRuleSize());
if (getVoteFlag())
result.add("-V");
return (String[]) result.toArray(new String[result.size()]);
} | String[] function() { Vector result; String[] options; int i; result = new Vector(); options = super.getOptions(); for (i = 0; i < options.length; i++) result.add(options[i]); result.add("-a"); result.add(STR-cSTRSTR-NSTRSTR-ISTRSTR-MSTRSTR-RSTRSTR-V"); return (String[]) result.toArray(new String[result.size()]); } | /**
* Gets the current settings of the datagenerator RDG1.
*
* @return an array of strings suitable for passing to setOptions
*/ | Gets the current settings of the datagenerator RDG1 | getOptions | {
"repo_name": "dsibournemouth/autoweka",
"path": "weka-3.7.7/src/main/java/weka/datagenerators/classifiers/classification/RDG1.java",
"license": "gpl-3.0",
"size": 34983
} | [
"java.util.Vector"
] | import java.util.Vector; | import java.util.*; | [
"java.util"
] | java.util; | 2,224,616 |
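getOptions and setOptions form Weka's usual OptionHandler round-trip: the array returned by one is valid input for the other. A hedged sketch, assuming the RDG1 generator class listed above is on the classpath:

import weka.datagenerators.classifiers.classification.RDG1;

public class Rdg1OptionsRoundTrip {
    public static void main(String[] args) throws Exception {
        RDG1 gen = new RDG1();
        // Snapshot the current settings in command-line form ...
        String[] options = gen.getOptions();
        System.out.println(String.join(" ", options));
        // ... and feed them back; the generator should end up configured identically.
        gen.setOptions(options);
    }
}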
public GridTcpRouterConfiguration setSecurityCredentialsProvider(SecurityCredentialsProvider credsProvider) {
this.credsProvider = credsProvider;
return this;
} | GridTcpRouterConfiguration function(SecurityCredentialsProvider credsProvider) { this.credsProvider = credsProvider; return this; } | /**
* Sets credentials provider for grid access.
*
* @param credsProvider Credentials provider.
* @return {@code this} for chaining.
*/ | Sets credentials provider for grid access | setSecurityCredentialsProvider | {
"repo_name": "samaitra/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/client/router/GridTcpRouterConfiguration.java",
"license": "apache-2.0",
"size": 10129
} | [
"org.apache.ignite.plugin.security.SecurityCredentialsProvider"
] | import org.apache.ignite.plugin.security.SecurityCredentialsProvider; | import org.apache.ignite.plugin.security.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 1,100,055 |
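Because the setter returns this, router configuration is meant to be written as a chain. A sketch using only the method documented above; SecurityCredentialsBasicProvider and the two-argument SecurityCredentials constructor are assumed from the same Ignite security package:

import org.apache.ignite.internal.client.router.GridTcpRouterConfiguration;
import org.apache.ignite.plugin.security.SecurityCredentials;
import org.apache.ignite.plugin.security.SecurityCredentialsBasicProvider;

public class RouterConfigSketch {
    public static void main(String[] args) {
        GridTcpRouterConfiguration cfg = new GridTcpRouterConfiguration()
            .setSecurityCredentialsProvider(
                new SecurityCredentialsBasicProvider(
                    new SecurityCredentials("router", "secret"))); // placeholder credentials
        System.out.println(cfg);
    }
}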
public String getItemImageUrl(final TreeItemImage item, final String itemId) {
if (item == null) {
return null;
}
// Check if has image url
String url = item.getUrl();
if (!Util.empty(url)) {
return url;
}
// Check if has image
Image image = item.getImage();
if (image == null) {
return null;
}
// Check static resource
if (image instanceof InternalResource) {
return ((InternalResource) image).getTargetUrl();
}
// Build targetted url
Environment env = getEnvironment();
Map<String, String> parameters = env.getHiddenParameters();
parameters.put(Environment.TARGET_ID, getTargetId());
String cacheKey = item.getImageCacheKey();
if (Util.empty(cacheKey)) {
// Add some randomness to the URL to prevent caching
String random = WebUtilities.generateRandom();
parameters.put(Environment.UNIQUE_RANDOM_PARAM, random);
} else {
// Remove step counter as not required for cached content
parameters.remove(Environment.STEP_VARIABLE);
parameters.remove(Environment.SESSION_TOKEN_VARIABLE);
// Add the cache key
parameters.put(Environment.CONTENT_CACHE_KEY, cacheKey);
}
// Item id
parameters.put(ITEM_REQUEST_KEY, itemId);
// The targetable path needs to be configured for the portal environment.
url = env.getWServletPath();
// Note the last parameter. In javascript we don't want to encode "&".
return WebUtilities.getPath(url, parameters, true);
}
| String function(final TreeItemImage item, final String itemId) { if (item == null) { return null; } String url = item.getUrl(); if (!Util.empty(url)) { return url; } Image image = item.getImage(); if (image == null) { return null; } if (image instanceof InternalResource) { return ((InternalResource) image).getTargetUrl(); } Environment env = getEnvironment(); Map<String, String> parameters = env.getHiddenParameters(); parameters.put(Environment.TARGET_ID, getTargetId()); String cacheKey = item.getImageCacheKey(); if (Util.empty(cacheKey)) { String random = WebUtilities.generateRandom(); parameters.put(Environment.UNIQUE_RANDOM_PARAM, random); } else { parameters.remove(Environment.STEP_VARIABLE); parameters.remove(Environment.SESSION_TOKEN_VARIABLE); parameters.put(Environment.CONTENT_CACHE_KEY, cacheKey); } parameters.put(ITEM_REQUEST_KEY, itemId); url = env.getWServletPath(); return WebUtilities.getPath(url, parameters, true); } | /**
* Retrieves a URL for the tree item image.
* <p>
* This method is used by the WTree Renderer.
* </p>
*
* @param item the tree item
* @param itemId the tree item id
* @return the URL to access the tree item image.
*/ | Retrieves a URL for the tree item image. This method is used by the WTree Renderer. | getItemImageUrl | {
"repo_name": "Joshua-Barclay/wcomponents",
"path": "wcomponents-core/src/main/java/com/github/bordertech/wcomponents/WTree.java",
"license": "gpl-3.0",
"size": 39637
} | [
"com.github.bordertech.wcomponents.util.Util",
"java.util.Map"
] | import com.github.bordertech.wcomponents.util.Util; import java.util.Map; | import com.github.bordertech.wcomponents.util.*; import java.util.*; | [
"com.github.bordertech",
"java.util"
] | com.github.bordertech; java.util; | 2,073,674 |
@Override
public void paintComponent(Graphics g) {
super.paintComponent(g);
// Calculating correct starting point.
calcStartingPoint();
// Do drawing.
doDrawing(g);
} | void function(Graphics g) { super.paintComponent(g); calcStartingPoint(); doDrawing(g); } | /**
* Overriding this so we can draw our own Graphic.
*/ | Overriding this so we can draw our own Graphic | paintComponent | {
"repo_name": "asmailov/IMG_Encryption",
"path": "src/GUI/DrawPanel.java",
"license": "mit",
"size": 15549
} | [
"java.awt.Graphics"
] | import java.awt.Graphics; | import java.awt.*; | [
"java.awt"
] | java.awt; | 876,277 |
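The paintComponent override above follows the standard Swing custom-painting pattern: call super.paintComponent(g) first so the component's background is painted, then do the custom drawing. A self-contained sketch of the same pattern:

import java.awt.Graphics;
import javax.swing.JFrame;
import javax.swing.JPanel;

public class SimpleDrawPanel extends JPanel {
    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);      // let Swing clear and paint the background
        g.drawOval(20, 20, 100, 60);  // then do our own drawing
    }

    public static void main(String[] args) {
        JFrame frame = new JFrame("demo");
        frame.add(new SimpleDrawPanel());
        frame.setSize(200, 160);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setVisible(true);
    }
}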
@Override
protected AWSNetworkFirewall build(AwsSyncClientParams params) {
return new AWSNetworkFirewallClient(params);
} | AWSNetworkFirewall function(AwsSyncClientParams params) { return new AWSNetworkFirewallClient(params); } | /**
* Construct a synchronous implementation of AWSNetworkFirewall using the current builder configuration.
*
* @param params
* Current builder configuration represented as a parameter object.
* @return Fully configured implementation of AWSNetworkFirewall.
*/ | Construct a synchronous implementation of AWSNetworkFirewall using the current builder configuration | build | {
"repo_name": "aws/aws-sdk-java",
"path": "aws-java-sdk-networkfirewall/src/main/java/com/amazonaws/services/networkfirewall/AWSNetworkFirewallClientBuilder.java",
"license": "apache-2.0",
"size": 2416
} | [
"com.amazonaws.client.AwsSyncClientParams"
] | import com.amazonaws.client.AwsSyncClientParams; | import com.amazonaws.client.*; | [
"com.amazonaws.client"
] | com.amazonaws.client; | 2,914,966 |
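Callers rarely invoke this build(AwsSyncClientParams) overload directly; it is reached through the fluent builder. A hedged sketch of the conventional AWS SDK for Java v1 entry point (standard() and withRegion follow the SDK-wide builder convention; the region string is a placeholder):

import com.amazonaws.services.networkfirewall.AWSNetworkFirewall;
import com.amazonaws.services.networkfirewall.AWSNetworkFirewallClientBuilder;

public class ClientFromBuilder {
    public static void main(String[] args) {
        AWSNetworkFirewall client = AWSNetworkFirewallClientBuilder.standard()
            .withRegion("us-east-1") // placeholder region
            .build();                // ends up in the build(params) override above
        System.out.println(client);
    }
}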
public static void recycleElementMap(Map<?, ?> elementMap) {
if (elementMap instanceof LinkedHashMap) {
elementMap.clear();
RecycleUtils.recycle(elementMap);
}
}
| static void function(Map<?, ?> elementMap) { if (elementMap instanceof LinkedHashMap) { elementMap.clear(); RecycleUtils.recycle(elementMap); } } | /**
* Recycle a map returned by {@link #getElementsForLifecycle(LifecycleElement, String)}.
*
* @param elementMap map to recycle
*/ | Recycle a map returned by <code>#getElementsForLifecycle(LifecycleElement, String)</code> | recycleElementMap | {
"repo_name": "ricepanda/rice-git3",
"path": "rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/uif/lifecycle/ViewLifecycleUtils.java",
"license": "apache-2.0",
"size": 29363
} | [
"java.util.LinkedHashMap",
"java.util.Map",
"org.kuali.rice.krad.uif.util.RecycleUtils"
] | import java.util.LinkedHashMap; import java.util.Map; import org.kuali.rice.krad.uif.util.RecycleUtils; | import java.util.*; import org.kuali.rice.krad.uif.util.*; | [
"java.util",
"org.kuali.rice"
] | java.util; org.kuali.rice; | 2,384,810 |
public static boolean isFunctionDeclaration(Node n) {
// Note: There is currently one case where an unnamed function has a declaration parent.
// `export default function() {...}`
// In this case we consider the function to be an expression.
return n.isFunction() && isDeclarationParent(n.getParent()) && isNamedFunction(n);
}
/**
* Is this node a class or object literal member function?
*
* <p>examples:
*
* <pre><code>
* class C {
* f() {}
* get x() { return this.x_; }
* set x(v) { this.x_ = v; }
* [someExpr]() {}
* }
* obj = {
* f() {}
* get x() { return this.x_; }
* set x(v) { this.x_ = v; }
* [someExpr]() {}
* } | static boolean function(Node n) { return n.isFunction() && isDeclarationParent(n.getParent()) && isNamedFunction(n); } /** * Is this node a class or object literal member function? * * <p>examples: * * <pre><code> * class C { * f() {} * get x() { return this.x_; } * set x(v) { this.x_ = v; } * [someExpr]() {} * } * obj = { * f() {} * get x() { return this.x_; } * set x(v) { this.x_ = v; } * [someExpr]() {} * } | /**
* Is this node a function declaration? A function declaration is a function that has a name that
* is added to the current scope (i.e. a function that is not part of a expression; see {@link
* #isFunctionExpression}).
*/ | Is this node a function declaration? A function declaration is a function that has a name that is added to the current scope (i.e. a function that is not part of a expression; see <code>#isFunctionExpression</code>) | isFunctionDeclaration | {
"repo_name": "monetate/closure-compiler",
"path": "src/com/google/javascript/jscomp/NodeUtil.java",
"license": "apache-2.0",
"size": 186190
} | [
"com.google.javascript.rhino.Node"
] | import com.google.javascript.rhino.Node; | import com.google.javascript.rhino.*; | [
"com.google.javascript"
] | com.google.javascript; | 2,606,103 |
protected Set<Pair<Integer,AttributeMetaData>> getExcludedAttributesMD(ExampleSetMetaData esm1, ExampleSetMetaData esm2) throws OperatorException {
return new HashSet<Pair<Integer,AttributeMetaData>>();
} | Set<Pair<Integer,AttributeMetaData>> function(ExampleSetMetaData esm1, ExampleSetMetaData esm2) throws OperatorException { return new HashSet<Pair<Integer,AttributeMetaData>>(); } | /**
* Returns a set of original attributes which will not be copied to the output example set.
* The default implementation returns an empty set.
*/ | Returns a set of original attributes which will not be copied to the output example set. The default implementation returns an empty set | getExcludedAttributesMD | {
"repo_name": "rapidminer/rapidminer",
"path": "src/com/rapidminer/operator/preprocessing/join/AbstractExampleSetJoin.java",
"license": "agpl-3.0",
"size": 18497
} | [
"com.rapidminer.operator.OperatorException",
"com.rapidminer.operator.ports.metadata.AttributeMetaData",
"com.rapidminer.operator.ports.metadata.ExampleSetMetaData",
"com.rapidminer.tools.container.Pair",
"java.util.HashSet",
"java.util.Set"
] | import com.rapidminer.operator.OperatorException; import com.rapidminer.operator.ports.metadata.AttributeMetaData; import com.rapidminer.operator.ports.metadata.ExampleSetMetaData; import com.rapidminer.tools.container.Pair; import java.util.HashSet; import java.util.Set; | import com.rapidminer.operator.*; import com.rapidminer.operator.ports.metadata.*; import com.rapidminer.tools.container.*; import java.util.*; | [
"com.rapidminer.operator",
"com.rapidminer.tools",
"java.util"
] | com.rapidminer.operator; com.rapidminer.tools; java.util; | 1,555,601 |
FinalApplicationStatus getFinalApplicationStatus();
/**
* Nodes on which the containers for this {@link RMAppAttempt} ran.
* @return the set of nodes that ran any containers from this {@link RMAppAttempt} | FinalApplicationStatus getFinalApplicationStatus(); /** * Nodes on which the containers for this {@link RMAppAttempt} ran. * @return the set of nodes that ran any containers from this {@link RMAppAttempt} | /**
* The final status set by the AM.
 * @return the final status that is set by the AM when unregistering itself; may be null
 * if the AM has not unregistered itself.
*/ | The final status set by the AM | getFinalApplicationStatus | {
"repo_name": "songweijia/fffs",
"path": "sources/hadoop-2.4.1-src/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttempt.java",
"license": "apache-2.0",
"size": 7275
} | [
"org.apache.hadoop.yarn.api.records.FinalApplicationStatus"
] | import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; | import org.apache.hadoop.yarn.api.records.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 2,413,204 |
public Map<Integer, PaqueteMovimiento> getUbicacionPersonajes() {
return ubicacionPersonajes;
} | Map<Integer, PaqueteMovimiento> function() { return ubicacionPersonajes; } | /**
 * Gets the ubicacionPersonajes map (the characters' current positions).
 *
 * @return the map of characters' movement packets, keyed by id
*/ | Gets the ubicacion personajes | getUbicacionPersonajes | {
"repo_name": "JavaPeppers/jrpg-2017b-cliente",
"path": "src/main/java/juego/Juego.java",
"license": "mit",
"size": 13040
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 2,159,708 |
public final IntegerProperty measuredHeightProperty() {
return measuredHeight;
} | final IntegerProperty function() { return measuredHeight; } | /**
* Returns the read-only measured height property instance. The measured height property stores the untransformed
* height of this component, including padding and border.
*
* @return The read-only measured height property instance.
*/ | Returns the read-only measured height property instance. The measured height property stores the untransformed height of this component, including padding and border | measuredHeightProperty | {
"repo_name": "EagerLogic/Cubee",
"path": "src/Cubee/src/main/java/com/eagerlogic/cubee/client/components/AComponent.java",
"license": "apache-2.0",
"size": 63202
} | [
"com.eagerlogic.cubee.client.properties.IntegerProperty"
] | import com.eagerlogic.cubee.client.properties.IntegerProperty; | import com.eagerlogic.cubee.client.properties.*; | [
"com.eagerlogic.cubee"
] | com.eagerlogic.cubee; | 2,150,239 |
public static void close() {
flush();
try {
out.close();
}
catch (IOException e) {
e.printStackTrace();
}
} | static void function() { flush(); try { out.close(); } catch (IOException e) { e.printStackTrace(); } } | /**
* Flush and close standard output. Once standard output is closed, you can no
* longer write bits to it.
*/ | Flush and close standard output. Once standard output is closed, you can no longer write bits to it | close | {
"repo_name": "clbx/CS121",
"path": "STDTest/BinaryStdOut.java",
"license": "mit",
"size": 8386
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,491,385 |
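Per the javadoc above, close() flushes any buffered bits and then permanently shuts standard output. A sketch of the intended call order, assuming the companion write(boolean) method that the algs4-style BinaryStdOut classes provide:

public class BinaryStdOutDemo {
    public static void main(String[] args) {
        // Emit a few raw bits, then flush and close the stream.
        BinaryStdOut.write(true);
        BinaryStdOut.write(false);
        BinaryStdOut.write(true);
        BinaryStdOut.close();
        // Any further write(...) call after close() would fail.
    }
}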
public CompoundUpdateCommand
createRemoveSelectedTreeNodesCommand(ArrayList nodes) {
return new CompoundUpdateCommand(REMOVE_SELECTED_NODES);
} | CompoundUpdateCommand function(ArrayList nodes) { return new CompoundUpdateCommand(REMOVE_SELECTED_NODES); } | /**
* Creates the compound RemoveSelectedTreeNodesCommand. Used to create the
* 'dynamic' RemoveSelectedTreeNodesCommand name
*
* @param nodes
* The list of the nodes that are selected and should be removed
* @return the RemoveSelectedTreeNodesCommand
*/ | Creates the compound RemoveSelectedTreeNodesCommand. Used to create the 'dynamic' RemoveSelectedTreeNodesCommand name | createRemoveSelectedTreeNodesCommand | {
"repo_name": "git-moss/Push2Display",
"path": "lib/batik-1.8/sources/org/apache/batik/apps/svgbrowser/HistoryBrowserInterface.java",
"license": "lgpl-3.0",
"size": 42497
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 401,807 |
@Override
public void setOwner(String volume, String owner) throws IOException {
Preconditions.checkNotNull(volume);
Preconditions.checkNotNull(owner);
metadataManager.writeLock().lock();
try {
byte[] dbVolumeKey = metadataManager.getVolumeKey(volume);
byte[] volInfo = metadataManager.getVolumeTable().get(dbVolumeKey);
if (volInfo == null) {
LOG.debug("Changing volume ownership failed for user:{} volume:{}",
owner, volume);
throw new OMException(ResultCodes.FAILED_VOLUME_NOT_FOUND);
}
VolumeInfo volumeInfo = VolumeInfo.parseFrom(volInfo);
OmVolumeArgs volumeArgs = OmVolumeArgs.getFromProtobuf(volumeInfo);
Preconditions.checkState(volume.equals(volumeInfo.getVolume()));
try(WriteBatch batch = new WriteBatch()) {
delVolumeFromOwnerList(volume, volumeArgs.getOwnerName(), batch);
addVolumeToOwnerList(volume, owner, batch);
OmVolumeArgs newVolumeArgs =
OmVolumeArgs.newBuilder().setVolume(volumeArgs.getVolume())
.setAdminName(volumeArgs.getAdminName())
.setOwnerName(owner)
.setQuotaInBytes(volumeArgs.getQuotaInBytes())
.setCreationTime(volumeArgs.getCreationTime())
.build();
VolumeInfo newVolumeInfo = newVolumeArgs.getProtobuf();
batch.put(metadataManager.getVolumeTable().getHandle(),
dbVolumeKey, newVolumeInfo.toByteArray());
metadataManager.getStore().write(batch);
}
} catch (RocksDBException | IOException ex) {
if (!(ex instanceof OMException)) {
LOG.error("Changing volume ownership failed for user:{} volume:{}",
owner, volume, ex);
}
if(ex instanceof RocksDBException) {
throw RocksDBStore.toIOException("Volume creation failed.",
(RocksDBException) ex);
} else {
throw (IOException) ex;
}
} finally {
metadataManager.writeLock().unlock();
}
} | void function(String volume, String owner) throws IOException { Preconditions.checkNotNull(volume); Preconditions.checkNotNull(owner); metadataManager.writeLock().lock(); try { byte[] dbVolumeKey = metadataManager.getVolumeKey(volume); byte[] volInfo = metadataManager.getVolumeTable().get(dbVolumeKey); if (volInfo == null) { LOG.debug(STR, owner, volume); throw new OMException(ResultCodes.FAILED_VOLUME_NOT_FOUND); } VolumeInfo volumeInfo = VolumeInfo.parseFrom(volInfo); OmVolumeArgs volumeArgs = OmVolumeArgs.getFromProtobuf(volumeInfo); Preconditions.checkState(volume.equals(volumeInfo.getVolume())); try(WriteBatch batch = new WriteBatch()) { delVolumeFromOwnerList(volume, volumeArgs.getOwnerName(), batch); addVolumeToOwnerList(volume, owner, batch); OmVolumeArgs newVolumeArgs = OmVolumeArgs.newBuilder().setVolume(volumeArgs.getVolume()) .setAdminName(volumeArgs.getAdminName()) .setOwnerName(owner) .setQuotaInBytes(volumeArgs.getQuotaInBytes()) .setCreationTime(volumeArgs.getCreationTime()) .build(); VolumeInfo newVolumeInfo = newVolumeArgs.getProtobuf(); batch.put(metadataManager.getVolumeTable().getHandle(), dbVolumeKey, newVolumeInfo.toByteArray()); metadataManager.getStore().write(batch); } } catch (RocksDBException IOException ex) { if (!(ex instanceof OMException)) { LOG.error(STR, owner, volume, ex); } if(ex instanceof RocksDBException) { throw RocksDBStore.toIOException(STR, (RocksDBException) ex); } else { throw (IOException) ex; } } finally { metadataManager.writeLock().unlock(); } } | /**
* Changes the owner of a volume.
*
* @param volume - Name of the volume.
* @param owner - Name of the owner.
* @throws IOException
*/ | Changes the owner of a volume | setOwner | {
"repo_name": "dierobotsdie/hadoop",
"path": "hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/VolumeManagerImpl.java",
"license": "apache-2.0",
"size": 15316
} | [
"com.google.common.base.Preconditions",
"java.io.IOException",
"org.apache.hadoop.ozone.om.exceptions.OMException",
"org.apache.hadoop.ozone.om.helpers.OmVolumeArgs",
"org.apache.hadoop.utils.RocksDBStore",
"org.rocksdb.RocksDBException",
"org.rocksdb.WriteBatch"
] | import com.google.common.base.Preconditions; import java.io.IOException; import org.apache.hadoop.ozone.om.exceptions.OMException; import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs; import org.apache.hadoop.utils.RocksDBStore; import org.rocksdb.RocksDBException; import org.rocksdb.WriteBatch; | import com.google.common.base.*; import java.io.*; import org.apache.hadoop.ozone.om.exceptions.*; import org.apache.hadoop.ozone.om.helpers.*; import org.apache.hadoop.utils.*; import org.rocksdb.*; | [
"com.google.common",
"java.io",
"org.apache.hadoop",
"org.rocksdb"
] | com.google.common; java.io; org.apache.hadoop; org.rocksdb; | 93,085 |
@Override
public void write(char[] chr) throws IOException {
out.write(chr);
}
| void function(char[] chr) throws IOException { out.write(chr); } | /**
* Write the characters from an array.
* @param chr the characters to write
* @throws IOException if an I/O error occurs
*/ | Write the characters from an array | write | {
"repo_name": "smargav/android-api-library",
"path": "src/main/java/org/apache/commons/io/output/LockableFileWriter.java",
"license": "apache-2.0",
"size": 13560
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 268,875 |
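This write(char[]) simply delegates to the wrapped writer; what makes LockableFileWriter interesting is the lock file it holds while open. A minimal sketch with a placeholder output path:

import java.io.File;
import org.apache.commons.io.output.LockableFileWriter;

public class LockedWrite {
    public static void main(String[] args) throws Exception {
        File target = new File("out.txt"); // placeholder path
        // try-with-resources releases the lock file when the writer closes
        try (LockableFileWriter writer = new LockableFileWriter(target)) {
            writer.write("hello".toCharArray()); // the char[] overload shown above
        }
    }
}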
public void onMessage(Message msg) {
try {
checkFreeSpace();
activeThreads.getAndIncrement();
if (msg instanceof TextMessage) {
String msgText = ((TextMessage) msg).getText();
final String correlationID = msg.getJMSCorrelationID();
Map<String, String> responseMsgMap;
String type = msg.getStringProperty("type");
CmsWorkOrderSimpleBase wo;
switch (type) {
// WorkOrder
case WORK_ORDER_TYPE: {
long t = System.currentTimeMillis();
wo = getWorkOrderOf(msgText, CmsWorkOrderSimple.class);
wo.putSearchTag("iWoCrtTime", Long.toString(System.currentTimeMillis() - t));
String logKey = workOrderExecutor.getLogKey(wo);
logger.info(logKey + " Inductor: " + config.getIpAddr());
preProcess(wo);
wo.putSearchTag("rfcAction", wo.getAction());
Response response = runWoWithMatchingExecutor((CmsWorkOrderSimple)wo);
if (response == null || response.getResult() == Result.NOT_MATCHED) {
responseMsgMap = workOrderExecutor.processAndVerify(wo, correlationID);
}
else {
responseMsgMap = response.getResponseMap();
postExecTags(wo);
}
break;
}
// ActionOrder
case ACTION_ORDER_TYPE: {
long t = System.currentTimeMillis();
wo = getWorkOrderOf(msgText, CmsActionOrderSimple.class);
wo.putSearchTag("iAoCrtTime", Long.toString(System.currentTimeMillis() - t));
preProcess(wo);
Response response = runAoWithMatchingExecutor((CmsActionOrderSimple) wo);
if (response == null || response.getResult() == Result.NOT_MATCHED) {
responseMsgMap = actionOrderExecutor.processAndVerify(wo, correlationID);
}
else {
responseMsgMap = response.getResponseMap();
postExecTags(wo);
}
break;
}
default:
logger.error(new IllegalArgumentException("Unknown msg type - " + type));
msg.acknowledge();
return;
}
// Controller will process this message
responseMsgMap.put("correlationID", correlationID);
responseMsgMap.put("type", type);
long startTime = System.currentTimeMillis();
if (!correlationID.equals("test")) {
messagePublisher.publishMessage(responseMsgMap);
}
long endTime = System.currentTimeMillis();
long duration = endTime - startTime;
// ack message
logger.debug("Send message took:" + duration + "ms");
msg.acknowledge();
}
} catch (JMSException | SecurityException | IOException | IllegalArgumentException e) {
logger.error("Error occurred in processing message", e);
} finally {
// Decrement the total number of active threads consumed by 1
activeThreads.getAndDecrement();
clearStateFile();
}
} | void function(Message msg) { try { checkFreeSpace(); activeThreads.getAndIncrement(); if (msg instanceof TextMessage) { String msgText = ((TextMessage) msg).getText(); final String correlationID = msg.getJMSCorrelationID(); Map<String, String> responseMsgMap; String type = msg.getStringProperty("type"); CmsWorkOrderSimpleBase wo; switch (type) { case WORK_ORDER_TYPE: { long t = System.currentTimeMillis(); wo = getWorkOrderOf(msgText, CmsWorkOrderSimple.class); wo.putSearchTag(STR, Long.toString(System.currentTimeMillis() - t)); String logKey = workOrderExecutor.getLogKey(wo); logger.info(logKey + STR + config.getIpAddr()); preProcess(wo); wo.putSearchTag(STR, wo.getAction()); Response response = runWoWithMatchingExecutor((CmsWorkOrderSimple)wo); if (response == null response.getResult() == Result.NOT_MATCHED) { responseMsgMap = workOrderExecutor.processAndVerify(wo, correlationID); } else { responseMsgMap = response.getResponseMap(); postExecTags(wo); } break; } case ACTION_ORDER_TYPE: { long t = System.currentTimeMillis(); wo = getWorkOrderOf(msgText, CmsActionOrderSimple.class); wo.putSearchTag(STR, Long.toString(System.currentTimeMillis() - t)); preProcess(wo); Response response = runAoWithMatchingExecutor((CmsActionOrderSimple) wo); if (response == null response.getResult() == Result.NOT_MATCHED) { responseMsgMap = actionOrderExecutor.processAndVerify(wo, correlationID); } else { responseMsgMap = response.getResponseMap(); postExecTags(wo); } break; } default: logger.error(new IllegalArgumentException(STR + type)); msg.acknowledge(); return; } responseMsgMap.put(STR, correlationID); responseMsgMap.put("type", type); long startTime = System.currentTimeMillis(); if (!correlationID.equals("test")) { messagePublisher.publishMessage(responseMsgMap); } long endTime = System.currentTimeMillis(); long duration = endTime - startTime; logger.debug(STR + duration + "ms"); msg.acknowledge(); } } catch (JMSException SecurityException IOException IllegalArgumentException e) { logger.error(STR, e); } finally { activeThreads.getAndDecrement(); clearStateFile(); } } | /**
* MessageListener mapped in application-context.xml - will deserialize to a WorkOrder
* (iaas/swdist) or ActionOrder (procedure)
*
* @param msg Message
* @see javax.jms.MessageListener#onMessage(javax.jms.Message)
*/ | MessageListener mapped in application-context.xml - will deserialize to a WorkOrder (iaas/swdist) or ActionOrder (procedure) | onMessage | {
"repo_name": "oneops/OneOps",
"path": "inductor/src/main/java/com/oneops/inductor/Listener.java",
"license": "apache-2.0",
"size": 13973
} | [
"com.oneops.cms.domain.CmsWorkOrderSimpleBase",
"com.oneops.cms.execution.Response",
"com.oneops.cms.execution.Result",
"com.oneops.cms.simple.domain.CmsActionOrderSimple",
"com.oneops.cms.simple.domain.CmsWorkOrderSimple",
"java.io.IOException",
"java.util.Map",
"javax.jms.JMSException",
"javax.jms.Message",
"javax.jms.TextMessage"
] | import com.oneops.cms.domain.CmsWorkOrderSimpleBase; import com.oneops.cms.execution.Response; import com.oneops.cms.execution.Result; import com.oneops.cms.simple.domain.CmsActionOrderSimple; import com.oneops.cms.simple.domain.CmsWorkOrderSimple; import java.io.IOException; import java.util.Map; import javax.jms.JMSException; import javax.jms.Message; import javax.jms.TextMessage; | import com.oneops.cms.domain.*; import com.oneops.cms.execution.*; import com.oneops.cms.simple.domain.*; import java.io.*; import java.util.*; import javax.jms.*; | [
"com.oneops.cms",
"java.io",
"java.util",
"javax.jms"
] | com.oneops.cms; java.io; java.util; javax.jms; | 2,164,112 |
@Nullable
protected AbstractUrlHandlerMapping getHandlerMapping() {
if (this.registrations.isEmpty()) {
return null;
}
Map<String, WebHandler> urlMap = new LinkedHashMap<>();
for (ResourceHandlerRegistration registration : this.registrations) {
ResourceWebHandler handler = getRequestHandler(registration);
for (String pathPattern : registration.getPathPatterns()) {
urlMap.put(pathPattern, handler);
}
}
return new SimpleUrlHandlerMapping(urlMap, this.order);
} | AbstractUrlHandlerMapping function() { if (this.registrations.isEmpty()) { return null; } Map<String, WebHandler> urlMap = new LinkedHashMap<>(); for (ResourceHandlerRegistration registration : this.registrations) { ResourceWebHandler handler = getRequestHandler(registration); for (String pathPattern : registration.getPathPatterns()) { urlMap.put(pathPattern, handler); } } return new SimpleUrlHandlerMapping(urlMap, this.order); } | /**
* Return a handler mapping with the mapped resource handlers; or {@code null} in case
* of no registrations.
*/ | Return a handler mapping with the mapped resource handlers; or null in case of no registrations | getHandlerMapping | {
"repo_name": "spring-projects/spring-framework",
"path": "spring-webflux/src/main/java/org/springframework/web/reactive/config/ResourceHandlerRegistry.java",
"license": "apache-2.0",
"size": 6022
} | [
"java.util.LinkedHashMap",
"java.util.Map",
"org.springframework.web.reactive.handler.AbstractUrlHandlerMapping",
"org.springframework.web.reactive.handler.SimpleUrlHandlerMapping",
"org.springframework.web.reactive.resource.ResourceWebHandler",
"org.springframework.web.server.WebHandler"
] | import java.util.LinkedHashMap; import java.util.Map; import org.springframework.web.reactive.handler.AbstractUrlHandlerMapping; import org.springframework.web.reactive.handler.SimpleUrlHandlerMapping; import org.springframework.web.reactive.resource.ResourceWebHandler; import org.springframework.web.server.WebHandler; | import java.util.*; import org.springframework.web.reactive.handler.*; import org.springframework.web.reactive.resource.*; import org.springframework.web.server.*; | [
"java.util",
"org.springframework.web"
] | java.util; org.springframework.web; | 2,064,685 |