Dataset schema (field name, value type, observed length or value range):

method                 string    lengths 13 to 441k
clean_method           string    lengths 7 to 313k
doc                    string    lengths 17 to 17.3k
comment                string    lengths 3 to 1.42k
method_name            string    lengths 1 to 273
extra                  dict
imports                sequence
imports_info           string    lengths 19 to 34.8k
cluster_imports_info   string    lengths 15 to 3.66k
libraries              sequence
libraries_info         string    lengths 6 to 661
id                     int64     values 0 to 2.92M
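Each record that follows fills these fields in the order listed: the raw Java method, a normalized copy (string literals masked as STR and the method name replaced by "function"), the original Javadoc, a one-line comment, the method name, repository metadata, several views of the imports, and a numeric id. As a minimal sketch of how one record might be read, assuming the table is published as a Hugging Face dataset (the path "your-org/java-methods" and the split name below are hypothetical placeholders, not the real identifiers):

from datasets import load_dataset

# Hypothetical dataset path and split; substitute the actual values for this table.
ds = load_dataset("your-org/java-methods", split="train")

row = ds[0]
print(row["method_name"])         # short identifier, e.g. "panic"
print(row["comment"])             # one-line natural-language summary of the method
print(row["doc"])                 # original Javadoc block
print(row["libraries"])           # top-level packages, e.g. ["java.net"]
print(row["extra"])               # repo metadata: repo_name, path, license, size
print(row["clean_method"][:200])  # normalized body with literals masked as STR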
try { throw new Exception(msg); } catch (Exception ex) { System.out.println(msg); ex.printStackTrace(); } System.exit(-1); } static InetAddress localhost; public NioEngine() throws Exception { localhost = InetAddress.getByName("localhost"); } long lastEcho; long startTime;
try { throw new Exception(msg); } catch (Exception ex) { System.out.println(msg); ex.printStackTrace(); } System.exit(-1); } static InetAddress localhost; public NioEngine() throws Exception { localhost = InetAddress.getByName(STR); } long lastEcho; long startTime;
/** * Call this when something you don't understand happens. * This is your suicide, enforcing a fail-stop behavior. * It is also a single place where to have a breakpoint * to stop when a panic occurs, giving you a chance to see * what has happened. * @param msg */
Call this when something you don't understand happens. This is your suicide, enforcing a fail-stop behavior. It is also a single place where to have a breakpoint to stop when a panic occurs, giving you a chance to see what has happened
panic
{ "repo_name": "torresaa/Chat_Evenementiel", "path": "Chat_Torres_Gulfo_Fariss/src/nio/engine/NioEngine.java", "license": "gpl-2.0", "size": 4663 }
[ "java.net.InetAddress" ]
import java.net.InetAddress;
import java.net.*;
[ "java.net" ]
java.net;
400,680
@Test public void testPoint2fConstructor() { final Vector3f v = new ImmutableVector3f(new MutablePoint2f(3, 2)); assertEquals(3, v.x, 0.001f); assertEquals(2, v.y, 0.001f); assertEquals(0, v.z, 0.001f); }
void function() { final Vector3f v = new ImmutableVector3f(new MutablePoint2f(3, 2)); assertEquals(3, v.x, 0.001f); assertEquals(2, v.y, 0.001f); assertEquals(0, v.z, 0.001f); }
/** * Tests the constructor with a Point2f argument. */
Tests the constructor with a Point2f argument
testPoint2fConstructor
{ "repo_name": "kayahr/gramath", "path": "src/test/java/de/ailis/gramath/ImmutableVector3fTest.java", "license": "mit", "size": 6205 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
2,244,639
@Override protected void removeRange(int lower, int upper) { ArgumentChecks.ensureValidIndexRange(size, lower, upper); int lo = lower * bitCount; int hi = upper * bitCount; final int offset = (lo & OFFSET_MASK); if (offset == (hi & OFFSET_MASK)) { lo >>>= BASE_SHIFT; hi >>>= BASE_SHIFT; final long mask = (1L << offset) - 1; final long save = values[lo] & mask; System.arraycopy(values, hi, values, lo, length(size) - hi); values[lo] = (values[lo] & ~mask) | save; } else { while (upper < size) { setUnchecked(lower++, getUnchecked(upper++)); } } this.size -= (upper - lower); }
void function(int lower, int upper) { ArgumentChecks.ensureValidIndexRange(size, lower, upper); int lo = lower * bitCount; int hi = upper * bitCount; final int offset = (lo & OFFSET_MASK); if (offset == (hi & OFFSET_MASK)) { lo >>>= BASE_SHIFT; hi >>>= BASE_SHIFT; final long mask = (1L << offset) - 1; final long save = values[lo] & mask; System.arraycopy(values, hi, values, lo, length(size) - hi); values[lo] = (values[lo] & ~mask) | save; } else { while (upper < size) { setUnchecked(lower++, getUnchecked(upper++)); } } this.size -= (upper - lower); }
/** * Removes all values in the given range of index. * Shifts any succeeding elements to the left (reduces their index). * * @param lower index of the first element to remove, inclusive. * @param upper index after the last element to be removed. */
Removes all values in the given range of index. Shifts any succeeding elements to the left (reduces their index)
removeRange
{ "repo_name": "Geomatys/sis", "path": "core/sis-utility/src/main/java/org/apache/sis/util/collection/IntegerList.java", "license": "apache-2.0", "size": 16010 }
[ "org.apache.sis.util.ArgumentChecks" ]
import org.apache.sis.util.ArgumentChecks;
import org.apache.sis.util.*;
[ "org.apache.sis" ]
org.apache.sis;
1,124,613
public Builder withMembershipService(ClusterMembershipService membershipService) { this.membershipService = checkNotNull(membershipService, "membershipService cannot be null"); return this; }
Builder function(ClusterMembershipService membershipService) { this.membershipService = checkNotNull(membershipService, STR); return this; }
/** * Sets the cluster membership service. * * @param membershipService the cluster membership service * @return the server builder */
Sets the cluster membership service
withMembershipService
{ "repo_name": "kuujo/copycat", "path": "protocols/raft/src/main/java/io/atomix/protocols/raft/RaftServer.java", "license": "apache-2.0", "size": 32773 }
[ "com.google.common.base.Preconditions", "io.atomix.cluster.ClusterMembershipService" ]
import com.google.common.base.Preconditions; import io.atomix.cluster.ClusterMembershipService;
import com.google.common.base.*; import io.atomix.cluster.*;
[ "com.google.common", "io.atomix.cluster" ]
com.google.common; io.atomix.cluster;
1,163,806
protected static <K, V> Map<K, V> newSynchronizedMap(Map<K, V> map) { if (map == null) { return CollectionHelper.synchronizedMap(0); } else { return Collections.synchronizedMap(new HashMap<K, V>(map)); } }
static <K, V> Map<K, V> function(Map<K, V> map) { if (map == null) { return CollectionHelper.synchronizedMap(0); } else { return Collections.synchronizedMap(new HashMap<K, V>(map)); } }
/** * Creates a new synchronized map from the specified map. * * @param <K> * the key type * @param <V> * the value type * @param map * the map * * @return the synchronized map */
Creates a new synchronized map from the specified map
newSynchronizedMap
{ "repo_name": "shane-axiom/SOS", "path": "core/cache/src/main/java/org/n52/sos/cache/AbstractContentCache.java", "license": "gpl-2.0", "size": 30678 }
[ "java.util.Collections", "java.util.HashMap", "java.util.Map", "org.n52.sos.util.CollectionHelper" ]
import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.n52.sos.util.CollectionHelper;
import java.util.*; import org.n52.sos.util.*;
[ "java.util", "org.n52.sos" ]
java.util; org.n52.sos;
1,525,712
private RemoteViews updateSummaryNotificationRemoteViews() { RemoteViews remoteViews = new RemoteViews(getPackageName(), R.layout.notification_big_view); // Fill in the data for the top two beacon views updateSummaryNotificationRemoteViewsFirstBeacon(mDeviceAddressToUrl.get(mSortedDevices.get(0)), remoteViews); updateSummaryNotificationRemoteViewsSecondBeacon(mDeviceAddressToUrl.get(mSortedDevices.get(1)), remoteViews); // Create a pending intent that will open the physical web app // TODO: Use a clickListener on the VIEW MORE button to do this PendingIntent pendingIntent = createReturnToAppPendingIntent(); remoteViews.setOnClickPendingIntent(R.id.otherBeaconsLayout, pendingIntent); return remoteViews; }
RemoteViews function() { RemoteViews remoteViews = new RemoteViews(getPackageName(), R.layout.notification_big_view); updateSummaryNotificationRemoteViewsFirstBeacon(mDeviceAddressToUrl.get(mSortedDevices.get(0)), remoteViews); updateSummaryNotificationRemoteViewsSecondBeacon(mDeviceAddressToUrl.get(mSortedDevices.get(1)), remoteViews); PendingIntent pendingIntent = createReturnToAppPendingIntent(); remoteViews.setOnClickPendingIntent(R.id.otherBeaconsLayout, pendingIntent); return remoteViews; }
/** * Create the big view for the summary notification */
Create the big view for the summary notification
updateSummaryNotificationRemoteViews
{ "repo_name": "matthewsibigtroth/physical-web", "path": "android/PhysicalWeb/app/src/main/java/org/physical_web/physicalweb/UriBeaconDiscoveryService.java", "license": "apache-2.0", "size": 23660 }
[ "android.app.PendingIntent", "android.widget.RemoteViews" ]
import android.app.PendingIntent; import android.widget.RemoteViews;
import android.app.*; import android.widget.*;
[ "android.app", "android.widget" ]
android.app; android.widget;
1,595,835
public DateHolder setEndOfWeek() { final int firstDayOfWeek = getFirstDayOfWeek(); short endlessLoopDetection = 0; do { calendar.add(Calendar.DAY_OF_YEAR, 1); if (++endlessLoopDetection > 10) { throw new RuntimeException("Endless loop protection. Please contact developer!"); } } while (calendar.get(Calendar.DAY_OF_WEEK) != firstDayOfWeek); calendar.add(Calendar.DAY_OF_YEAR, -1); // Get one day before first day of next week. setEndOfDay(); return this; }
DateHolder function() { final int firstDayOfWeek = getFirstDayOfWeek(); short endlessLoopDetection = 0; do { calendar.add(Calendar.DAY_OF_YEAR, 1); if (++endlessLoopDetection > 10) { throw new RuntimeException(STR); } } while (calendar.get(Calendar.DAY_OF_WEEK) != firstDayOfWeek); calendar.add(Calendar.DAY_OF_YEAR, -1); setEndOfDay(); return this; }
/** * Sets the date to the ending of the week (last day of week) and calls setEndOfDay. * @see #setEndOfDay() */
Sets the date to the ending of the week (last day of week) and calls setEndOfDay
setEndOfWeek
{ "repo_name": "micromata/projectforge-webapp", "path": "src/main/java/org/projectforge/common/DateHolder.java", "license": "gpl-3.0", "size": 21653 }
[ "java.util.Calendar" ]
import java.util.Calendar;
import java.util.*;
[ "java.util" ]
java.util;
655,289
public static final String addTagValue(String tag, byte[] val) throws IOException { return addTagValue(tag, val, true); }
static final String function(String tag, byte[] val) throws IOException { return addTagValue(tag, val, true); }
/** * Build an XML string (including a carriage return) for a certain tag binary (byte[]) value * @param tag The XML tag * @param val The binary value of the tag * @return The XML String for the tag. * @throws IOException in case there is an Base64 or GZip encoding problem */
Build an XML string (including a carriage return) for a certain tag binary (byte[]) value
addTagValue
{ "repo_name": "soluvas/pdi-ce", "path": "src-core/org/pentaho/di/core/xml/XMLHandler.java", "license": "apache-2.0", "size": 34948 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,913,832
public Collection<Region> getOnlineRegionsLocalContext() { Collection<Region> regions = this.onlineRegions.values(); return Collections.unmodifiableCollection(regions); }
Collection<Region> function() { Collection<Region> regions = this.onlineRegions.values(); return Collections.unmodifiableCollection(regions); }
/** * For tests, web ui and metrics. * This method will only work if HRegionServer is in the same JVM as client; * HRegion cannot be serialized to cross an rpc. */
For tests, web ui and metrics. This method will only work if HRegionServer is in the same JVM as client; HRegion cannot be serialized to cross an rpc
getOnlineRegionsLocalContext
{ "repo_name": "ibmsoe/hbase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java", "license": "apache-2.0", "size": 124784 }
[ "java.util.Collection", "java.util.Collections" ]
import java.util.Collection; import java.util.Collections;
import java.util.*;
[ "java.util" ]
java.util;
2,057,377
EditorPartStack split(EditorPartStack relativePartStack, Constraints constraints, double size);
EditorPartStack split(EditorPartStack relativePartStack, Constraints constraints, double size);
/** * Split part stack * * @param relativePartStack the relative part stack * @param constraints the constraints of split(should contains direction of splitting:vertical or * horizontal) * @param size the size of splits part stack (use -1 if not set) * @return the new splits part stack */
Split part stack
split
{ "repo_name": "sleshchenko/che", "path": "ide/che-core-ide-api/src/main/java/org/eclipse/che/ide/api/parts/EditorMultiPartStack.java", "license": "epl-1.0", "size": 3895 }
[ "org.eclipse.che.ide.api.constraints.Constraints" ]
import org.eclipse.che.ide.api.constraints.Constraints;
import org.eclipse.che.ide.api.constraints.*;
[ "org.eclipse.che" ]
org.eclipse.che;
953,390
public void addParser(Parser parser) { if (parser!=null && !parsers.contains(parser)) { if (running) { timer.stop(); } parsers.add(parser); if (parsers.size()==1) { // Okay to call more than once. ToolTipManager.sharedInstance().registerComponent(textArea); } if (running) { timer.restart(); } } }
void function(Parser parser) { if (parser!=null && !parsers.contains(parser)) { if (running) { timer.stop(); } parsers.add(parser); if (parsers.size()==1) { ToolTipManager.sharedInstance().registerComponent(textArea); } if (running) { timer.restart(); } } }
/** * Adds a parser for the text area. * * @param parser The new parser. If this is <code>null</code>, nothing * happens. * @see #getParser(int) * @see #removeParser(Parser) */
Adds a parser for the text area
addParser
{ "repo_name": "Thecarisma/powertext", "path": "Power Text/src/com/power/text/ui/pteditor/ParserManager.java", "license": "gpl-3.0", "size": 22597 }
[ "com.power.text.pteditor.parser.Parser", "javax.swing.ToolTipManager" ]
import com.power.text.pteditor.parser.Parser; import javax.swing.ToolTipManager;
import com.power.text.pteditor.parser.*; import javax.swing.*;
[ "com.power.text", "javax.swing" ]
com.power.text; javax.swing;
62,153
public void deleteTree() throws IOException { fileSystem.deleteTree(asFragment()); }
void function() throws IOException { fileSystem.deleteTree(asFragment()); }
/** * Deletes all directory trees recursively beneath this path and removes the path as well. * * @throws IOException if the hierarchy cannot be removed successfully */
Deletes all directory trees recursively beneath this path and removes the path as well
deleteTree
{ "repo_name": "perezd/bazel", "path": "src/main/java/com/google/devtools/build/lib/vfs/Path.java", "license": "apache-2.0", "size": 35929 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
789,637
String relative = file.getAbsolutePath().substring( root.getAbsolutePath().length()); String script = remove(relative, "/org/apache/jsp"); script = remove(script, ".class"); script = remove(script, ".java"); script = remove(script, ".deps"); if (File.separatorChar == '\\') { script = script.replace(File.separatorChar, '/'); } return StringUtils.substringBeforeLast(script, "_") + "." + StringUtils.substringAfterLast(script, "_"); }
String relative = file.getAbsolutePath().substring( root.getAbsolutePath().length()); String script = remove(relative, STR); script = remove(script, STR); script = remove(script, ".java"); script = remove(script, ".deps"); if (File.separatorChar == '\\') { script = script.replace(File.separatorChar, '/'); } return StringUtils.substringBeforeLast(script, "_") + "." + StringUtils.substringAfterLast(script, "_"); }
/** * Gets the script associated with the file. * * @param file * the file to find the associate script * @return the associated script */
Gets the script associated with the file
getScript
{ "repo_name": "Nimco/sling", "path": "bundles/commons/fsclassloader/src/main/java/org/apache/sling/commons/fsclassloader/impl/FSClassLoaderWebConsole.java", "license": "apache-2.0", "size": 16600 }
[ "java.io.File", "org.apache.commons.lang3.StringUtils" ]
import java.io.File; import org.apache.commons.lang3.StringUtils;
import java.io.*; import org.apache.commons.lang3.*;
[ "java.io", "org.apache.commons" ]
java.io; org.apache.commons;
2,799,463
public final void testRSAMultiPrimePrivateCrtKeySpec04() { try { new RSAMultiPrimePrivateCrtKeySpec( BigInteger.ONE, BigInteger.ONE, null, BigInteger.ONE, BigInteger.ONE, BigInteger.ONE, BigInteger.ONE, BigInteger.ONE, opi); fail("Expected NPE not thrown"); } catch (NullPointerException e) { } }
final void function() { try { new RSAMultiPrimePrivateCrtKeySpec( BigInteger.ONE, BigInteger.ONE, null, BigInteger.ONE, BigInteger.ONE, BigInteger.ONE, BigInteger.ONE, BigInteger.ONE, opi); fail(STR); } catch (NullPointerException e) { } }
/** * Test #4 for * <code>RSAMultiPrimePrivateCrtKeySpec(BigInteger modulus, * BigInteger publicExponent, * BigInteger privateExponent, * BigInteger primeP, * BigInteger primeQ, * BigInteger primeExponentP, * BigInteger primeExponentQ, * BigInteger crtCoefficient, * RSAOtherPrimeInfo[] otherPrimeInfo) * </code> ctor<br> * Assertion: NullPointerException if privateExponent is null */
Test #4 for <code>RSAMultiPrimePrivateCrtKeySpec(BigInteger modulus, BigInteger publicExponent, BigInteger privateExponent, BigInteger primeP, BigInteger primeQ, BigInteger primeExponentP, BigInteger primeExponentQ, BigInteger crtCoefficient, RSAOtherPrimeInfo[] otherPrimeInfo) </code> ctor Assertion: NullPointerException if privateExponent is null
testRSAMultiPrimePrivateCrtKeySpec04
{ "repo_name": "rex-xxx/mt6572_x201", "path": "external/apache-harmony/security/src/test/api/java/org/apache/harmony/security/tests/java/security/spec/RSAMultiPrimePrivateCrtKeySpecTest.java", "license": "gpl-2.0", "size": 26733 }
[ "java.math.BigInteger", "java.security.spec.RSAMultiPrimePrivateCrtKeySpec" ]
import java.math.BigInteger; import java.security.spec.RSAMultiPrimePrivateCrtKeySpec;
import java.math.*; import java.security.spec.*;
[ "java.math", "java.security" ]
java.math; java.security;
1,001,475
public static <TItem> Bson in(final String fieldName, final TItem... values) { return in(fieldName, asList(values)); }
static <TItem> Bson function(final String fieldName, final TItem... values) { return in(fieldName, asList(values)); }
/** * Creates a filter that matches all documents where the value of a field equals any value in the list of specified values. * * @param fieldName the field name * @param values the list of values * @param <TItem> the value type * @return the filter * @mongodb.driver.manual reference/operator/query/in $in */
Creates a filter that matches all documents where the value of a field equals any value in the list of specified values
in
{ "repo_name": "gianpaj/mongo-java-driver", "path": "driver-core/src/main/com/mongodb/client/model/Filters.java", "license": "apache-2.0", "size": 42343 }
[ "java.util.Arrays", "org.bson.conversions.Bson" ]
import java.util.Arrays; import org.bson.conversions.Bson;
import java.util.*; import org.bson.conversions.*;
[ "java.util", "org.bson.conversions" ]
java.util; org.bson.conversions;
1,579,167
private List<WatchServiceCallback.FileService> getFileServiceCallbackListeners() { return SingletonHolder.FILE_CALLBACK_LISTENERS.get(); }
List<WatchServiceCallback.FileService> function() { return SingletonHolder.FILE_CALLBACK_LISTENERS.get(); }
/** * Gets file service callback listeners. * * @return the file service callback listeners */
Gets file service callback listeners
getFileServiceCallbackListeners
{ "repo_name": "sohailalam2/File_Watch_Service", "path": "src/com/sohail/alam/watchservice/WatchService.java", "license": "apache-2.0", "size": 16367 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,134,388
public void init() throws IgniteInterruptedCheckedException { if (isDone()) return; if (init.compareAndSet(false, true)) { if (isDone()) return; initTs = U.currentTimeMillis(); try { // Wait for event to occur to make sure that discovery // will return corresponding nodes. U.await(evtLatch); assert discoEvt != null : this; assert !dummy && !forcePreload : this; ClusterNode oldest = CU.oldestAliveCacheServerNode(cctx, exchId.topologyVersion()); oldestNode.set(oldest); if (!F.isEmpty(reqs)) blockGateways(); startCaches(); // True if client node joined or failed. boolean clientNodeEvt; if (F.isEmpty(reqs)) { int type = discoEvt.type(); assert type == EVT_NODE_JOINED || type == EVT_NODE_LEFT || type == EVT_NODE_FAILED : discoEvt; clientNodeEvt = CU.clientNode(discoEvt.eventNode()); } else { assert discoEvt.type() == EVT_DISCOVERY_CUSTOM_EVT : discoEvt; boolean clientOnlyCacheEvt = true; for (DynamicCacheChangeRequest req : reqs) { if (req.clientStartOnly() || req.close()) continue; clientOnlyCacheEvt = false; break; } clientNodeEvt = clientOnlyCacheEvt; } if (clientNodeEvt) { ClusterNode node = discoEvt.eventNode(); // Client need to initialize affinity for local join event or for stated client caches. if (!node.isLocal() || clientCacheClose()) { for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (cacheCtx.isLocal()) continue; GridDhtPartitionTopology top = cacheCtx.topology(); top.updateTopologyVersion(exchId, this, -1, stopping(cacheCtx.cacheId())); if (cacheCtx.affinity().affinityTopologyVersion() == AffinityTopologyVersion.NONE) { initTopology(cacheCtx); top.beforeExchange(this); } else cacheCtx.affinity().clientEventTopologyChange(discoEvt, exchId.topologyVersion()); } if (exchId.isLeft()) cctx.mvcc().removeExplicitNodeLocks(exchId.nodeId(), exchId.topologyVersion()); rmtIds = Collections.emptyList(); rmtNodes = Collections.emptyList(); onDone(exchId.topologyVersion()); skipPreload = cctx.kernalContext().clientNode(); return; } } clientOnlyExchange = clientNodeEvt || cctx.kernalContext().clientNode(); if (clientOnlyExchange) { skipPreload = cctx.kernalContext().clientNode(); for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (cacheCtx.isLocal()) continue; GridDhtPartitionTopology top = cacheCtx.topology(); top.updateTopologyVersion(exchId, this, -1, stopping(cacheCtx.cacheId())); } for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (cacheCtx.isLocal()) continue; initTopology(cacheCtx); } if (oldest != null) { rmtNodes = new ConcurrentLinkedQueue<>(CU.aliveRemoteServerNodesWithCaches(cctx, exchId.topologyVersion())); rmtIds = Collections.unmodifiableSet(new HashSet<>(F.nodeIds(rmtNodes))); initFut.onDone(true); if (log.isDebugEnabled()) log.debug("Initialized future: " + this); if (cctx.localNode().equals(oldest)) { for (GridCacheContext cacheCtx : cctx.cacheContexts()) { boolean updateTop = !cacheCtx.isLocal() && exchId.topologyVersion().equals(cacheCtx.startTopologyVersion()); if (updateTop) { for (GridClientPartitionTopology top : cctx.exchange().clientTopologies()) { if (top.cacheId() == cacheCtx.cacheId()) { cacheCtx.topology().update(exchId, top.partitionMap(true), top.updateCounters()); break; } } } } onDone(exchId.topologyVersion()); } else sendPartitions(oldest); } else { rmtIds = Collections.emptyList(); rmtNodes = Collections.emptyList(); onDone(exchId.topologyVersion()); } return; } assert oldestNode.get() != null; for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (isCacheAdded(cacheCtx.cacheId(), exchId.topologyVersion())) { if 
(cacheCtx.discovery().cacheAffinityNodes(cacheCtx.name(), topologyVersion()).isEmpty()) U.quietAndWarn(log, "No server nodes found for cache client: " + cacheCtx.namex()); } cacheCtx.preloader().onExchangeFutureAdded(); } List<String> cachesWithoutNodes = null; if (exchId.isLeft()) { for (String name : cctx.cache().cacheNames()) { if (cctx.discovery().cacheAffinityNodes(name, topologyVersion()).isEmpty()) { if (cachesWithoutNodes == null) cachesWithoutNodes = new ArrayList<>(); cachesWithoutNodes.add(name); // Fire event even if there is no client cache started. if (cctx.gridEvents().isRecordable(EventType.EVT_CACHE_NODES_LEFT)) { Event evt = new CacheEvent( name, cctx.localNode(), cctx.localNode(), "All server nodes have left the cluster.", EventType.EVT_CACHE_NODES_LEFT, 0, false, null, null, null, null, false, null, false, null, null, null ); cctx.gridEvents().record(evt); } } } } if (cachesWithoutNodes != null) { StringBuilder sb = new StringBuilder("All server nodes for the following caches have left the cluster: "); for (int i = 0; i < cachesWithoutNodes.size(); i++) { String cache = cachesWithoutNodes.get(i); sb.append('\'').append(cache).append('\''); if (i != cachesWithoutNodes.size() - 1) sb.append(", "); } U.quietAndWarn(log, sb.toString()); U.quietAndWarn(log, "Must have server nodes for caches to operate."); } assert discoEvt != null; assert exchId.nodeId().equals(discoEvt.eventNode().id()); for (GridCacheContext cacheCtx : cctx.cacheContexts()) { GridClientPartitionTopology clientTop = cctx.exchange().clearClientTopology( cacheCtx.cacheId()); long updSeq = clientTop == null ? -1 : clientTop.lastUpdateSequence(); // Update before waiting for locks. if (!cacheCtx.isLocal()) cacheCtx.topology().updateTopologyVersion(exchId, this, updSeq, stopping(cacheCtx.cacheId())); } // Grab all alive remote nodes with order of equal or less than last joined node. rmtNodes = new ConcurrentLinkedQueue<>(CU.aliveRemoteServerNodesWithCaches(cctx, exchId.topologyVersion())); rmtIds = Collections.unmodifiableSet(new HashSet<>(F.nodeIds(rmtNodes))); for (Map.Entry<UUID, GridDhtPartitionsSingleMessage> m : singleMsgs.entrySet()) // If received any messages, process them. onReceive(m.getKey(), m.getValue()); for (Map.Entry<UUID, GridDhtPartitionsFullMessage> m : fullMsgs.entrySet()) // If received any messages, process them. onReceive(m.getKey(), m.getValue()); AffinityTopologyVersion topVer = exchId.topologyVersion(); for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (cacheCtx.isLocal()) continue; // Must initialize topology after we get discovery event. initTopology(cacheCtx); cacheCtx.preloader().onTopologyChanged(exchId.topologyVersion()); cacheCtx.preloader().updateLastExchangeFuture(this); } IgniteInternalFuture<?> partReleaseFut = cctx.partitionReleaseFuture(topVer); // Assign to class variable so it will be included into toString() method. this.partReleaseFut = partReleaseFut; if (exchId.isLeft()) cctx.mvcc().removeExplicitNodeLocks(exchId.nodeId(), exchId.topologyVersion()); if (log.isDebugEnabled()) log.debug("Before waiting for partition release future: " + this); int dumpedObjects = 0; while (true) { try { partReleaseFut.get(2 * cctx.gridConfig().getNetworkTimeout(), TimeUnit.MILLISECONDS); break; } catch (IgniteFutureTimeoutCheckedException ignored) { // Print pending transactions and locks that might have led to hang. 
if (dumpedObjects < DUMP_PENDING_OBJECTS_THRESHOLD) { dumpPendingObjects(); dumpedObjects++; } } } if (log.isDebugEnabled()) log.debug("After waiting for partition release future: " + this); IgniteInternalFuture<?> locksFut = cctx.mvcc().finishLocks(exchId.topologyVersion()); dumpedObjects = 0; while (true) { try { locksFut.get(2 * cctx.gridConfig().getNetworkTimeout(), TimeUnit.MILLISECONDS); break; } catch (IgniteFutureTimeoutCheckedException ignored) { if (dumpedObjects < DUMP_PENDING_OBJECTS_THRESHOLD) { U.warn(log, "Failed to wait for locks release future. " + "Dumping pending objects that might be the cause: " + cctx.localNodeId()); U.warn(log, "Locked keys:"); for (IgniteTxKey key : cctx.mvcc().lockedKeys()) U.warn(log, "Locked key: " + key); for (IgniteTxKey key : cctx.mvcc().nearLockedKeys()) U.warn(log, "Locked near key: " + key); Map<IgniteTxKey, Collection<GridCacheMvccCandidate>> locks = cctx.mvcc().unfinishedLocks(exchId.topologyVersion()); for (Map.Entry<IgniteTxKey, Collection<GridCacheMvccCandidate>> e : locks.entrySet()) U.warn(log, "Awaited locked entry [key=" + e.getKey() + ", mvcc=" + e.getValue() + ']'); dumpedObjects++; } } } boolean topChanged = discoEvt.type() != EVT_DISCOVERY_CUSTOM_EVT; for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (cacheCtx.isLocal()) continue; // Notify replication manager. GridCacheContext drCacheCtx = cacheCtx.isNear() ? cacheCtx.near().dht().context() : cacheCtx; if (drCacheCtx.isDrEnabled()) drCacheCtx.dr().beforeExchange(topVer, exchId.isLeft()); if (topChanged) cacheCtx.continuousQueries().beforeExchange(exchId.topologyVersion()); // Partition release future is done so we can flush the write-behind store. cacheCtx.store().forceFlush(); // Process queued undeploys prior to sending/spreading map. cacheCtx.preloader().unwindUndeploys(); GridDhtPartitionTopology top = cacheCtx.topology(); assert topVer.equals(top.topologyVersion()) : "Topology version is updated only in this class instances inside single ExchangeWorker thread."; top.beforeExchange(this); } for (GridClientPartitionTopology top : cctx.exchange().clientTopologies()) { top.updateTopologyVersion(exchId, this, -1, stopping(top.cacheId())); top.beforeExchange(this); } } catch (IgniteInterruptedCheckedException e) { onDone(e); throw e; } catch (Throwable e) { U.error(log, "Failed to reinitialize local partitions (preloading will be stopped): " + exchId, e); onDone(e); if (e instanceof Error) throw (Error)e; return; } if (F.isEmpty(rmtIds)) { onDone(exchId.topologyVersion()); return; } ready.set(true); initFut.onDone(true); if (log.isDebugEnabled()) log.debug("Initialized future: " + this); ClusterNode oldest = oldestNode.get(); // If this node is not oldest. if (!oldest.id().equals(cctx.localNodeId())) sendPartitions(oldest); else { boolean allReceived = allReceived(); if (allReceived && replied.compareAndSet(false, true)) { if (spreadPartitions()) onDone(exchId.topologyVersion()); } } scheduleRecheck(); } else assert false : "Skipped init future: " + this; }
void function() throws IgniteInterruptedCheckedException { if (isDone()) return; if (init.compareAndSet(false, true)) { if (isDone()) return; initTs = U.currentTimeMillis(); try { U.await(evtLatch); assert discoEvt != null : this; assert !dummy && !forcePreload : this; ClusterNode oldest = CU.oldestAliveCacheServerNode(cctx, exchId.topologyVersion()); oldestNode.set(oldest); if (!F.isEmpty(reqs)) blockGateways(); startCaches(); boolean clientNodeEvt; if (F.isEmpty(reqs)) { int type = discoEvt.type(); assert type == EVT_NODE_JOINED type == EVT_NODE_LEFT type == EVT_NODE_FAILED : discoEvt; clientNodeEvt = CU.clientNode(discoEvt.eventNode()); } else { assert discoEvt.type() == EVT_DISCOVERY_CUSTOM_EVT : discoEvt; boolean clientOnlyCacheEvt = true; for (DynamicCacheChangeRequest req : reqs) { if (req.clientStartOnly() req.close()) continue; clientOnlyCacheEvt = false; break; } clientNodeEvt = clientOnlyCacheEvt; } if (clientNodeEvt) { ClusterNode node = discoEvt.eventNode(); if (!node.isLocal() clientCacheClose()) { for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (cacheCtx.isLocal()) continue; GridDhtPartitionTopology top = cacheCtx.topology(); top.updateTopologyVersion(exchId, this, -1, stopping(cacheCtx.cacheId())); if (cacheCtx.affinity().affinityTopologyVersion() == AffinityTopologyVersion.NONE) { initTopology(cacheCtx); top.beforeExchange(this); } else cacheCtx.affinity().clientEventTopologyChange(discoEvt, exchId.topologyVersion()); } if (exchId.isLeft()) cctx.mvcc().removeExplicitNodeLocks(exchId.nodeId(), exchId.topologyVersion()); rmtIds = Collections.emptyList(); rmtNodes = Collections.emptyList(); onDone(exchId.topologyVersion()); skipPreload = cctx.kernalContext().clientNode(); return; } } clientOnlyExchange = clientNodeEvt cctx.kernalContext().clientNode(); if (clientOnlyExchange) { skipPreload = cctx.kernalContext().clientNode(); for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (cacheCtx.isLocal()) continue; GridDhtPartitionTopology top = cacheCtx.topology(); top.updateTopologyVersion(exchId, this, -1, stopping(cacheCtx.cacheId())); } for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (cacheCtx.isLocal()) continue; initTopology(cacheCtx); } if (oldest != null) { rmtNodes = new ConcurrentLinkedQueue<>(CU.aliveRemoteServerNodesWithCaches(cctx, exchId.topologyVersion())); rmtIds = Collections.unmodifiableSet(new HashSet<>(F.nodeIds(rmtNodes))); initFut.onDone(true); if (log.isDebugEnabled()) log.debug(STR + this); if (cctx.localNode().equals(oldest)) { for (GridCacheContext cacheCtx : cctx.cacheContexts()) { boolean updateTop = !cacheCtx.isLocal() && exchId.topologyVersion().equals(cacheCtx.startTopologyVersion()); if (updateTop) { for (GridClientPartitionTopology top : cctx.exchange().clientTopologies()) { if (top.cacheId() == cacheCtx.cacheId()) { cacheCtx.topology().update(exchId, top.partitionMap(true), top.updateCounters()); break; } } } } onDone(exchId.topologyVersion()); } else sendPartitions(oldest); } else { rmtIds = Collections.emptyList(); rmtNodes = Collections.emptyList(); onDone(exchId.topologyVersion()); } return; } assert oldestNode.get() != null; for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (isCacheAdded(cacheCtx.cacheId(), exchId.topologyVersion())) { if (cacheCtx.discovery().cacheAffinityNodes(cacheCtx.name(), topologyVersion()).isEmpty()) U.quietAndWarn(log, STR + cacheCtx.namex()); } cacheCtx.preloader().onExchangeFutureAdded(); } List<String> cachesWithoutNodes = null; if (exchId.isLeft()) { for (String name : 
cctx.cache().cacheNames()) { if (cctx.discovery().cacheAffinityNodes(name, topologyVersion()).isEmpty()) { if (cachesWithoutNodes == null) cachesWithoutNodes = new ArrayList<>(); cachesWithoutNodes.add(name); if (cctx.gridEvents().isRecordable(EventType.EVT_CACHE_NODES_LEFT)) { Event evt = new CacheEvent( name, cctx.localNode(), cctx.localNode(), STR, EventType.EVT_CACHE_NODES_LEFT, 0, false, null, null, null, null, false, null, false, null, null, null ); cctx.gridEvents().record(evt); } } } } if (cachesWithoutNodes != null) { StringBuilder sb = new StringBuilder(STR); for (int i = 0; i < cachesWithoutNodes.size(); i++) { String cache = cachesWithoutNodes.get(i); sb.append('\'').append(cache).append('\''); if (i != cachesWithoutNodes.size() - 1) sb.append(STR); } U.quietAndWarn(log, sb.toString()); U.quietAndWarn(log, STR); } assert discoEvt != null; assert exchId.nodeId().equals(discoEvt.eventNode().id()); for (GridCacheContext cacheCtx : cctx.cacheContexts()) { GridClientPartitionTopology clientTop = cctx.exchange().clearClientTopology( cacheCtx.cacheId()); long updSeq = clientTop == null ? -1 : clientTop.lastUpdateSequence(); if (!cacheCtx.isLocal()) cacheCtx.topology().updateTopologyVersion(exchId, this, updSeq, stopping(cacheCtx.cacheId())); } rmtNodes = new ConcurrentLinkedQueue<>(CU.aliveRemoteServerNodesWithCaches(cctx, exchId.topologyVersion())); rmtIds = Collections.unmodifiableSet(new HashSet<>(F.nodeIds(rmtNodes))); for (Map.Entry<UUID, GridDhtPartitionsSingleMessage> m : singleMsgs.entrySet()) onReceive(m.getKey(), m.getValue()); for (Map.Entry<UUID, GridDhtPartitionsFullMessage> m : fullMsgs.entrySet()) onReceive(m.getKey(), m.getValue()); AffinityTopologyVersion topVer = exchId.topologyVersion(); for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (cacheCtx.isLocal()) continue; initTopology(cacheCtx); cacheCtx.preloader().onTopologyChanged(exchId.topologyVersion()); cacheCtx.preloader().updateLastExchangeFuture(this); } IgniteInternalFuture<?> partReleaseFut = cctx.partitionReleaseFuture(topVer); this.partReleaseFut = partReleaseFut; if (exchId.isLeft()) cctx.mvcc().removeExplicitNodeLocks(exchId.nodeId(), exchId.topologyVersion()); if (log.isDebugEnabled()) log.debug(STR + this); int dumpedObjects = 0; while (true) { try { partReleaseFut.get(2 * cctx.gridConfig().getNetworkTimeout(), TimeUnit.MILLISECONDS); break; } catch (IgniteFutureTimeoutCheckedException ignored) { if (dumpedObjects < DUMP_PENDING_OBJECTS_THRESHOLD) { dumpPendingObjects(); dumpedObjects++; } } } if (log.isDebugEnabled()) log.debug(STR + this); IgniteInternalFuture<?> locksFut = cctx.mvcc().finishLocks(exchId.topologyVersion()); dumpedObjects = 0; while (true) { try { locksFut.get(2 * cctx.gridConfig().getNetworkTimeout(), TimeUnit.MILLISECONDS); break; } catch (IgniteFutureTimeoutCheckedException ignored) { if (dumpedObjects < DUMP_PENDING_OBJECTS_THRESHOLD) { U.warn(log, STR + STR + cctx.localNodeId()); U.warn(log, STR); for (IgniteTxKey key : cctx.mvcc().lockedKeys()) U.warn(log, STR + key); for (IgniteTxKey key : cctx.mvcc().nearLockedKeys()) U.warn(log, STR + key); Map<IgniteTxKey, Collection<GridCacheMvccCandidate>> locks = cctx.mvcc().unfinishedLocks(exchId.topologyVersion()); for (Map.Entry<IgniteTxKey, Collection<GridCacheMvccCandidate>> e : locks.entrySet()) U.warn(log, STR + e.getKey() + STR + e.getValue() + ']'); dumpedObjects++; } } } boolean topChanged = discoEvt.type() != EVT_DISCOVERY_CUSTOM_EVT; for (GridCacheContext cacheCtx : cctx.cacheContexts()) { if (cacheCtx.isLocal()) 
continue; GridCacheContext drCacheCtx = cacheCtx.isNear() ? cacheCtx.near().dht().context() : cacheCtx; if (drCacheCtx.isDrEnabled()) drCacheCtx.dr().beforeExchange(topVer, exchId.isLeft()); if (topChanged) cacheCtx.continuousQueries().beforeExchange(exchId.topologyVersion()); cacheCtx.store().forceFlush(); cacheCtx.preloader().unwindUndeploys(); GridDhtPartitionTopology top = cacheCtx.topology(); assert topVer.equals(top.topologyVersion()) : STR; top.beforeExchange(this); } for (GridClientPartitionTopology top : cctx.exchange().clientTopologies()) { top.updateTopologyVersion(exchId, this, -1, stopping(top.cacheId())); top.beforeExchange(this); } } catch (IgniteInterruptedCheckedException e) { onDone(e); throw e; } catch (Throwable e) { U.error(log, STR + exchId, e); onDone(e); if (e instanceof Error) throw (Error)e; return; } if (F.isEmpty(rmtIds)) { onDone(exchId.topologyVersion()); return; } ready.set(true); initFut.onDone(true); if (log.isDebugEnabled()) log.debug(STR + this); ClusterNode oldest = oldestNode.get(); if (!oldest.id().equals(cctx.localNodeId())) sendPartitions(oldest); else { boolean allReceived = allReceived(); if (allReceived && replied.compareAndSet(false, true)) { if (spreadPartitions()) onDone(exchId.topologyVersion()); } } scheduleRecheck(); } else assert false : STR + this; }
/** * Starts activity. * * @throws IgniteInterruptedCheckedException If interrupted. */
Starts activity
init
{ "repo_name": "dmagda/incubator-ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/preloader/GridDhtPartitionsExchangeFuture.java", "license": "apache-2.0", "size": 61018 }
[ "java.util.ArrayList", "java.util.Collection", "java.util.Collections", "java.util.HashSet", "java.util.List", "java.util.Map", "java.util.concurrent.ConcurrentLinkedQueue", "java.util.concurrent.TimeUnit", "org.apache.ignite.cluster.ClusterNode", "org.apache.ignite.events.CacheEvent", "org.apache.ignite.events.Event", "org.apache.ignite.events.EventType", "org.apache.ignite.internal.IgniteFutureTimeoutCheckedException", "org.apache.ignite.internal.IgniteInternalFuture", "org.apache.ignite.internal.IgniteInterruptedCheckedException", "org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion", "org.apache.ignite.internal.processors.cache.DynamicCacheChangeRequest", "org.apache.ignite.internal.processors.cache.GridCacheContext", "org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate", "org.apache.ignite.internal.processors.cache.distributed.dht.GridClientPartitionTopology", "org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtPartitionTopology", "org.apache.ignite.internal.processors.cache.transactions.IgniteTxKey", "org.apache.ignite.internal.util.typedef.F", "org.apache.ignite.internal.util.typedef.internal.CU", "org.apache.ignite.internal.util.typedef.internal.U" ]
import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.TimeUnit; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.events.CacheEvent; import org.apache.ignite.events.Event; import org.apache.ignite.events.EventType; import org.apache.ignite.internal.IgniteFutureTimeoutCheckedException; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.DynamicCacheChangeRequest; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate; import org.apache.ignite.internal.processors.cache.distributed.dht.GridClientPartitionTopology; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtPartitionTopology; import org.apache.ignite.internal.processors.cache.transactions.IgniteTxKey; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.U;
import java.util.*; import java.util.concurrent.*; import org.apache.ignite.cluster.*; import org.apache.ignite.events.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.processors.affinity.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.processors.cache.distributed.dht.*; import org.apache.ignite.internal.processors.cache.transactions.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*;
[ "java.util", "org.apache.ignite" ]
java.util; org.apache.ignite;
1,711,633
protected T assertSerialization(T testInstance) throws IOException { return assertSerialization(testInstance, Version.CURRENT); }
T function(T testInstance) throws IOException { return assertSerialization(testInstance, Version.CURRENT); }
/** * Serialize the given instance and asserts that both are equal */
Serialize the given instance and asserts that both are equal
assertSerialization
{ "repo_name": "shreejay/elasticsearch", "path": "test/framework/src/main/java/org/elasticsearch/test/AbstractWireSerializingTestCase.java", "license": "apache-2.0", "size": 5240 }
[ "java.io.IOException", "org.elasticsearch.Version" ]
import java.io.IOException; import org.elasticsearch.Version;
import java.io.*; import org.elasticsearch.*;
[ "java.io", "org.elasticsearch" ]
java.io; org.elasticsearch;
352,151
private Location getModuleLocation(JCCompilationUnit tree) throws IOException { JavaFileObject fo = tree.sourcefile; Location loc = fileManager.getLocationForModule(StandardLocation.MODULE_SOURCE_PATH, fo); if (loc == null) { Location sourceOutput = fileManager.hasLocation(StandardLocation.SOURCE_OUTPUT) ? StandardLocation.SOURCE_OUTPUT : StandardLocation.CLASS_OUTPUT; loc = fileManager.getLocationForModule(sourceOutput, fo); } return loc; }
Location function(JCCompilationUnit tree) throws IOException { JavaFileObject fo = tree.sourcefile; Location loc = fileManager.getLocationForModule(StandardLocation.MODULE_SOURCE_PATH, fo); if (loc == null) { Location sourceOutput = fileManager.hasLocation(StandardLocation.SOURCE_OUTPUT) ? StandardLocation.SOURCE_OUTPUT : StandardLocation.CLASS_OUTPUT; loc = fileManager.getLocationForModule(sourceOutput, fo); } return loc; }
/** * Determine the location for the module on the module source path * or source output directory which contains a given CompilationUnit. * If the source output directory is unset, the class output directory * will be checked instead. * {@code null} is returned if no such module can be found. * @param tree the compilation unit tree * @return the location for the enclosing module * @throws IOException if there is a problem while searching for the module. */
Determine the location for the module on the module source path or source output directory which contains a given CompilationUnit. If the source output directory is unset, the class output directory will be checked instead. null is returned if no such module can be found
getModuleLocation
{ "repo_name": "google/error-prone-javac", "path": "src/jdk.compiler/share/classes/com/sun/tools/javac/comp/Modules.java", "license": "gpl-2.0", "size": 73357 }
[ "com.sun.tools.javac.tree.JCTree", "java.io.IOException", "javax.tools.JavaFileManager", "javax.tools.JavaFileObject", "javax.tools.StandardLocation" ]
import com.sun.tools.javac.tree.JCTree; import java.io.IOException; import javax.tools.JavaFileManager; import javax.tools.JavaFileObject; import javax.tools.StandardLocation;
import com.sun.tools.javac.tree.*; import java.io.*; import javax.tools.*;
[ "com.sun.tools", "java.io", "javax.tools" ]
com.sun.tools; java.io; javax.tools;
374,207
IsimRecords getIsimRecords();
IsimRecords getIsimRecords();
/** * Return an interface to retrieve the ISIM records for IMS, if available. * @return the interface to retrieve the ISIM records, or null if not supported */
Return an interface to retrieve the ISIM records for IMS, if available
getIsimRecords
{ "repo_name": "JSDemos/android-sdk-20", "path": "src/com/android/internal/telephony/Phone.java", "license": "apache-2.0", "size": 62674 }
[ "com.android.internal.telephony.uicc.IsimRecords" ]
import com.android.internal.telephony.uicc.IsimRecords;
import com.android.internal.telephony.uicc.*;
[ "com.android.internal" ]
com.android.internal;
1,071,574
@Override @Nullable public UUID getUUID() throws IOException { boolean hasValue = getBoolean(); if (hasValue) { long most = getLong(); long least = getLong(); return new UUID(most, least); } else { return null; } }
UUID function() throws IOException { boolean hasValue = getBoolean(); if (hasValue) { long most = getLong(); long least = getLong(); return new UUID(most, least); } else { return null; } }
/** * Get the next 16-17 bytes as a UUID. * * <p>The UUID is read as a boolean value (See {@link #getBoolean}. If * the boolean value is "true", the next 16 bytes are read as the UUID. If * the boolean value is "false", null is returned.</p> * * @throws IOException */
Get the next 16-17 bytes as a UUID. The UUID is read as a boolean value (See <code>#getBoolean</code>. If the boolean value is "true", the next 16 bytes are read as the UUID. If the boolean value is "false", null is returned
getUUID
{ "repo_name": "JCThePants/NucleusFramework", "path": "src/com/jcwhatever/nucleus/utils/file/BasicByteReader.java", "license": "mit", "size": 22981 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,459,988
public RuleConfiguredTargetBuilder setRunfilesSupport( RunfilesSupport runfilesSupport, Artifact executable) { this.runfilesSupport = runfilesSupport; this.executable = executable; return this; }
RuleConfiguredTargetBuilder function( RunfilesSupport runfilesSupport, Artifact executable) { this.runfilesSupport = runfilesSupport; this.executable = executable; return this; }
/** * Set the runfiles support for executable targets. */
Set the runfiles support for executable targets
setRunfilesSupport
{ "repo_name": "cushon/bazel", "path": "src/main/java/com/google/devtools/build/lib/analysis/RuleConfiguredTargetBuilder.java", "license": "apache-2.0", "size": 28491 }
[ "com.google.devtools.build.lib.actions.Artifact" ]
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.*;
[ "com.google.devtools" ]
com.google.devtools;
1,765,076
@Override public CompletableFuture<EventRegistration> registerListenerAsync(@Nonnull String serviceName, @Nonnull String topic, @Nonnull Object listener) { return registerListenerAsync(serviceName, topic, TrueEventFilter.INSTANCE, listener); }
CompletableFuture<EventRegistration> function(@Nonnull String serviceName, @Nonnull String topic, @Nonnull Object listener) { return registerListenerAsync(serviceName, topic, TrueEventFilter.INSTANCE, listener); }
/** * Registers the listener for events matching the service name, topic and filter. * It will register only for events published on this node and then the registration is sent to other nodes and the listener * will listen for events on all cluster members. * * @param serviceName the service name for which we are registering * @param topic the event topic for which we are registering * @param listener the event listener * @return the event registration future */
Registers the listener for events matching the service name, topic and filter. It will register only for events published on this node and then the registration is sent to other nodes and the listener will listen for events on all cluster members
registerListenerAsync
{ "repo_name": "mdogan/hazelcast", "path": "hazelcast/src/main/java/com/hazelcast/spi/impl/eventservice/impl/EventServiceImpl.java", "license": "apache-2.0", "size": 33665 }
[ "com.hazelcast.spi.impl.eventservice.EventRegistration", "java.util.concurrent.CompletableFuture", "javax.annotation.Nonnull" ]
import com.hazelcast.spi.impl.eventservice.EventRegistration; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull;
import com.hazelcast.spi.impl.eventservice.*; import java.util.concurrent.*; import javax.annotation.*;
[ "com.hazelcast.spi", "java.util", "javax.annotation" ]
com.hazelcast.spi; java.util; javax.annotation;
798,308
void updateConnection(URL cswUrl, int pushInterval) throws OwsExceptionReport;
void updateConnection(URL cswUrl, int pushInterval) throws OwsExceptionReport;
/** * update the connection in database * * @param cswUrl * url to the catalog service * @param pushInterval * update interval * @throws OwsExceptionReport */
update the connection in database
updateConnection
{ "repo_name": "52North/OpenSensorSearch", "path": "service/src/main/java/org/n52/sir/ds/IConnectToCatalogDAO.java", "license": "apache-2.0", "size": 2205 }
[ "org.n52.oss.sir.ows.OwsExceptionReport" ]
import org.n52.oss.sir.ows.OwsExceptionReport;
import org.n52.oss.sir.ows.*;
[ "org.n52.oss" ]
org.n52.oss;
1,982,043
public Observable<ServiceResponse<Page<DiskInner>>> listByResourceGroupSinglePageAsync(final String resourceGroupName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); }
Observable<ServiceResponse<Page<DiskInner>>> function(final String resourceGroupName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); }
/** * Lists all the disks under a resource group. * ServiceResponse<PageImpl1<DiskInner>> * @param resourceGroupName The name of the resource group. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;DiskInner&gt; object wrapped in {@link ServiceResponse} if successful. */
Lists all the disks under a resource group
listByResourceGroupSinglePageAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/compute/mgmt-v2019_03_01/src/main/java/com/microsoft/azure/management/compute/v2019_03_01/implementation/DisksInner.java", "license": "mit", "size": 88874 }
[ "com.microsoft.azure.Page", "com.microsoft.rest.ServiceResponse" ]
import com.microsoft.azure.Page; import com.microsoft.rest.ServiceResponse;
import com.microsoft.azure.*; import com.microsoft.rest.*;
[ "com.microsoft.azure", "com.microsoft.rest" ]
com.microsoft.azure; com.microsoft.rest;
2,102,126
public void displayTasks(JobID jobId, String type, String state) throws IOException { TaskReport[] reports = new TaskReport[0]; if (type.equals("map")) { reports = getMapTaskReports(jobId); } else if (type.equals("reduce")) { reports = getReduceTaskReports(jobId); } else if (type.equals("setup")) { reports = getSetupTaskReports(jobId); } else if (type.equals("cleanup")) { reports = getCleanupTaskReports(jobId); } for (TaskReport report : reports) { TIPStatus status = report.getCurrentStatus(); if ((state.equals("pending") && status ==TIPStatus.PENDING) || (state.equals("running") && status ==TIPStatus.RUNNING) || (state.equals("completed") && status == TIPStatus.COMPLETE) || (state.equals("failed") && status == TIPStatus.FAILED) || (state.equals("killed") && status == TIPStatus.KILLED)) { printTaskAttempts(report); } } }
void function(JobID jobId, String type, String state) throws IOException { TaskReport[] reports = new TaskReport[0]; if (type.equals("map")) { reports = getMapTaskReports(jobId); } else if (type.equals(STR)) { reports = getReduceTaskReports(jobId); } else if (type.equals("setup")) { reports = getSetupTaskReports(jobId); } else if (type.equals(STR)) { reports = getCleanupTaskReports(jobId); } for (TaskReport report : reports) { TIPStatus status = report.getCurrentStatus(); if ((state.equals(STR) && status ==TIPStatus.PENDING) || (state.equals(STR) && status ==TIPStatus.RUNNING) || (state.equals(STR) && status == TIPStatus.COMPLETE) || (state.equals(STR) && status == TIPStatus.FAILED) || (state.equals(STR) && status == TIPStatus.KILLED)) { printTaskAttempts(report); } } }
/** * Display the information about a job's tasks, of a particular type and * in a particular state * * @param jobId the ID of the job * @param type the type of the task (map/reduce/setup/cleanup) * @param state the state of the task * (pending/running/completed/failed/killed) */
Display the information about a job's tasks, of a particular type and in a particular state
displayTasks
{ "repo_name": "toddlipcon/hadoop", "path": "src/mapred/org/apache/hadoop/mapred/JobClient.java", "license": "apache-2.0", "size": 64290 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
464,357
protected void prettify(File file, CodeGenConfiguration configuration) { List fileList = configuration.getOutputFileNamesList(); String fileName = null; for (Iterator iter = fileList.iterator();iter.hasNext();){ fileName = (String) iter.next(); if (fileName.toLowerCase().endsWith(fileExtension)) { prettifyFile(new File(fileName)); } } }
void function(File file, CodeGenConfiguration configuration) { List fileList = configuration.getOutputFileNamesList(); String fileName = null; for (Iterator iter = fileList.iterator();iter.hasNext();){ fileName = (String) iter.next(); if (fileName.toLowerCase().endsWith(fileExtension)) { prettifyFile(new File(fileName)); } } }
/** * Recursive procedure to prettify the files * * @param file */
Recursive procedure to prettify the files
prettify
{ "repo_name": "Nipuni/wso2-axis2", "path": "modules/codegen/src/org/apache/axis2/wsdl/codegen/extension/AbstractPrettyPrinterExtension.java", "license": "apache-2.0", "size": 2224 }
[ "java.io.File", "java.util.Iterator", "java.util.List", "org.apache.axis2.wsdl.codegen.CodeGenConfiguration" ]
import java.io.File; import java.util.Iterator; import java.util.List; import org.apache.axis2.wsdl.codegen.CodeGenConfiguration;
import java.io.*; import java.util.*; import org.apache.axis2.wsdl.codegen.*;
[ "java.io", "java.util", "org.apache.axis2" ]
java.io; java.util; org.apache.axis2;
623,381
public final void setTransformationMethod(TransformationMethod method) { if (method == mTransformation) { // Avoid the setText() below if the transformation is // the same. return; } if (mTransformation != null) { if (mText instanceof Spannable) { ((Spannable) mText).removeSpan(mTransformation); } } mTransformation = method; if (method instanceof TransformationMethod2) { TransformationMethod2 method2 = (TransformationMethod2) method; mAllowTransformationLengthChange = !isTextSelectable() && !(mText instanceof Editable); method2.setLengthChangesAllowed(mAllowTransformationLengthChange); } else { mAllowTransformationLengthChange = false; } setText(mText); if (hasPasswordTransformationMethod()) { notifyViewAccessibilityStateChangedIfNeeded( AccessibilityEvent.CONTENT_CHANGE_TYPE_UNDEFINED); } }
final void function(TransformationMethod method) { if (method == mTransformation) { return; } if (mTransformation != null) { if (mText instanceof Spannable) { ((Spannable) mText).removeSpan(mTransformation); } } mTransformation = method; if (method instanceof TransformationMethod2) { TransformationMethod2 method2 = (TransformationMethod2) method; mAllowTransformationLengthChange = !isTextSelectable() && !(mText instanceof Editable); method2.setLengthChangesAllowed(mAllowTransformationLengthChange); } else { mAllowTransformationLengthChange = false; } setText(mText); if (hasPasswordTransformationMethod()) { notifyViewAccessibilityStateChangedIfNeeded( AccessibilityEvent.CONTENT_CHANGE_TYPE_UNDEFINED); } }
/** * Sets the transformation that is applied to the text that this * TextView is displaying. * * @attr ref android.R.styleable#TextView_password * @attr ref android.R.styleable#TextView_singleLine */
Sets the transformation that is applied to the text that this TextView is displaying
setTransformationMethod
{ "repo_name": "szpaddy/android-4.1.2_r2-core", "path": "java/android/widget/TextView.java", "license": "apache-2.0", "size": 343588 }
[ "android.text.Editable", "android.text.Spannable", "android.text.method.TransformationMethod", "android.text.method.TransformationMethod2", "android.view.accessibility.AccessibilityEvent" ]
import android.text.Editable; import android.text.Spannable; import android.text.method.TransformationMethod; import android.text.method.TransformationMethod2; import android.view.accessibility.AccessibilityEvent;
import android.text.*; import android.text.method.*; import android.view.accessibility.*;
[ "android.text", "android.view" ]
android.text; android.view;
706,655
public void remove( Module module, DesignElement content, String propName ) { ElementPropertyDefn defn = getPropertyDefn( propName ); if ( defn != null ) { // if the content does not belong to the same module, do nothing if (content.getRoot( ) != null && module != content.getRoot( ) ) { return; } if ( defn.isList( ) ) { List<DesignElement> values = (List<DesignElement>) getLocalProperty( module, propName ); if ( values != null ) { values.remove( content ); content.containerInfo = null; } } else { clearProperty( propName ); content.containerInfo = null; } } }
void function( Module module, DesignElement content, String propName ) { ElementPropertyDefn defn = getPropertyDefn( propName ); if ( defn != null ) { if (content.getRoot( ) != null && module != content.getRoot( ) ) { return; } if ( defn.isList( ) ) { List<DesignElement> values = (List<DesignElement>) getLocalProperty( module, propName ); if ( values != null ) { values.remove( content ); content.containerInfo = null; } } else { clearProperty( propName ); content.containerInfo = null; } } }
/** * Removes a content from the given property value. * * @param module * the module of the content * @param content * the content to remove * @param propName * the property name where the content resides */
Removes a content from the given property value
remove
{ "repo_name": "sguan-actuate/birt", "path": "model/org.eclipse.birt.report.model/src/org/eclipse/birt/report/model/core/DesignElement.java", "license": "epl-1.0", "size": 113258 }
[ "java.util.List", "org.eclipse.birt.report.model.metadata.ElementPropertyDefn" ]
import java.util.List; import org.eclipse.birt.report.model.metadata.ElementPropertyDefn;
import java.util.*; import org.eclipse.birt.report.model.metadata.*;
[ "java.util", "org.eclipse.birt" ]
java.util; org.eclipse.birt;
132,707
@Test public void testCount_withNullInput() throws Exception { Assert.assertEquals(Integer.valueOf(1), new DoubleContentAggregateFunction().count(null)); }
void function() throws Exception { Assert.assertEquals(Integer.valueOf(1), new DoubleContentAggregateFunction().count(null)); }
/** * Test case for {@link DoubleContentAggregateFunction#count(Integer)} being * provided null as input */
Test case for <code>DoubleContentAggregateFunction#count(Integer)</code> being provided null as input
testCount_withNullInput
{ "repo_name": "ottogroup/flink-operator-library", "path": "src/test/java/com/ottogroup/bi/streaming/operator/json/aggregate/functions/DoubleContentAggregateFunctionTest.java", "license": "apache-2.0", "size": 7655 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
926,206
public MqttConnectOptions getOptions() { return options; }
MqttConnectOptions function() { return options; }
/** * Fetches the default options set for the MQTT Client * * @return the options that are currently set for the client. */
Fetches the default options set for the MQTT Client
getOptions
{ "repo_name": "wso2-incubator/iot-server-agents", "path": "wso2_sense_agent/app/src/main/java/org/wso2/carbon/iot/android/sense/data/publisher/mqtt/transport/MQTTTransportHandler.java", "license": "apache-2.0", "size": 19102 }
[ "org.eclipse.paho.client.mqttv3.MqttConnectOptions" ]
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.*;
[ "org.eclipse.paho" ]
org.eclipse.paho;
1,685,823
public void generateMethod(IType type, ICompilationUnit cu, Shell shell, IProgressMonitor monitor) throws JavaModelException { String className = type.getElementName(); String title = MessageFormat.format(CCMessages.getString("Generator.generating"), //$NON-NLS-1$ new Object[]{className}); monitor.beginTask(title, 100); monitor.worked(10); monitor.setTaskName(title + CCMessages.getString("Generator.parsing")); //$NON-NLS-1$ String src = createMethod(type); monitor.worked(30); monitor.setTaskName(title + CCMessages.getString("Generator.formatting")); //$NON-NLS-1$ Document document = new Document(src); TextEdit text = ToolFactory.createCodeFormatter(null).format( CodeFormatter.K_UNKNOWN, src, 0, src.length(), getIndentUsed(type, cu) + 1, null); try { text.apply(document); } catch (MalformedTreeException ex) { MessageDialog.openError(shell, CCMessages.getString("Generator.errortitle"), ex.getMessage()); //$NON-NLS-1$ } catch (BadLocationException ex) { MessageDialog.openError(shell, CCMessages.getString("Generator.errortitle"), ex.getMessage()); //$NON-NLS-1$ } monitor.worked(20); monitor.setTaskName(title + CCMessages.getString("Generator.adding")); //$NON-NLS-1$ type.createMethod(document.get() + LINE_SEPARATOR, null, false, null); monitor.worked(20); monitor.setTaskName(title + CCMessages.getString("Generator.imports")); //$NON-NLS-1$ addImports(type); monitor.worked(20); monitor.done(); }
void function(IType type, ICompilationUnit cu, Shell shell, IProgressMonitor monitor) throws JavaModelException { String className = type.getElementName(); String title = MessageFormat.format(CCMessages.getString(STR), new Object[]{className}); monitor.beginTask(title, 100); monitor.worked(10); monitor.setTaskName(title + CCMessages.getString(STR)); String src = createMethod(type); monitor.worked(30); monitor.setTaskName(title + CCMessages.getString(STR)); Document document = new Document(src); TextEdit text = ToolFactory.createCodeFormatter(null).format( CodeFormatter.K_UNKNOWN, src, 0, src.length(), getIndentUsed(type, cu) + 1, null); try { text.apply(document); } catch (MalformedTreeException ex) { MessageDialog.openError(shell, CCMessages.getString(STR), ex.getMessage()); } catch (BadLocationException ex) { MessageDialog.openError(shell, CCMessages.getString(STR), ex.getMessage()); } monitor.worked(20); monitor.setTaskName(title + CCMessages.getString(STR)); type.createMethod(document.get() + LINE_SEPARATOR, null, false, null); monitor.worked(20); monitor.setTaskName(title + CCMessages.getString(STR)); addImports(type); monitor.worked(20); monitor.done(); }
/** * Generates the method by: * <ul> * <li>call createMethod</li> * <li>format the given method</li> * <li>add it to type</li> * <li>call addImports</li> * </ul>. * @param type IType * @param cu compilation unit * @param shell Shell for messages * @param monitor progress monitor, updated during processing * @throws JavaModelException any exception in method generation */
Generates the method by: call createMethod format the given method add it to type call addImports
generateMethod
{ "repo_name": "mnuessler/commonclipse", "path": "src/main/java/net/sf/commonclipse/Generator.java", "license": "apache-2.0", "size": 13823 }
[ "java.text.MessageFormat", "org.eclipse.core.runtime.IProgressMonitor", "org.eclipse.jdt.core.ICompilationUnit", "org.eclipse.jdt.core.IType", "org.eclipse.jdt.core.JavaModelException", "org.eclipse.jdt.core.ToolFactory", "org.eclipse.jdt.core.formatter.CodeFormatter", "org.eclipse.jface.dialogs.MessageDialog", "org.eclipse.jface.text.BadLocationException", "org.eclipse.jface.text.Document", "org.eclipse.swt.widgets.Shell", "org.eclipse.text.edits.MalformedTreeException", "org.eclipse.text.edits.TextEdit" ]
import java.text.MessageFormat; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.jdt.core.ICompilationUnit; import org.eclipse.jdt.core.IType; import org.eclipse.jdt.core.JavaModelException; import org.eclipse.jdt.core.ToolFactory; import org.eclipse.jdt.core.formatter.CodeFormatter; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.text.BadLocationException; import org.eclipse.jface.text.Document; import org.eclipse.swt.widgets.Shell; import org.eclipse.text.edits.MalformedTreeException; import org.eclipse.text.edits.TextEdit;
import java.text.*; import org.eclipse.core.runtime.*; import org.eclipse.jdt.core.*; import org.eclipse.jdt.core.formatter.*; import org.eclipse.jface.dialogs.*; import org.eclipse.jface.text.*; import org.eclipse.swt.widgets.*; import org.eclipse.text.edits.*;
[ "java.text", "org.eclipse.core", "org.eclipse.jdt", "org.eclipse.jface", "org.eclipse.swt", "org.eclipse.text" ]
java.text; org.eclipse.core; org.eclipse.jdt; org.eclipse.jface; org.eclipse.swt; org.eclipse.text;
906,530
public void testStoreBetweenRepositories() { final CountingWBORepository repoA = new CountingWBORepository(); // "Remote". First source. final CountingWBORepository repoB = new CountingWBORepository(); // "Local". First sink. long now = System.currentTimeMillis(); TestRecord recordA1 = new TestRecord("aacdefghiaaa", "coll", now - 30, false); TestRecord recordA2 = new TestRecord("aacdefghibbb", "coll", now - 20, false); TestRecord recordB1 = new TestRecord("aacdefghiaaa", "coll", now - 10, false); TestRecord recordB2 = new TestRecord("aacdefghibbb", "coll", now - 40, false); TestRecord recordA3 = new TestRecord("nncdefghibbb", "coll", now, false); TestRecord recordB3 = new TestRecord("nncdefghiaaa", "coll", now, false); // A1 and B1 are the same, but B's version is newer. We expect A1 to be downloaded // and B1 to be uploaded. // A2 and B2 are the same, but A's version is newer. We expect A2 to be downloaded // and B2 to not be uploaded. // Both A3 and B3 are new. We expect them to go in each direction. // Expected counts, then: // Repo A: B1 + B3 // Repo B: A1 + A2 + A3 repoB.wbos.put(recordB1.guid, recordB1); repoB.wbos.put(recordB2.guid, recordB2); repoB.wbos.put(recordB3.guid, recordB3); repoA.wbos.put(recordA1.guid, recordA1); repoA.wbos.put(recordA2.guid, recordA2); repoA.wbos.put(recordA3.guid, recordA3); final Synchronizer s = new Synchronizer(); s.repositoryA = repoA; s.repositoryB = repoB;
void function() { final CountingWBORepository repoA = new CountingWBORepository(); final CountingWBORepository repoB = new CountingWBORepository(); long now = System.currentTimeMillis(); TestRecord recordA1 = new TestRecord(STR, "coll", now - 30, false); TestRecord recordA2 = new TestRecord(STR, "coll", now - 20, false); TestRecord recordB1 = new TestRecord(STR, "coll", now - 10, false); TestRecord recordB2 = new TestRecord(STR, "coll", now - 40, false); TestRecord recordA3 = new TestRecord(STR, "coll", now, false); TestRecord recordB3 = new TestRecord(STR, "coll", now, false); repoB.wbos.put(recordB1.guid, recordB1); repoB.wbos.put(recordB2.guid, recordB2); repoB.wbos.put(recordB3.guid, recordB3); repoA.wbos.put(recordA1.guid, recordA1); repoA.wbos.put(recordA2.guid, recordA2); repoA.wbos.put(recordA3.guid, recordA3); final Synchronizer s = new Synchronizer(); s.repositoryA = repoA; s.repositoryB = repoB;
/** * Create two repositories, syncing from one to the other. Ensure * that records stored from one aren't re-uploaded. */
Create two repositories, syncing from one to the other. Ensure that records stored from one aren't re-uploaded
testStoreBetweenRepositories
{ "repo_name": "jrconlin/mc_backup", "path": "tests/background/junit3/src/sync/TestStoreTracking.java", "license": "mpl-2.0", "size": 14147 }
[ "org.mozilla.gecko.sync.synchronizer.Synchronizer" ]
import org.mozilla.gecko.sync.synchronizer.Synchronizer;
import org.mozilla.gecko.sync.synchronizer.*;
[ "org.mozilla.gecko" ]
org.mozilla.gecko;
922,823
public static void renameAppPermissionPathNode(String oldName, String newName) throws IdentityApplicationManagementException { List<ApplicationPermission> loadPermissions = loadPermissions(oldName); String newApplicationNode = ApplicationMgtUtil.getApplicationPermissionPath() + PATH_CONSTANT + oldName; Registry tenantGovReg = CarbonContext.getThreadLocalCarbonContext().getRegistry( RegistryType.USER_GOVERNANCE); //creating new application node try { for (ApplicationPermission applicationPermission : loadPermissions) { tenantGovReg.delete(newApplicationNode + PATH_CONSTANT + applicationPermission.getValue()); } tenantGovReg.delete(newApplicationNode); Collection permissionNode = tenantGovReg.newCollection(); permissionNode.setProperty("name", newName); newApplicationNode = ApplicationMgtUtil.getApplicationPermissionPath() + PATH_CONSTANT + newName; String applicationNode = newApplicationNode; tenantGovReg.put(newApplicationNode, permissionNode); addPermission(applicationNode, loadPermissions.toArray(new ApplicationPermission[loadPermissions.size()]), tenantGovReg); } catch (RegistryException e) { throw new IdentityApplicationManagementException("Error while renaming permission node " + oldName + "to " + newName, e); } }
static void function(String oldName, String newName) throws IdentityApplicationManagementException { List<ApplicationPermission> loadPermissions = loadPermissions(oldName); String newApplicationNode = ApplicationMgtUtil.getApplicationPermissionPath() + PATH_CONSTANT + oldName; Registry tenantGovReg = CarbonContext.getThreadLocalCarbonContext().getRegistry( RegistryType.USER_GOVERNANCE); try { for (ApplicationPermission applicationPermission : loadPermissions) { tenantGovReg.delete(newApplicationNode + PATH_CONSTANT + applicationPermission.getValue()); } tenantGovReg.delete(newApplicationNode); Collection permissionNode = tenantGovReg.newCollection(); permissionNode.setProperty("name", newName); newApplicationNode = ApplicationMgtUtil.getApplicationPermissionPath() + PATH_CONSTANT + newName; String applicationNode = newApplicationNode; tenantGovReg.put(newApplicationNode, permissionNode); addPermission(applicationNode, loadPermissions.toArray(new ApplicationPermission[loadPermissions.size()]), tenantGovReg); } catch (RegistryException e) { throw new IdentityApplicationManagementException(STR + oldName + STR + newName, e); } }
/** * Rename the registry path node name for a deleted Service provider role. * * @param oldName * @param newName * @throws IdentityApplicationManagementException */
Rename the registry path node name for a deleted Service provider role
renameAppPermissionPathNode
{ "repo_name": "wso2/carbon-identity-framework", "path": "components/application-mgt/org.wso2.carbon.identity.application.mgt/src/main/java/org/wso2/carbon/identity/application/mgt/ApplicationMgtUtil.java", "license": "apache-2.0", "size": 43620 }
[ "java.util.List", "org.wso2.carbon.context.CarbonContext", "org.wso2.carbon.context.RegistryType", "org.wso2.carbon.identity.application.common.IdentityApplicationManagementException", "org.wso2.carbon.identity.application.common.model.ApplicationPermission", "org.wso2.carbon.registry.api.Collection", "org.wso2.carbon.registry.api.Registry", "org.wso2.carbon.registry.api.RegistryException" ]
import java.util.List; import org.wso2.carbon.context.CarbonContext; import org.wso2.carbon.context.RegistryType; import org.wso2.carbon.identity.application.common.IdentityApplicationManagementException; import org.wso2.carbon.identity.application.common.model.ApplicationPermission; import org.wso2.carbon.registry.api.Collection; import org.wso2.carbon.registry.api.Registry; import org.wso2.carbon.registry.api.RegistryException;
import java.util.*; import org.wso2.carbon.context.*; import org.wso2.carbon.identity.application.common.*; import org.wso2.carbon.identity.application.common.model.*; import org.wso2.carbon.registry.api.*;
[ "java.util", "org.wso2.carbon" ]
java.util; org.wso2.carbon;
836,551
interface WithZones { Update withZones(List<String> zones); } }
interface WithZones { Update withZones(List<String> zones); } }
/** * Specifies zones. * @param zones A list of availability zones denoting the IP allocated for the resource needs to come from * @return the next update stage */
Specifies zones
withZones
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/network/mgmt-v2019_11_01/src/main/java/com/microsoft/azure/management/network/v2019_11_01/PublicIPPrefix.java", "license": "mit", "size": 8219 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,198,263
void cargoWasMisdirected(Cargo cargo);
void cargoWasMisdirected(Cargo cargo);
/** * A cargo has been misdirected. * * @param cargo cargo */
A cargo has been misdirected
cargoWasMisdirected
{ "repo_name": "seedstack/samples", "path": "full-apps/ddd/src/main/java/org/seedstack/samples/ddd/application/services/ApplicationEventService.java", "license": "mpl-2.0", "size": 1412 }
[ "org.seedstack.samples.ddd.domain.model.cargo.Cargo" ]
import org.seedstack.samples.ddd.domain.model.cargo.Cargo;
import org.seedstack.samples.ddd.domain.model.cargo.*;
[ "org.seedstack.samples" ]
org.seedstack.samples;
1,606,549
public static boolean isPreparedStatementClusterError(Throwable e) { while (e != null) { if (e instanceof InvalidQueryException && e.getMessage().contains(PREP_STATEMENT_CLUSTER_INSTANCE_ERROR)) return true; e = e.getCause(); } return false; }
static boolean function(Throwable e) { while (e != null) { if (e instanceof InvalidQueryException && e.getMessage().contains(PREP_STATEMENT_CLUSTER_INSTANCE_ERROR)) return true; e = e.getCause(); } return false; }
/** * Checks if a Cassandra error occurred because a prepared statement created in one session was used in another session. * * @param e Exception to check. * @return {@code true} in case of invalid usage of prepared statement. */
Checks if a Cassandra error occurred because a prepared statement created in one session was used in another session
isPreparedStatementClusterError
{ "repo_name": "nizhikov/ignite", "path": "modules/cassandra/store/src/main/java/org/apache/ignite/cache/store/cassandra/common/CassandraHelper.java", "license": "apache-2.0", "size": 7034 }
[ "com.datastax.driver.core.exceptions.InvalidQueryException" ]
import com.datastax.driver.core.exceptions.InvalidQueryException;
import com.datastax.driver.core.exceptions.*;
[ "com.datastax.driver" ]
com.datastax.driver;
1,431,185
public static File getBlockFile(File storageDir, ExtendedBlock blk) { return new File(getFinalizedDir(storageDir, blk.getBlockPoolId()), blk.getBlockName()); }
static File function(File storageDir, ExtendedBlock blk) { return new File(getFinalizedDir(storageDir, blk.getBlockPoolId()), blk.getBlockName()); }
/** * Get file corresponding to a block * @param storageDir storage directory * @param blk block to be corrupted * @return file corresponding to the block */
Get file corresponding to a block
getBlockFile
{ "repo_name": "moreus/hadoop", "path": "hadoop-0.23.10/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java", "license": "apache-2.0", "size": 74425 }
[ "java.io.File", "org.apache.hadoop.hdfs.protocol.ExtendedBlock" ]
import java.io.File; import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import java.io.*; import org.apache.hadoop.hdfs.protocol.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
88,319
ValidateMeasureResult createExports(String key, List<MatValueSet> matValueSetList, boolean shouldCreateArtifacts) throws Exception;
ValidateMeasureResult createExports(String key, List<MatValueSet> matValueSetList, boolean shouldCreateArtifacts) throws Exception;
/** * Validate measure for export. * * @param key * - {@link String}. * @param matValueSetList * - {@link ArrayList} of {@link MatValueSet}. * @return {@link ValidateMeasureResult}. * @throws Exception * - {@link Exception}. */
Validate measure for export
createExports
{ "repo_name": "MeasureAuthoringTool/MeasureAuthoringTool_LatestSprint", "path": "mat/src/main/java/mat/server/service/MeasurePackageService.java", "license": "cc0-1.0", "size": 6800 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,343,551
private static Transferable getJVMLocalSourceTransferable() { return currentJVMLocalSourceTransferable; } protected final static int STATUS_NONE = 0; // none pending protected final static int STATUS_WAIT = 1; // drop pending protected final static int STATUS_ACCEPT = 2; protected final static int STATUS_REJECT = -1; public SunDropTargetContextPeer() { super(); } public DropTarget getDropTarget() { return currentDT; }
static Transferable function() { return currentJVMLocalSourceTransferable; } protected final static int STATUS_NONE = 0; protected final static int STATUS_WAIT = 1; protected final static int STATUS_ACCEPT = 2; protected final static int STATUS_REJECT = -1; public SunDropTargetContextPeer() { super(); } public DropTarget getDropTarget() { return currentDT; }
/** * obtain the transferable iff the operation is in the same VM */
obtain the transferable iff the operation is in the same VM
getJVMLocalSourceTransferable
{ "repo_name": "lambdalab-mirror/jdk7u-jdk", "path": "src/share/classes/sun/awt/dnd/SunDropTargetContextPeer.java", "license": "gpl-2.0", "size": 29674 }
[ "java.awt.datatransfer.Transferable", "java.awt.dnd.DropTarget" ]
import java.awt.datatransfer.Transferable; import java.awt.dnd.DropTarget;
import java.awt.datatransfer.*; import java.awt.dnd.*;
[ "java.awt" ]
java.awt;
985,695
Module getModuleByFile(@NotNull VirtualFile file);
Module getModuleByFile(@NotNull VirtualFile file);
/** * Returns the module to which the specified file belongs. This method is aware of the file->module mapping * for generated files. * * @param file the file to check. * @return the module to which the file belongs */
Returns the module to which the specified file belongs. This method is aware of the file->module mapping for generated files
getModuleByFile
{ "repo_name": "paplorinc/intellij-community", "path": "java/compiler/openapi/src/com/intellij/openapi/compiler/CompileContext.java", "license": "apache-2.0", "size": 5533 }
[ "com.intellij.openapi.module.Module", "com.intellij.openapi.vfs.VirtualFile", "org.jetbrains.annotations.NotNull" ]
import com.intellij.openapi.module.Module; import com.intellij.openapi.vfs.VirtualFile; import org.jetbrains.annotations.NotNull;
import com.intellij.openapi.module.*; import com.intellij.openapi.vfs.*; import org.jetbrains.annotations.*;
[ "com.intellij.openapi", "org.jetbrains.annotations" ]
com.intellij.openapi; org.jetbrains.annotations;
2,648,968
@ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> deleteAsync(String resourceGroupName, String virtualNetworkGatewayConnectionName) { return beginDeleteAsync(resourceGroupName, virtualNetworkGatewayConnectionName) .last() .flatMap(this.client::getLroFinalResultOrError); }
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> function(String resourceGroupName, String virtualNetworkGatewayConnectionName) { return beginDeleteAsync(resourceGroupName, virtualNetworkGatewayConnectionName) .last() .flatMap(this.client::getLroFinalResultOrError); }
/** * Deletes the specified virtual network Gateway connection. * * @param resourceGroupName The name of the resource group. * @param virtualNetworkGatewayConnectionName The name of the virtual network gateway connection. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return A {@link Mono} that completes when a successful response is received. */
Deletes the specified virtual network Gateway connection
deleteAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/implementation/VirtualNetworkGatewayConnectionsClientImpl.java", "license": "mit", "size": 187423 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.*;
[ "com.azure.core" ]
com.azure.core;
1,590,315
@SimpleProperty( description = "Whether or not to show the loading dialog", category = PropertyCategory.BEHAVIOR) public boolean ShowLoadingDialog() { return showLoadingDialog; }
@SimpleProperty( description = STR, category = PropertyCategory.BEHAVIOR) boolean function() { return showLoadingDialog; }
/** * Getter for the loading dialog's visibility. * @return True if the loading dialog should be shown, otherwise False. */
Getter for the loading dialog's visibility
ShowLoadingDialog
{ "repo_name": "kkashi01/appinventor-sources", "path": "appinventor/components/src/com/google/appinventor/components/runtime/FusiontablesControl.java", "license": "apache-2.0", "size": 40494 }
[ "com.google.appinventor.components.annotations.PropertyCategory", "com.google.appinventor.components.annotations.SimpleProperty" ]
import com.google.appinventor.components.annotations.PropertyCategory; import com.google.appinventor.components.annotations.SimpleProperty;
import com.google.appinventor.components.annotations.*;
[ "com.google.appinventor" ]
com.google.appinventor;
1,649,142
public void restoreVersion(Inode node, User user, boolean respectFrontendRoles) throws DotSecurityException, DotStateException, DotDataException;
void function(Inode node, User user, boolean respectFrontendRoles) throws DotSecurityException, DotStateException, DotDataException;
/** * Will make the passed in node the working copy. * * @param node * @param user * @param respectFrontendRoles * @throws DotDataException * @throws DotStateException * @throws DotSecurityException */
Will make the passed in node the working copy
restoreVersion
{ "repo_name": "wisdom-garden/dotcms", "path": "src/com/dotmarketing/business/skeleton/DotCMSAPI.java", "license": "gpl-3.0", "size": 24636 }
[ "com.dotmarketing.beans.Inode", "com.dotmarketing.business.DotStateException", "com.dotmarketing.exception.DotDataException", "com.dotmarketing.exception.DotSecurityException", "com.liferay.portal.model.User" ]
import com.dotmarketing.beans.Inode; import com.dotmarketing.business.DotStateException; import com.dotmarketing.exception.DotDataException; import com.dotmarketing.exception.DotSecurityException; import com.liferay.portal.model.User;
import com.dotmarketing.beans.*; import com.dotmarketing.business.*; import com.dotmarketing.exception.*; import com.liferay.portal.model.*;
[ "com.dotmarketing.beans", "com.dotmarketing.business", "com.dotmarketing.exception", "com.liferay.portal" ]
com.dotmarketing.beans; com.dotmarketing.business; com.dotmarketing.exception; com.liferay.portal;
1,222,926
void verifyUpdateProcessGroup(ProcessGroupDTO processGroupDTO);
void verifyUpdateProcessGroup(ProcessGroupDTO processGroupDTO);
/** * Verifies that the Process Group identified by the given DTO can be updated in the manner appropriate according * to the DTO * * @param processGroupDTO the DTO that indicates the updates to occur */
Verifies that the Process Group identified by the given DTO can be updated in the manner appropriate according to the DTO
verifyUpdateProcessGroup
{ "repo_name": "mattyb149/nifi", "path": "nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/NiFiServiceFacade.java", "license": "apache-2.0", "size": 87066 }
[ "org.apache.nifi.web.api.dto.ProcessGroupDTO" ]
import org.apache.nifi.web.api.dto.ProcessGroupDTO;
import org.apache.nifi.web.api.dto.*;
[ "org.apache.nifi" ]
org.apache.nifi;
2,231,500
public CredentialReference credential() { return this.credential; }
CredentialReference function() { return this.credential; }
/** * Get the credential property: The credential reference containing authentication information. * * @return the credential value. */
Get the credential property: The credential reference containing authentication information
credential
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/datafactory/azure-resourcemanager-datafactory/src/main/java/com/azure/resourcemanager/datafactory/fluent/models/CosmosDbLinkedServiceTypeProperties.java", "license": "mit", "size": 15299 }
[ "com.azure.resourcemanager.datafactory.models.CredentialReference" ]
import com.azure.resourcemanager.datafactory.models.CredentialReference;
import com.azure.resourcemanager.datafactory.models.*;
[ "com.azure.resourcemanager" ]
com.azure.resourcemanager;
946,420
@Override protected Boolean compute(Date left, Date right) { if (left == null || right == null) { return null; } return left.after(right); }
Boolean function(Date left, Date right) { if (left == null right == null) { return null; } return left.after(right); }
/** * Compares two dates and returns true if the second date is after the first */
Compares two dates and returns true if the second date is after the first
compute
{ "repo_name": "brtonnies/rapidminer-studio", "path": "src/main/java/com/rapidminer/tools/expression/internal/function/date/DateAfter.java", "license": "agpl-3.0", "size": 1362 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
669,762
public void setDoStayInvisible(boolean doStayInvisible, Tab tab) { mDoStayInvisible = doStayInvisible; if (mTabObserver == null) mTabObserver = createTabObserver(); if (doStayInvisible) { tab.addObserver(mTabObserver); } else { tab.removeObserver(mTabObserver); } }
void function(boolean doStayInvisible, Tab tab) { mDoStayInvisible = doStayInvisible; if (mTabObserver == null) mTabObserver = createTabObserver(); if (doStayInvisible) { tab.addObserver(mTabObserver); } else { tab.removeObserver(mTabObserver); } }
/** * Call with {@code true} when a higher priority bottom element is visible to keep the infobars * from ever becoming visible. Call with {@code false} to restore normal visibility behavior. * @param doStayInvisible Whether the infobars should stay invisible even when they would * normally become visible. * @param tab The current Tab. */
Call with true when a higher priority bottom element is visible to keep the infobars from ever becoming visible. Call with false to restore normal visibility behavior
setDoStayInvisible
{ "repo_name": "mohamed--abdel-maksoud/chromium.src", "path": "chrome/android/java/src/org/chromium/chrome/browser/infobar/InfoBarContainer.java", "license": "bsd-3-clause", "size": 21246 }
[ "org.chromium.chrome.browser.Tab" ]
import org.chromium.chrome.browser.Tab;
import org.chromium.chrome.browser.*;
[ "org.chromium.chrome" ]
org.chromium.chrome;
465,259
public static <E> Set<E> immutableSet() { // TODO(anorth): optimise to a truly immutable set. return Collections.unmodifiableSet(CollectionUtils.<E>newHashSet()); }
static <E> Set<E> function() { return Collections.unmodifiableSet(CollectionUtils.<E>newHashSet()); }
/** * Creates an empty immutable set. * * @return a newly created set containing those elements. */
Creates an empty immutable set
immutableSet
{ "repo_name": "vega113/incubator-wave", "path": "wave/src/main/java/org/waveprotocol/wave/model/util/CollectionUtils.java", "license": "apache-2.0", "size": 37875 }
[ "java.util.Collections", "java.util.Set" ]
import java.util.Collections; import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
2,344,268
int updateByExample(@Param("record") CZNotifyLevel record, @Param("example") CZNotifyLevelExample example);
int updateByExample(@Param(STR) CZNotifyLevel record, @Param(STR) CZNotifyLevelExample example);
/** * This method was generated by MyBatis Generator. * This method corresponds to the database table nfjd502.dbo.CZNotifyLevel * * @mbggenerated Thu Mar 02 11:23:21 CST 2017 */
This method was generated by MyBatis Generator. This method corresponds to the database table nfjd502.dbo.CZNotifyLevel
updateByExample
{ "repo_name": "xtwxy/cassandra-tests", "path": "mstar-server-dao/src/main/java/com/wincom/mstar/dao/mapper/CZNotifyLevelMapper.java", "license": "apache-2.0", "size": 2198 }
[ "com.wincom.mstar.domain.CZNotifyLevel", "com.wincom.mstar.domain.CZNotifyLevelExample", "org.apache.ibatis.annotations.Param" ]
import com.wincom.mstar.domain.CZNotifyLevel; import com.wincom.mstar.domain.CZNotifyLevelExample; import org.apache.ibatis.annotations.Param;
import com.wincom.mstar.domain.*; import org.apache.ibatis.annotations.*;
[ "com.wincom.mstar", "org.apache.ibatis" ]
com.wincom.mstar; org.apache.ibatis;
956,623
private void processSetCssNameMapping(NodeTraversal t, Node n, Node parent) { Node left = n.getFirstChild(); Node arg = left.getNext(); if (verifyArgument(t, left, arg, Token.OBJECTLIT)) { // Translate OBJECTLIT into SubstitutionMap. All keys and // values must be strings, or an error will be thrown. final Map<String, String> cssNames = Maps.newHashMap(); JSError error = null; for (Node key = arg.getFirstChild(); key != null; key = key.getNext()) { Node value = key.getFirstChild(); if (key.getType() != Token.STRING || value == null || value.getType() != Token.STRING) { error = t.makeError(n, NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR); } if (error != null) { compiler.report(error); break; } cssNames.put(key.getString(), value.getString()); }
void function(NodeTraversal t, Node n, Node parent) { Node left = n.getFirstChild(); Node arg = left.getNext(); if (verifyArgument(t, left, arg, Token.OBJECTLIT)) { final Map<String, String> cssNames = Maps.newHashMap(); JSError error = null; for (Node key = arg.getFirstChild(); key != null; key = key.getNext()) { Node value = key.getFirstChild(); if (key.getType() != Token.STRING value == null value.getType() != Token.STRING) { error = t.makeError(n, NON_STRING_PASSED_TO_SET_CSS_NAME_MAPPING_ERROR); } if (error != null) { compiler.report(error); break; } cssNames.put(key.getString(), value.getString()); }
/** * Processes a call to goog.setCssNameMapping(). Either the argument to * goog.setCssNameMapping() is valid, in which case it will be used to create * a CssRenamingMap for the compiler of this CompilerPass, or it is invalid * and a JSCompiler error will be reported. * @see #visit(NodeTraversal, Node, Node) */
Processes a call to goog.setCssNameMapping(). Either the argument to goog.setCssNameMapping() is valid, in which case it will be used to create a CssRenamingMap for the compiler of this CompilerPass, or it is invalid and a JSCompiler error will be reported
processSetCssNameMapping
{ "repo_name": "olegshnitko/closure-compiler", "path": "src/com/google/javascript/jscomp/ProcessClosurePrimitives.java", "license": "apache-2.0", "size": 32465 }
[ "com.google.common.collect.Maps", "com.google.javascript.rhino.Node", "com.google.javascript.rhino.Token", "java.util.Map" ]
import com.google.common.collect.Maps; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.Map;
import com.google.common.collect.*; import com.google.javascript.rhino.*; import java.util.*;
[ "com.google.common", "com.google.javascript", "java.util" ]
com.google.common; com.google.javascript; java.util;
825,343
private void initBroadcastManager() { bManager = LocalBroadcastManager.getInstance(service); IntentFilter intentFilter = new IntentFilter(); intentFilter.addAction(DIALOG_RESPONSE); bManager.registerReceiver(bReceiver, intentFilter); }
void function() { bManager = LocalBroadcastManager.getInstance(service); IntentFilter intentFilter = new IntentFilter(); intentFilter.addAction(DIALOG_RESPONSE); bManager.registerReceiver(bReceiver, intentFilter); }
/** * Initializes the broadcast manager endpoint for Dialogs */
Initializes the broadcast manager endpoint for Dialogs
initBroadcastManager
{ "repo_name": "BirdBrainTechnologies/BirdBlox-Android-Support", "path": "app/src/main/java/com/birdbraintechnologies/birdblox/httpservice/RequestHandlers/HostDeviceHandler.java", "license": "mit", "size": 22483 }
[ "android.content.IntentFilter", "android.support.v4.content.LocalBroadcastManager" ]
import android.content.IntentFilter; import android.support.v4.content.LocalBroadcastManager;
import android.content.*; import android.support.v4.content.*;
[ "android.content", "android.support" ]
android.content; android.support;
516,016
public static Sha256Hash hashFileContents(File f) throws IOException { FileInputStream in = new FileInputStream(f); try { return create(ByteStreams.toByteArray(in)); } finally { in.close(); } }
static Sha256Hash function(File f) throws IOException { FileInputStream in = new FileInputStream(f); try { return create(ByteStreams.toByteArray(in)); } finally { in.close(); } }
/** * Returns a hash of the given file's contents. Reads the file fully into memory before hashing so only use with * small files. * @throws IOException */
Returns a hash of the given file's contents. Reads the file fully into memory before hashing so only use with small files
hashFileContents
{ "repo_name": "bitbandi/spreadcoinj", "path": "core/src/main/java/org/spreadcoinj/core/Sha256Hash.java", "license": "apache-2.0", "size": 4413 }
[ "com.google.common.io.ByteStreams", "java.io.File", "java.io.FileInputStream", "java.io.IOException" ]
import com.google.common.io.ByteStreams; import java.io.File; import java.io.FileInputStream; import java.io.IOException;
import com.google.common.io.*; import java.io.*;
[ "com.google.common", "java.io" ]
com.google.common; java.io;
401,195
private static void drawCircle(Graphics g, int x, int y, int r) { g.drawOval(x-r, y-r, 2*r, 2*r); }
static void function(Graphics g, int x, int y, int r) { g.drawOval(x-r, y-r, 2*r, 2*r); }
/** * Calls the drawOval method of java.awt.Graphics * with a square bounding box centered at specified * location with width/height of 2r. * * @param g The Graphics object. * @param x The x-coordinate of the center of the * circle. * @param y The y-coordinate of the center of the * circle. * @param r The radius of the circle. */
Calls the drawOval method of java.awt.Graphics with a square bounding box centered at specified location with width/height of 2r
drawCircle
{ "repo_name": "K-Phoen/Pong", "path": "src/game/PongBase.java", "license": "gpl-3.0", "size": 18132 }
[ "java.awt.Graphics" ]
import java.awt.Graphics;
import java.awt.*;
[ "java.awt" ]
java.awt;
578,421
private void handleChannelWriteFailure(Throwable cause, boolean propagateErrorIfRequired) { long writeFailureProcessingStartTime = System.currentTimeMillis(); try { nettyMetrics.channelWriteError.inc(); Exception exception; if (!(cause instanceof Exception)) { logger.warn("Encountered a throwable on channel write failure", cause); exception = new IllegalStateException("Encountered a Throwable - " + cause.getMessage()); if (propagateErrorIfRequired) { // we can't ignore throwables - so we let Netty deal with it. ctx.fireExceptionCaught(cause); nettyMetrics.throwableCount.inc(); } } else if (cause instanceof ClosedChannelException) { // wrap the exception in something we recognize as a client termination exception = Utils.convertToClientTerminationException(cause); } else { exception = (Exception) cause; } onResponseComplete(exception); cleanupChunks(exception); } finally { nettyMetrics.channelWriteFailureProcessingTimeInMs.update( System.currentTimeMillis() - writeFailureProcessingStartTime); } }
void function(Throwable cause, boolean propagateErrorIfRequired) { long writeFailureProcessingStartTime = System.currentTimeMillis(); try { nettyMetrics.channelWriteError.inc(); Exception exception; if (!(cause instanceof Exception)) { logger.warn(STR, cause); exception = new IllegalStateException(STR + cause.getMessage()); if (propagateErrorIfRequired) { ctx.fireExceptionCaught(cause); nettyMetrics.throwableCount.inc(); } } else if (cause instanceof ClosedChannelException) { exception = Utils.convertToClientTerminationException(cause); } else { exception = (Exception) cause; } onResponseComplete(exception); cleanupChunks(exception); } finally { nettyMetrics.channelWriteFailureProcessingTimeInMs.update( System.currentTimeMillis() - writeFailureProcessingStartTime); } }
/** * Handles post-mortem of writes that have failed. * @param cause the cause of the failure. * @param propagateErrorIfRequired if {@code true} and {@code cause} is not an instance of {@link Exception}, the * error is propagated through the netty pipeline. */
Handles post-mortem of writes that have failed
handleChannelWriteFailure
{ "repo_name": "vgkholla/ambry", "path": "ambry-rest/src/main/java/com.github.ambry.rest/NettyResponseChannel.java", "license": "apache-2.0", "size": 42302 }
[ "com.github.ambry.utils.Utils", "java.nio.channels.ClosedChannelException" ]
import com.github.ambry.utils.Utils; import java.nio.channels.ClosedChannelException;
import com.github.ambry.utils.*; import java.nio.channels.*;
[ "com.github.ambry", "java.nio" ]
com.github.ambry; java.nio;
2,912,407
@Deployment public void testParentActivationOnNonJoiningEnd() throws Exception { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("parentActivationOnNonJoiningEnd"); List<Execution> executionsBefore = runtimeService.createExecutionQuery().list(); assertEquals(3, executionsBefore.size()); // start first round of tasks List<org.flowable.task.api.Task> firstTasks = taskService.createTaskQuery().processInstanceId(processInstance.getId()).list(); assertEquals(2, firstTasks.size()); for (org.flowable.task.api.Task t : firstTasks) { taskService.complete(t.getId()); } // start first round of tasks List<org.flowable.task.api.Task> secondTasks = taskService.createTaskQuery().processInstanceId(processInstance.getId()).list(); assertEquals(2, secondTasks.size()); // complete one task org.flowable.task.api.Task task = secondTasks.get(0); taskService.complete(task.getId()); // should have merged last child execution into parent List<Execution> executionsAfter = runtimeService.createExecutionQuery().list(); assertEquals(1, executionsAfter.size()); Execution execution = executionsAfter.get(0); // and should have one active activity List<String> activeActivityIds = runtimeService.getActiveActivityIds(execution.getId()); assertEquals(1, activeActivityIds.size()); // Completing last task should finish the process instance org.flowable.task.api.Task lastTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); taskService.complete(lastTask.getId()); assertEquals(0l, runtimeService.createProcessInstanceQuery().active().count()); }
void function() throws Exception { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey(STR); List<Execution> executionsBefore = runtimeService.createExecutionQuery().list(); assertEquals(3, executionsBefore.size()); List<org.flowable.task.api.Task> firstTasks = taskService.createTaskQuery().processInstanceId(processInstance.getId()).list(); assertEquals(2, firstTasks.size()); for (org.flowable.task.api.Task t : firstTasks) { taskService.complete(t.getId()); } List<org.flowable.task.api.Task> secondTasks = taskService.createTaskQuery().processInstanceId(processInstance.getId()).list(); assertEquals(2, secondTasks.size()); org.flowable.task.api.Task task = secondTasks.get(0); taskService.complete(task.getId()); List<Execution> executionsAfter = runtimeService.createExecutionQuery().list(); assertEquals(1, executionsAfter.size()); Execution execution = executionsAfter.get(0); List<String> activeActivityIds = runtimeService.getActiveActivityIds(execution.getId()); assertEquals(1, activeActivityIds.size()); org.flowable.task.api.Task lastTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); taskService.complete(lastTask.getId()); assertEquals(0l, runtimeService.createProcessInstanceQuery().active().count()); }
/** * Test for ACT-1216: When merging a concurrent execution the parent is not activated correctly */
Test for ACT-1216: When merging a concurrent execution the parent is not activated correctly
testParentActivationOnNonJoiningEnd
{ "repo_name": "lsmall/flowable-engine", "path": "modules/flowable5-test/src/test/java/org/activiti/engine/test/bpmn/gateway/InclusiveGatewayTest.java", "license": "apache-2.0", "size": 25692 }
[ "java.util.List", "org.flowable.engine.runtime.Execution", "org.flowable.engine.runtime.ProcessInstance" ]
import java.util.List; import org.flowable.engine.runtime.Execution; import org.flowable.engine.runtime.ProcessInstance;
import java.util.*; import org.flowable.engine.runtime.*;
[ "java.util", "org.flowable.engine" ]
java.util; org.flowable.engine;
2,326,460
public AccountSet filterFromTransaction(Object value) { ObjectSet neighbors = new ObjectSet(); if (value instanceof Collection) { neighbors.addAll((Collection<?>) value); } else { neighbors.add(value); } AccountSet answer = new AccountSet(); for (Account obj : this) { if ( ! Collections.disjoint(neighbors, obj.getFromTransaction())) { answer.add(obj); } } return answer; }
AccountSet function(Object value) { ObjectSet neighbors = new ObjectSet(); if (value instanceof Collection) { neighbors.addAll((Collection<?>) value); } else { neighbors.add(value); } AccountSet answer = new AccountSet(); for (Account obj : this) { if ( ! Collections.disjoint(neighbors, obj.getFromTransaction())) { answer.add(obj); } } return answer; }
/** * Loop through the current set of Account objects and collect all contained objects with reference FromTransaction pointing to the object passed as parameter. * * @param value The object required as FromTransaction neighbor of the collected results. * * @return Set of Transaction objects referring to value via FromTransaction */
Loop through the current set of Account objects and collect all contained objects with reference FromTransaction pointing to the object passed as parameter
filterFromTransaction
{ "repo_name": "SWE443-TeamRed/open-bank", "path": "open-bank/src/main/java/org/sdmlib/openbank/util/AccountSet.java", "license": "mit", "size": 30045 }
[ "de.uniks.networkparser.list.ObjectSet", "java.util.Collection", "java.util.Collections", "org.sdmlib.openbank.Account" ]
import de.uniks.networkparser.list.ObjectSet; import java.util.Collection; import java.util.Collections; import org.sdmlib.openbank.Account;
import de.uniks.networkparser.list.*; import java.util.*; import org.sdmlib.openbank.*;
[ "de.uniks.networkparser", "java.util", "org.sdmlib.openbank" ]
de.uniks.networkparser; java.util; org.sdmlib.openbank;
1,635,415
@ServiceMethod(returns = ReturnType.SINGLE) Mono<ExpressRouteCircuitPeeringInner> getAsync(String resourceGroupName, String circuitName, String peeringName);
@ServiceMethod(returns = ReturnType.SINGLE) Mono<ExpressRouteCircuitPeeringInner> getAsync(String resourceGroupName, String circuitName, String peeringName);
/** * Gets the specified peering for the express route circuit. * * @param resourceGroupName The name of the resource group. * @param circuitName The name of the express route circuit. * @param peeringName The name of the peering. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the specified peering for the express route circuit on successful completion of {@link Mono}. */
Gets the specified peering for the express route circuit
getAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/fluent/ExpressRouteCircuitPeeringsClient.java", "license": "mit", "size": 20443 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.resourcemanager.network.fluent.models.ExpressRouteCircuitPeeringInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.network.fluent.models.ExpressRouteCircuitPeeringInner;
import com.azure.core.annotation.*; import com.azure.resourcemanager.network.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
1,310,427
public void emptyLeavingReplyVIDNeighbours() { this.leavingReplyVIDNeighbours = new LinkedList<CanOverlayContact[]>(); }
void function() { this.leavingReplyVIDNeighbours = new LinkedList<CanOverlayContact[]>(); }
/** * sets the list to null */
sets the list to null
emptyLeavingReplyVIDNeighbours
{ "repo_name": "flyroom/PeerfactSimKOM_Clone", "path": "src/org/peerfact/impl/overlay/dht/can/components/CanNode.java", "license": "gpl-2.0", "size": 33180 }
[ "java.util.LinkedList" ]
import java.util.LinkedList;
import java.util.*;
[ "java.util" ]
java.util;
1,495,189
@Before public void initializeDefaultSession() throws UniqueException, UniqueListException { Account admin = new Account("admin", "", "55555555D", "London", "admin", "[email protected]", "666666666", "666666666", "demo", roles.ROLE_ADMIN); accountService.save(admin); defaultSession = getDefaultSession("[email protected]"); // Create User user = new Account("user", "1", "55555555C", "London", "user", "[email protected]", "666666666", "666666666", "demo", roles.ROLE_USER); accountService.save(user); accountType = new AccountType("Adult", false, "Fee for adults", 0); accountTypeService.save(accountType); methodPayment = new MethodPayment("cash", false, "cash"); methodPaymentService.save(methodPayment); user.setAccountType(accountType); user.setMethodPayment(methodPayment); user.setInstallments(1); accountService.update(user, false, true); // Create Payment feeMember = new FeeMember("pay of 2016", 2016, Double.valueOf(20), DateUtils.format("2016-04-05", DateUtils.FORMAT_DATE), DateUtils.format("2016-07-05", DateUtils.FORMAT_DATE), "pay of 2016"); feeMemberService.save(feeMember); }
void function() throws UniqueException, UniqueListException { Account admin = new Account("admin", STR55555555DSTRLondonSTRadminSTRadmin@udc.esSTR666666666STR666666666STRdemoSTRadmin@udc.esSTRuserSTR1STR55555555CSTRLondonSTRuserSTRuser@udc.esSTR666666666STR666666666STRdemoSTRAdultSTRFee for adultsSTRcashSTRcashSTRpay of 2016STR2016-04-05STR2016-07-05STRpay of 2016"); feeMemberService.save(feeMember); }
/** * Initialize default session. * * @throws UniqueException the unique exception */
Initialize default session
initializeDefaultSession
{ "repo_name": "pablogrela/members_cuacfm", "path": "src/test/java/org/cuacfm/members/test/web/paymember/PayMemberListControllerTest.java", "license": "apache-2.0", "size": 14076 }
[ "org.cuacfm.members.model.account.Account", "org.cuacfm.members.model.exceptions.UniqueException", "org.cuacfm.members.model.exceptions.UniqueListException" ]
import org.cuacfm.members.model.account.Account; import org.cuacfm.members.model.exceptions.UniqueException; import org.cuacfm.members.model.exceptions.UniqueListException;
import org.cuacfm.members.model.account.*; import org.cuacfm.members.model.exceptions.*;
[ "org.cuacfm.members" ]
org.cuacfm.members;
604,522
public void assertQueryIsForbidden(final String sql, final AuthenticationResult authenticationResult) { assertQueryIsForbidden(PLANNER_CONFIG_DEFAULT, sql, authenticationResult); }
void function(final String sql, final AuthenticationResult authenticationResult) { assertQueryIsForbidden(PLANNER_CONFIG_DEFAULT, sql, authenticationResult); }
/** * Provided for tests that wish to check multiple queries instead of relying on ExpectedException. */
Provided for tests that wish to check multiple queries instead of relying on ExpectedException
assertQueryIsForbidden
{ "repo_name": "jon-wei/druid", "path": "sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java", "license": "apache-2.0", "size": 29702 }
[ "org.apache.druid.server.security.AuthenticationResult" ]
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.*;
[ "org.apache.druid" ]
org.apache.druid;
514,385
public void addDeployment(List<DeploymentsData> deployments) { try { mDb.beginTransaction(); for (DeploymentsData deployment : deployments) { createDeployment(deployment); } mDb.setTransactionSuccessful(); } finally { mDb.endTransaction(); } }
void function(List<DeploymentsData> deployments) { try { mDb.beginTransaction(); for (DeploymentsData deployment : deployments) { createDeployment(deployment); } mDb.setTransactionSuccessful(); } finally { mDb.endTransaction(); } }
/** * Add new deployments to table * * @param deployments */
Add new deployments to table
addDeployment
{ "repo_name": "mckayb24/Ushahidi_Android", "path": "Core/src/com/ushahidi/android/app/data/Database.java", "license": "lgpl-3.0", "size": 42834 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,113,313
private RawFileStorePrx getRawFileService(SecurityContext ctx) throws DSAccessException, DSOutOfServiceException { try { Connector c = getConnector(ctx); if (c == null) throw new DSOutOfServiceException( "Cannot access the connector."); RawFileStorePrx prx = c.getRawFileService(); if (prx == null) throw new DSOutOfServiceException( "Cannot access the RawFileStore service."); return prx; } catch (Throwable e) { handleException(e, "Cannot access the RawFileStore service."); } return null; }
RawFileStorePrx function(SecurityContext ctx) throws DSAccessException, DSOutOfServiceException { try { Connector c = getConnector(ctx); if (c == null) throw new DSOutOfServiceException( STR); RawFileStorePrx prx = c.getRawFileService(); if (prx == null) throw new DSOutOfServiceException( STR); return prx; } catch (Throwable e) { handleException(e, STR); } return null; }
/** * Returns the {@link RawFileStorePrx} service. * * @param ctx The security context. * @return See above. * @throws DSOutOfServiceException If the connection is broken, or logged in * @throws DSAccessException If an error occurred while trying to * retrieve data from OMERO service. */
Returns the <code>RawFileStorePrx</code> service
getRawFileService
{ "repo_name": "rleigh-dundee/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/env/data/OMEROGateway.java", "license": "gpl-2.0", "size": 268360 }
[ "org.openmicroscopy.shoola.env.data.util.SecurityContext" ]
import org.openmicroscopy.shoola.env.data.util.SecurityContext;
import org.openmicroscopy.shoola.env.data.util.*;
[ "org.openmicroscopy.shoola" ]
org.openmicroscopy.shoola;
2,068,691
public void compact(TableName tableName, boolean major) throws IOException { getMiniHBaseCluster().compact(tableName, major); }
void function(TableName tableName, boolean major) throws IOException { getMiniHBaseCluster().compact(tableName, major); }
/** * Compact all of a table's regions in the mini hbase cluster * @throws IOException */
Compact all of a table's regions in the mini hbase cluster
compact
{ "repo_name": "ndimiduk/hbase", "path": "hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtil.java", "license": "apache-2.0", "size": 151074 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
756,827
@Override protected Logger getLog() { return log; }
Logger function() { return log; }
/** * Returns the log. */
Returns the log
getLog
{ "repo_name": "bertrama/resin", "path": "modules/resin/src/com/caucho/server/webapp/WebAppSingleDeployGenerator.java", "license": "gpl-2.0", "size": 9283 }
[ "java.util.logging.Logger" ]
import java.util.logging.Logger;
import java.util.logging.*;
[ "java.util" ]
java.util;
1,612,791
default Gradient createGradient(GradientType type, GradientScope scope) { return createGradient(type, GradientOrientation.getDefaultByType(type), scope); }
default Gradient createGradient(GradientType type, GradientScope scope) { return createGradient(type, GradientOrientation.getDefaultByType(type), scope); }
/** * Creates a gradient by a type and a scope. * * @param type gradient type * @param scope scope of gradient * @return new gradient based on color scheme */
Creates a gradient by a type and a scope
createGradient
{ "repo_name": "pepstock-org/Charba", "path": "src/org/pepstock/charba/client/impl/plugins/enums/IsEnumeratedScheme.java", "license": "apache-2.0", "size": 4367 }
[ "org.pepstock.charba.client.colors.Gradient", "org.pepstock.charba.client.colors.GradientOrientation", "org.pepstock.charba.client.colors.GradientScope", "org.pepstock.charba.client.colors.GradientType" ]
import org.pepstock.charba.client.colors.Gradient; import org.pepstock.charba.client.colors.GradientOrientation; import org.pepstock.charba.client.colors.GradientScope; import org.pepstock.charba.client.colors.GradientType;
import org.pepstock.charba.client.colors.*;
[ "org.pepstock.charba" ]
org.pepstock.charba;
2,568,604
protected void setMenuItemsVisibility(boolean visible) { for (int i = 0; i < mMenu.size(); i++) { MenuItem item = mMenu.getItem(i); if (item != mSearchItem) { item.setVisible(visible); } } }
void function(boolean visible) { for (int i = 0; i < mMenu.size(); i++) { MenuItem item = mMenu.getItem(i); if (item != mSearchItem) { item.setVisible(visible); } } }
/** * Hides or shows all menu icons. * @param visible True to show; false to hide. */
Hides or shows all menu icons
setMenuItemsVisibility
{ "repo_name": "cohenadair/anglers-log", "path": "android/app/src/main/java/com/cohenadair/anglerslog/fragments/MasterFragment.java", "license": "gpl-3.0", "size": 5577 }
[ "android.view.MenuItem" ]
import android.view.MenuItem;
import android.view.*;
[ "android.view" ]
android.view;
2,762,180
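A hedged sketch of how the helper above is typically driven; only setMenuItemsVisibility and mSearchItem come from the shown code, and the expand/collapse listener wiring is an assumption:
mSearchItem.setOnActionExpandListener(new MenuItem.OnActionExpandListener() {
    @Override public boolean onMenuItemActionExpand(MenuItem item) { setMenuItemsVisibility(false); return true; } // hide the other icons while searching
    @Override public boolean onMenuItemActionCollapse(MenuItem item) { setMenuItemsVisibility(true); return true; } // restore them afterwards
});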
DHCPService service = get(DHCPService.class); ObjectNode node = mapper().createObjectNode() .put("leaseTime", service.getLeaseTime()) .put("renewalTime", service.getRenewalTime()) .put("rebindingTime", service.getRebindingTime()); return ok(node.toString()).build(); }
DHCPService service = get(DHCPService.class); ObjectNode node = mapper().createObjectNode() .put(STR, service.getLeaseTime()) .put(STR, service.getRenewalTime()) .put(STR, service.getRebindingTime()); return ok(node.toString()).build(); }
/** * Get DHCP server configuration data. * Shows lease, renewal and rebinding times in seconds. * * @return 200 OK */
Get DHCP server configuration data. Shows lease, renewal and rebinding times in seconds
getConfigs
{ "repo_name": "chinghanyu/onos", "path": "apps/dhcp/src/main/java/org/onosproject/dhcp/rest/DHCPWebResource.java", "license": "apache-2.0", "size": 5434 }
[ "com.fasterxml.jackson.databind.node.ObjectNode", "org.onosproject.dhcp.DHCPService" ]
import com.fasterxml.jackson.databind.node.ObjectNode; import org.onosproject.dhcp.DHCPService;
import com.fasterxml.jackson.databind.node.*; import org.onosproject.dhcp.*;
[ "com.fasterxml.jackson", "org.onosproject.dhcp" ]
com.fasterxml.jackson; org.onosproject.dhcp;
1,203,787
public void start() throws LifecycleException { if (!initialized) init(); // Validate and update our current state if (started) { if (log.isInfoEnabled()) log.info(sm.getString("coyoteConnector.alreadyStarted")); return; } lifecycle.fireLifecycleEvent(START_EVENT, null); started = true; // We can't register earlier - the JMX registration of this happens // in Server.start callback if (org.apache.tomcat.util.Constants.ENABLE_MODELER) { if (this.oname != null) { // We are registred - register the adapter as well. try { Registry.getRegistry(null, null).registerComponent(protocolHandler, createObjectName(this.domain, "ProtocolHandler"), null); } catch (Exception ex) { log.error(sm.getString("coyoteConnector.protocolRegistrationFailed"), ex); } } else { if (log.isInfoEnabled()) log.info(sm.getString("coyoteConnector.cannotRegisterProtocol")); } } try { protocolHandler.start(); } catch (Exception e) { String errPrefix = ""; if (this.service != null) { errPrefix += "service.getName(): \"" + this.service.getName() + "\"; "; } throw new LifecycleException(errPrefix + " " + sm.getString("coyoteConnector.protocolHandlerStartFailed", e)); } }
void function() throws LifecycleException { if (!initialized) init(); if (started) { if (log.isInfoEnabled()) log.info(sm.getString(STR)); return; } lifecycle.fireLifecycleEvent(START_EVENT, null); started = true; if (org.apache.tomcat.util.Constants.ENABLE_MODELER) { if (this.oname != null) { try { Registry.getRegistry(null, null).registerComponent(protocolHandler, createObjectName(this.domain, STR), null); } catch (Exception ex) { log.error(sm.getString(STR), ex); } } else { if (log.isInfoEnabled()) log.info(sm.getString(STR)); } } try { protocolHandler.start(); } catch (Exception e) { String errPrefix = STRservice.getName(): \STR\STR; } throw new LifecycleException(errPrefix + " " + sm.getString(STR, e)); } }
/** * Begin processing requests via this Connector. * * @exception LifecycleException * if a fatal startup error occurs */
Begin processing requests via this Connector
start
{ "repo_name": "benothman/jboss-web-nio2", "path": "java/org/apache/catalina/connector/Connector.java", "license": "lgpl-3.0", "size": 26733 }
[ "org.apache.catalina.LifecycleException", "org.apache.tomcat.util.modeler.Registry" ]
import org.apache.catalina.LifecycleException; import org.apache.tomcat.util.modeler.Registry;
import org.apache.catalina.*; import org.apache.tomcat.util.modeler.*;
[ "org.apache.catalina", "org.apache.tomcat" ]
org.apache.catalina; org.apache.tomcat;
2,270,883
ValidationResults validateExperiment(ExperimentModel experiment) throws OrchestratorException, LaunchValidationException;
ValidationResults validateExperiment(ExperimentModel experiment) throws OrchestratorException, LaunchValidationException;
/** * This method can be used to run all custom validators plugged in to the orchestrator and make * sure the experiment is ready to launch; if it is not, this will return false * * @param experiment * @return true if the experiment is valid after executing all the validators, otherwise false * @throws OrchestratorException */
This method can be used to run all custom validators plugged in to the orchestrator and make sure the experiment is ready to launch; if it is not, this will return false
validateExperiment
{ "repo_name": "gouravshenoy/airavata", "path": "modules/orchestrator/orchestrator-core/src/main/java/org/apache/airavata/orchestrator/cpi/Orchestrator.java", "license": "apache-2.0", "size": 3619 }
[ "org.apache.airavata.model.error.LaunchValidationException", "org.apache.airavata.model.error.ValidationResults", "org.apache.airavata.model.experiment.ExperimentModel", "org.apache.airavata.orchestrator.core.exception.OrchestratorException" ]
import org.apache.airavata.model.error.LaunchValidationException; import org.apache.airavata.model.error.ValidationResults; import org.apache.airavata.model.experiment.ExperimentModel; import org.apache.airavata.orchestrator.core.exception.OrchestratorException;
import org.apache.airavata.model.error.*; import org.apache.airavata.model.experiment.*; import org.apache.airavata.orchestrator.core.exception.*;
[ "org.apache.airavata" ]
org.apache.airavata;
1,835,278
AbstractSagaManager<S> manager();
AbstractSagaManager<S> manager();
/** * Retrieve the Saga Manager in this Configuration. * * @return the Manager for this Saga Configuration */
Retrieve the Saga Manager in this Configuration
manager
{ "repo_name": "krosenvold/AxonFramework", "path": "config/src/main/java/org/axonframework/config/SagaConfiguration.java", "license": "apache-2.0", "size": 2466 }
[ "org.axonframework.modelling.saga.AbstractSagaManager" ]
import org.axonframework.modelling.saga.AbstractSagaManager;
import org.axonframework.modelling.saga.*;
[ "org.axonframework.modelling" ]
org.axonframework.modelling;
984,894
public DataStatement createLiteralStatement(String token) throws ParseException { // Const if (dataMap.getVarPattern().isStringToken(token)) { return new StringStatement(stringMap.get(token)); } if (dataMap.getVarPattern().isIntegerToken(token)) { return new IntegerStatement(token); } if (dataMap.getVarPattern().isDecimalToken(token)) { return new DecimalStatement(token); } if (dataMap.getVarPattern().isDateToken(token)) { return new DateStatement(getDate(token)); } return null; }
DataStatement function(String token) throws ParseException { if (dataMap.getVarPattern().isStringToken(token)) { return new StringStatement(stringMap.get(token)); } if (dataMap.getVarPattern().isIntegerToken(token)) { return new IntegerStatement(token); } if (dataMap.getVarPattern().isDecimalToken(token)) { return new DecimalStatement(token); } if (dataMap.getVarPattern().isDateToken(token)) { return new DateStatement(getDate(token)); } return null; }
/** * Creates a Statement from a literal value: String, Integer, Decimal, * Date, Boolean or List. * * @param token * The token containing the literal that can be parsed into a * Statement extending <code>LiteralStatement</code>. * @return The <code>LiteralStatement</code> instance, or null if the * Statement could not be created. * @throws ParseException */
Creates a Statement from a literal value: String, Integer, Decimal, Date, Boolean or List
createLiteralStatement
{ "repo_name": "siviotti/ubre", "path": "ubre-core/src/main/java/br/net/ubre/lang/parse/ExpParser.java", "license": "gpl-3.0", "size": 5802 }
[ "br.net.ubre.lang.data.DataStatement", "br.net.ubre.lang.data.literal.DateStatement", "br.net.ubre.lang.data.literal.DecimalStatement", "br.net.ubre.lang.data.literal.IntegerStatement", "br.net.ubre.lang.data.literal.StringStatement", "java.text.ParseException" ]
import br.net.ubre.lang.data.DataStatement; import br.net.ubre.lang.data.literal.DateStatement; import br.net.ubre.lang.data.literal.DecimalStatement; import br.net.ubre.lang.data.literal.IntegerStatement; import br.net.ubre.lang.data.literal.StringStatement; import java.text.ParseException;
import br.net.ubre.lang.data.*; import br.net.ubre.lang.data.literal.*; import java.text.*;
[ "br.net.ubre", "java.text" ]
br.net.ubre; java.text;
706,238
public static NodeEvent fromOSCMessage( OSCMessage msg, Object source, long when, Node node ) { final int eventID = collValidCmds.indexOf( msg.getName() ); if( eventID == -1 ) throw new IllegalArgumentException( "Not a valid node notification message : " + msg.getName() ); final int nodeID = ((Number) msg.getArg( 0 )).intValue(); final int parentID = ((Number) msg.getArg( 1 )).intValue(); final int predID = ((Number) msg.getArg( 2 )).intValue(); final int succID = ((Number) msg.getArg( 3 )).intValue(); final int nodeType = ((Number) msg.getArg( 4 )).intValue(); final int headID = nodeType == GROUP ? ((Number) msg.getArg( 5 )).intValue() : -1; final int tailID = nodeType == GROUP ? ((Number) msg.getArg( 6 )).intValue() : -1; // let's trust the programmer for the sake of speed // if( node != null ) { // if( node.getNodeID() != nodeID ) throw new IllegalArgumentException( "Message and Node have different nodeIDs" ); // if( nodeType == SYNTH ) { // if( !(node instanceof Synth) ) throw new IllegalArgumentException( "Message and Node have different nodeTypes" ); // } else if( nodeType == GROUP ) { // if( !(node instanceof Group) ) throw new IllegalArgumentException( "Message and Node have different nodeTypes" ); // } // } return new NodeEvent( source, eventID, when, node, nodeID, parentID, predID, succID, nodeType, headID, tailID ); }
static NodeEvent function( OSCMessage msg, Object source, long when, Node node ) { final int eventID = collValidCmds.indexOf( msg.getName() ); if( eventID == -1 ) throw new IllegalArgumentException( STR + msg.getName() ); final int nodeID = ((Number) msg.getArg( 0 )).intValue(); final int parentID = ((Number) msg.getArg( 1 )).intValue(); final int predID = ((Number) msg.getArg( 2 )).intValue(); final int succID = ((Number) msg.getArg( 3 )).intValue(); final int nodeType = ((Number) msg.getArg( 4 )).intValue(); final int headID = nodeType == GROUP ? ((Number) msg.getArg( 5 )).intValue() : -1; final int tailID = nodeType == GROUP ? ((Number) msg.getArg( 6 )).intValue() : -1; return new NodeEvent( source, eventID, when, node, nodeID, parentID, predID, succID, nodeType, headID, tailID ); }
/** * Constructs a <code>NodeEvent</code> from a valid node * notification OSC message. The provided node object is simply * stored for future reference through <code>getNode</code> and must * be updated by the caller according to the returned event. * * @param msg OSC message such as <code>/n_go</code> * @param source who shall be known as the source of the generated event * @param when what is proposed time of the event generation * @param node a client side representation node to use for the event, * or <code>null</code> if no representation is known. The caller is * responsible for updating the node's status from the returned * event. * * @throws IllegalArgumentException if the message doesn't contain a valid node message; you * can use <code>getIDFromOSCMessage</code> to determine if the * message is valid. */
Constructs a <code>NodeEvent</code> from a valid node notification OSC message. The provided node object is simply stored for future reference through <code>getNode</code> and must be updated by the caller according to the returned event
fromOSCMessage
{ "repo_name": "acm-uiuc/Tacchi", "path": "libs/JCollider/src/de/sciss/jcollider/NodeEvent.java", "license": "gpl-2.0", "size": 13815 }
[ "de.sciss.net.OSCMessage" ]
import de.sciss.net.OSCMessage;
import de.sciss.net.*;
[ "de.sciss.net" ]
de.sciss.net;
605,407
@Nonnull public OutputStream getOutputStream() { return out; }
OutputStream function() { return out; }
/** * Returns the underlying OutputStream to which the records are written. * * @return The OutputStream */
Returns the underlying OutputStream to which the records are written
getOutputStream
{ "repo_name": "erikandre/hprof-tools", "path": "hprof-lib/src/main/java/com/badoo/hprof/library/heap/HeapDumpWriter.java", "license": "mit", "size": 5146 }
[ "java.io.OutputStream" ]
import java.io.OutputStream;
import java.io.*;
[ "java.io" ]
java.io;
789,001
private synchronized Set<Locale> getAvailableLocaleSet() { if (availableLocales == null) { availableLocales = new HashSet<>(); for (LocaleServiceProvider lsp : providers.values()) { Locale[] locales = lsp.getAvailableLocales(); for (Locale locale: locales) { availableLocales.add(getLookupLocale(locale)); } } } return availableLocales; }
synchronized Set<Locale> function() { if (availableLocales == null) { availableLocales = new HashSet<>(); for (LocaleServiceProvider lsp : providers.values()) { Locale[] locales = lsp.getAvailableLocales(); for (Locale locale: locales) { availableLocales.add(getLookupLocale(locale)); } } } return availableLocales; }
/** * Returns the union of locale sets that are available from * each service provider. This method does NOT return a * defensive copy. * * @return a set of available locales */
Returns the union of locale sets that are available from each service provider. This method does NOT return a defensive copy
getAvailableLocaleSet
{ "repo_name": "rokn/Count_Words_2015", "path": "testing/openjdk2/jdk/src/share/classes/sun/util/locale/provider/LocaleServiceProviderPool.java", "license": "mit", "size": 15978 }
[ "java.util.HashSet", "java.util.Locale", "java.util.Set", "java.util.spi.LocaleServiceProvider" ]
import java.util.HashSet; import java.util.Locale; import java.util.Set; import java.util.spi.LocaleServiceProvider;
import java.util.*; import java.util.spi.*;
[ "java.util" ]
java.util;
1,989,447
@JsonIgnore public boolean isSuccess() { return type != ResultType.FAILURE; }
boolean function() { return type != ResultType.FAILURE; }
/** * For now "success" means "not failure", which introduces the least surprise for tests that have * an assumption violation / failure. Tests that fall into this category are still considered * "successful" by buck, though other parts of the system (specifically, event listeners) can do * differently if they please. */
For now "success" means "not failure", which introduces the least surprise for tests that have an assumption violation / failure. Tests that fall into this category are still considered "successful" by buck, though other parts of the system (specifically, event listeners) can do differently if they please
isSuccess
{ "repo_name": "MarkRunWu/buck", "path": "src/com/facebook/buck/test/TestResultSummary.java", "license": "apache-2.0", "size": 4582 }
[ "com.facebook.buck.test.result.type.ResultType" ]
import com.facebook.buck.test.result.type.ResultType;
import com.facebook.buck.test.result.type.*;
[ "com.facebook.buck" ]
com.facebook.buck;
906,995
protected boolean supportsInternal(final WebApplicationService singleLogoutService, final RegisteredService registeredService, final SingleLogoutExecutionRequest context) { return true; }
boolean function(final WebApplicationService singleLogoutService, final RegisteredService registeredService, final SingleLogoutExecutionRequest context) { return true; }
/** * Supports internal. * * @param singleLogoutService the single logout service * @param registeredService the registered service * @param context the context * @return true /false */
Supports internal
supportsInternal
{ "repo_name": "apereo/cas", "path": "core/cas-server-core-logout-api/src/main/java/org/apereo/cas/logout/slo/BaseSingleLogoutServiceMessageHandler.java", "license": "apache-2.0", "size": 10195 }
[ "org.apereo.cas.authentication.principal.WebApplicationService", "org.apereo.cas.logout.SingleLogoutExecutionRequest", "org.apereo.cas.services.RegisteredService" ]
import org.apereo.cas.authentication.principal.WebApplicationService; import org.apereo.cas.logout.SingleLogoutExecutionRequest; import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.authentication.principal.*; import org.apereo.cas.logout.*; import org.apereo.cas.services.*;
[ "org.apereo.cas" ]
org.apereo.cas;
2,757,991
//----------------------------------------------------------------------- public Builder fixingDate(LocalDate fixingDate) { JodaBeanUtils.notNull(fixingDate, "fixingDate"); this.fixingDate = fixingDate; return this; }
Builder function(LocalDate fixingDate) { JodaBeanUtils.notNull(fixingDate, STR); this.fixingDate = fixingDate; return this; }
/** * Sets the fixing date to use to determine a rate for the reset period. * <p> * This is an adjusted date with any business day rule applied. * Valid business days are defined by {@link IborIndex#getFixingCalendar()}. * @param fixingDate the new value, not null * @return this, for chaining, not null */
Sets the fixing date to use to determine a rate for the reset period. This is an adjusted date with any business day rule applied. Valid business days are defined by <code>IborIndex#getFixingCalendar()</code>
fixingDate
{ "repo_name": "nssales/Strata", "path": "modules/finance/src/main/java/com/opengamma/strata/finance/rate/IborAveragedFixing.java", "license": "apache-2.0", "size": 17747 }
[ "java.time.LocalDate", "org.joda.beans.JodaBeanUtils" ]
import java.time.LocalDate; import org.joda.beans.JodaBeanUtils;
import java.time.*; import org.joda.beans.*;
[ "java.time", "org.joda.beans" ]
java.time; org.joda.beans;
1,803,882
private List<String> getComponentModules(CmsSetupComponent component) { List<String> modules = new ArrayList<String>(); Iterator<String> itModules = m_availableModules.keySet().iterator(); while (itModules.hasNext()) { String moduleName = itModules.next(); if (component.match(moduleName)) { modules.add(moduleName); } } return modules; }
List<String> function(CmsSetupComponent component) { List<String> modules = new ArrayList<String>(); Iterator<String> itModules = m_availableModules.keySet().iterator(); while (itModules.hasNext()) { String moduleName = itModules.next(); if (component.match(moduleName)) { modules.add(moduleName); } } return modules; }
/** * Returns a list of matching modules for the given component.<p> * * @param component the component to get the modules for * * @return a list of matching module names */
Returns a list of matching modules for the given component
getComponentModules
{ "repo_name": "ggiudetti/opencms-core", "path": "src-setup/org/opencms/setup/CmsSetupBean.java", "license": "lgpl-2.1", "size": 116372 }
[ "java.util.ArrayList", "java.util.Iterator", "java.util.List" ]
import java.util.ArrayList; import java.util.Iterator; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,544,479
@Nullable public static FolderConfiguration getConfig(@NonNull String[] folderSegments) { Iterator<String> iterator = Iterators.forArray(folderSegments); if (iterator.hasNext()) { // Skip the first segment: it should be just the base folder, such as "values" or // "layout" iterator.next(); } return getConfigFromQualifiers(iterator); }
static FolderConfiguration function(@NonNull String[] folderSegments) { Iterator<String> iterator = Iterators.forArray(folderSegments); if (iterator.hasNext()) { iterator.next(); } return getConfigFromQualifiers(iterator); }
/** * Creates a {@link FolderConfiguration} matching the folder segments. * @param folderSegments The segments of the folder name. The first segment should contain * the name of the folder. * @return a FolderConfiguration object, or null if the folder name isn't valid. */
Creates a <code>FolderConfiguration</code> matching the folder segments
getConfig
{ "repo_name": "consulo/consulo-android", "path": "tools-base/sdk-common/src/main/java/com/android/ide/common/resources/configuration/FolderConfiguration.java", "license": "apache-2.0", "size": 44491 }
[ "com.android.annotations.NonNull", "com.google.common.collect.Iterators", "java.util.Iterator" ]
import com.android.annotations.NonNull; import com.google.common.collect.Iterators; import java.util.Iterator;
import com.android.annotations.*; import com.google.common.collect.*; import java.util.*;
[ "com.android.annotations", "com.google.common", "java.util" ]
com.android.annotations; com.google.common; java.util;
1,795,503
String[] order = new String[statusOrder.length]; for (int i = 0; i < statusOrder.length; i++) { order[i] = statusOrder[i].getCode(); } setStatusOrder(Arrays.asList(order)); }
String[] order = new String[statusOrder.length]; for (int i = 0; i < statusOrder.length; i++) { order[i] = statusOrder[i].getCode(); } setStatusOrder(Arrays.asList(order)); }
/** * Set the ordering of the status. * @param statusOrder an ordered list of the status */
Set the ordering of the status
setStatusOrder
{ "repo_name": "bclozel/spring-boot", "path": "spring-boot-project/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/health/OrderedHealthAggregator.java", "license": "apache-2.0", "size": 3203 }
[ "java.util.Arrays" ]
import java.util.Arrays;
import java.util.*;
[ "java.util" ]
java.util;
356,528
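A short usage sketch for the setter shown above; the Status constants and the varargs overload are assumptions based on Spring Boot's actuator API (the shown code only indicates that each element's getCode() is collected into a string list):
OrderedHealthAggregator aggregator = new OrderedHealthAggregator();
aggregator.setStatusOrder(Status.DOWN, Status.OUT_OF_SERVICE, Status.UP, Status.UNKNOWN); // most severe status first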
@Nullable private static NettyConfig createNettyConfig( Configuration configuration, boolean localTaskManagerCommunication, InetAddress taskManagerAddress, int dataport) { final NettyConfig nettyConfig; if (!localTaskManagerCommunication) { final InetSocketAddress taskManagerInetSocketAddress = new InetSocketAddress(taskManagerAddress, dataport); nettyConfig = new NettyConfig( taskManagerInetSocketAddress.getAddress(), taskManagerInetSocketAddress.getPort(), ConfigurationParserUtils.getPageSize(configuration), ConfigurationParserUtils.getSlot(configuration), configuration); } else { nettyConfig = null; } return nettyConfig; }
static NettyConfig function( Configuration configuration, boolean localTaskManagerCommunication, InetAddress taskManagerAddress, int dataport) { final NettyConfig nettyConfig; if (!localTaskManagerCommunication) { final InetSocketAddress taskManagerInetSocketAddress = new InetSocketAddress(taskManagerAddress, dataport); nettyConfig = new NettyConfig( taskManagerInetSocketAddress.getAddress(), taskManagerInetSocketAddress.getPort(), ConfigurationParserUtils.getPageSize(configuration), ConfigurationParserUtils.getSlot(configuration), configuration); } else { nettyConfig = null; } return nettyConfig; }
/** * Generates {@link NettyConfig} from Flink {@link Configuration}. * * @param configuration configuration object * @param localTaskManagerCommunication true, to skip initializing the network stack * @param taskManagerAddress identifying the IP address under which the TaskManager will be accessible * @param dataport data port for communication and data exchange * @return the netty configuration or {@code null} if communication is in the same task manager */
Generates <code>NettyConfig</code> from Flink <code>Configuration</code>
createNettyConfig
{ "repo_name": "greghogan/flink", "path": "flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/NettyShuffleEnvironmentConfiguration.java", "license": "apache-2.0", "size": 16123 }
[ "java.net.InetAddress", "java.net.InetSocketAddress", "org.apache.flink.configuration.Configuration", "org.apache.flink.runtime.io.network.netty.NettyConfig", "org.apache.flink.runtime.util.ConfigurationParserUtils" ]
import java.net.InetAddress; import java.net.InetSocketAddress; import org.apache.flink.configuration.Configuration; import org.apache.flink.runtime.io.network.netty.NettyConfig; import org.apache.flink.runtime.util.ConfigurationParserUtils;
import java.net.*; import org.apache.flink.configuration.*; import org.apache.flink.runtime.io.network.netty.*; import org.apache.flink.runtime.util.*;
[ "java.net", "org.apache.flink" ]
java.net; org.apache.flink;
184,171
@Test public void testRecordWithinOptionWithinRecordDDL() throws IOException { String schemaName = "testRecordWithinOptionWithinRecordDDL"; Schema schema = ConversionHiveTestUtils.readSchemaFromJsonFile(resourceDir, "recordWithinOptionWithinRecord_nested.json"); String q = HiveAvroORCQueryGenerator .generateCreateTableDDL(schema, schemaName, "file:/user/hive/warehouse/" + schemaName, Optional.<String>absent(), Optional.<Map<String, String>>absent(), Optional.<List<String>>absent(), Optional.<Map<String, HiveAvroORCQueryGenerator.COLUMN_SORT_ORDER>>absent(), Optional.<Integer>absent(), Optional.<String>absent(), Optional.<String>absent(), Optional.<String>absent(), null, isEvolutionEnabled, destinationTableMeta, new HashMap<String, String>()); Assert.assertEquals(q.trim(), ConversionHiveTestUtils.readQueryFromFile(resourceDir, "recordWithinOptionWithinRecord_nested.ddl")); }
void function() throws IOException { String schemaName = STR; Schema schema = ConversionHiveTestUtils.readSchemaFromJsonFile(resourceDir, STR); String q = HiveAvroORCQueryGenerator .generateCreateTableDDL(schema, schemaName, STR + schemaName, Optional.<String>absent(), Optional.<Map<String, String>>absent(), Optional.<List<String>>absent(), Optional.<Map<String, HiveAvroORCQueryGenerator.COLUMN_SORT_ORDER>>absent(), Optional.<Integer>absent(), Optional.<String>absent(), Optional.<String>absent(), Optional.<String>absent(), null, isEvolutionEnabled, destinationTableMeta, new HashMap<String, String>()); Assert.assertEquals(q.trim(), ConversionHiveTestUtils.readQueryFromFile(resourceDir, STR)); }
/*** * Test DDL generation for schema structured as: record within option within record * @throws IOException */
Test DDL generation for schema structured as: record within option within record
testRecordWithinOptionWithinRecordDDL
{ "repo_name": "jinhyukchang/gobblin", "path": "gobblin-data-management/src/test/java/org/apache/gobblin/data/management/conversion/hive/util/HiveAvroORCQueryGeneratorTest.java", "license": "apache-2.0", "size": 19371 }
[ "com.google.common.base.Optional", "java.io.IOException", "java.util.HashMap", "java.util.List", "java.util.Map", "org.apache.avro.Schema", "org.apache.gobblin.data.management.ConversionHiveTestUtils", "org.apache.gobblin.data.management.conversion.hive.query.HiveAvroORCQueryGenerator", "org.testng.Assert" ]
import com.google.common.base.Optional; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.avro.Schema; import org.apache.gobblin.data.management.ConversionHiveTestUtils; import org.apache.gobblin.data.management.conversion.hive.query.HiveAvroORCQueryGenerator; import org.testng.Assert;
import com.google.common.base.*; import java.io.*; import java.util.*; import org.apache.avro.*; import org.apache.gobblin.data.management.*; import org.apache.gobblin.data.management.conversion.hive.query.*; import org.testng.*;
[ "com.google.common", "java.io", "java.util", "org.apache.avro", "org.apache.gobblin", "org.testng" ]
com.google.common; java.io; java.util; org.apache.avro; org.apache.gobblin; org.testng;
2,505,444
public CountDownLatch getVisitAsync(String visitId, String responseFields, AsyncCallback<com.mozu.api.contracts.customer.Visit> callback) throws Exception { MozuClient<com.mozu.api.contracts.customer.Visit> client = com.mozu.api.clients.commerce.customer.VisitClient.getVisitClient( visitId, responseFields); client.setContext(_apiContext); return client.executeRequest(callback); }
CountDownLatch function(String visitId, String responseFields, AsyncCallback<com.mozu.api.contracts.customer.Visit> callback) throws Exception { MozuClient<com.mozu.api.contracts.customer.Visit> client = com.mozu.api.clients.commerce.customer.VisitClient.getVisitClient( visitId, responseFields); client.setContext(_apiContext); return client.executeRequest(callback); }
/** * Retrieves the details of the customer visit specified in the request. * <p><pre><code> * Visit visit = new Visit(); * CountDownLatch latch = visit.getVisit( visitId, responseFields, callback ); * latch.await() * </code></pre></p> * @param responseFields Use this field to include those fields which are not included by default. * @param visitId Unique identifier of the customer visit to update. * @param callback callback handler for asynchronous operations * @return com.mozu.api.contracts.customer.Visit * @see com.mozu.api.contracts.customer.Visit */
Retrieves the details of the customer visit specified in the request. <code><code> Visit visit = new Visit(); CountDownLatch latch = visit.getVisit( visitId, responseFields, callback ); latch.await() * </code></code>
getVisitAsync
{ "repo_name": "lakshmi-nair/mozu-java", "path": "mozu-javaasync-core/src/main/java/com/mozu/api/resources/commerce/customer/VisitResource.java", "license": "mit", "size": 16660 }
[ "com.mozu.api.AsyncCallback", "com.mozu.api.MozuClient", "java.util.concurrent.CountDownLatch" ]
import com.mozu.api.AsyncCallback; import com.mozu.api.MozuClient; import java.util.concurrent.CountDownLatch;
import com.mozu.api.*; import java.util.concurrent.*;
[ "com.mozu.api", "java.util" ]
com.mozu.api; java.util;
2,397,311
public static List<CompilerDiagnostic> collectDynamicCompilerDiagnostics( List<? extends IsolatedTaskWithCompilerDiagnostic<TestResult>> tasks) { List<CompilerDiagnostic> dynamicCompilerDiagnosticList = new ArrayList<CompilerDiagnostic>(); HashSet<CompilerDiagnostic> seen = new HashSet<CompilerDiagnostic>(); for (IsolatedTaskWithCompilerDiagnostic<TestResult> task : tasks) { CompilerDiagnostic diag = task.getCompilerDiagnostic(); if (diag != null && !seen.contains(diag)) { seen.add(diag); dynamicCompilerDiagnosticList.add(diag); } } return dynamicCompilerDiagnosticList; } /** * Create a "dynamic" {@link CompilationResult} where some test cases may have * produced {@link CompilerDiagnostic}s. * * @param programSource the scaffolded {@link ProgramSource} * @param dynamicCompilerDiagnosticList the list of dynamic {@link CompilerDiagnostic}s * @return the {@link CompilationResult}
static List<CompilerDiagnostic> function( List<? extends IsolatedTaskWithCompilerDiagnostic<TestResult>> tasks) { List<CompilerDiagnostic> dynamicCompilerDiagnosticList = new ArrayList<CompilerDiagnostic>(); HashSet<CompilerDiagnostic> seen = new HashSet<CompilerDiagnostic>(); for (IsolatedTaskWithCompilerDiagnostic<TestResult> task : tasks) { CompilerDiagnostic diag = task.getCompilerDiagnostic(); if (diag != null && !seen.contains(diag)) { seen.add(diag); dynamicCompilerDiagnosticList.add(diag); } } return dynamicCompilerDiagnosticList; } /** * Create a STR {@link CompilationResult} where some test cases may have * produced {@link CompilerDiagnostic}s. * * @param programSource the scaffolded {@link ProgramSource} * @param dynamicCompilerDiagnosticList the list of dynamic {@link CompilerDiagnostic}s * @return the {@link CompilationResult}
/** * Collect "dynamic" compiler diagnostics from completed {@link IsolatedTaskWithCompilerDiagnostic}s. * * @param tasks the list of {@link IsolatedTaskWithCompilerDiagnostic}s * @return list of {@link CompilerDiagnostic}s */
Collect "dynamic" compiler diagnostics from completed <code>IsolatedTaskWithCompilerDiagnostic</code>s
collectDynamicCompilerDiagnostics
{ "repo_name": "cloudcoderdotorg/CloudCoder", "path": "CloudCoderBuilder2/src/org/cloudcoder/builder2/javasandbox/SandboxUtil.java", "license": "agpl-3.0", "size": 4873 }
[ "java.util.ArrayList", "java.util.HashSet", "java.util.List", "org.cloudcoder.app.shared.model.CompilationResult", "org.cloudcoder.app.shared.model.CompilerDiagnostic", "org.cloudcoder.app.shared.model.TestResult", "org.cloudcoder.builder2.model.ProgramSource" ]
import java.util.ArrayList; import java.util.HashSet; import java.util.List; import org.cloudcoder.app.shared.model.CompilationResult; import org.cloudcoder.app.shared.model.CompilerDiagnostic; import org.cloudcoder.app.shared.model.TestResult; import org.cloudcoder.builder2.model.ProgramSource;
import java.util.*; import org.cloudcoder.app.shared.model.*; import org.cloudcoder.builder2.model.*;
[ "java.util", "org.cloudcoder.app", "org.cloudcoder.builder2" ]
java.util; org.cloudcoder.app; org.cloudcoder.builder2;
2,488,851
@WebMethod @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v202111") @RequestWrapper(localName = "getContactsByStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v202111", className = "com.google.api.ads.admanager.jaxws.v202111.ContactServiceInterfacegetContactsByStatement") @ResponseWrapper(localName = "getContactsByStatementResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v202111", className = "com.google.api.ads.admanager.jaxws.v202111.ContactServiceInterfacegetContactsByStatementResponse") public ContactPage getContactsByStatement( @WebParam(name = "statement", targetNamespace = "https://www.google.com/apis/ads/publisher/v202111") Statement statement) throws ApiException_Exception ;
@WebResult(name = "rval", targetNamespace = STRgetContactsByStatementSTRhttps: @ResponseWrapper(localName = "getContactsByStatementResponseSTRhttps: ContactPage function( @WebParam(name = "statementSTRhttps: Statement statement) throws ApiException_Exception ;
/** * * Gets a {@link ContactPage} of {@link Contact} objects that satisfy the given * {@link Statement#query}. The following fields are supported for filtering: * * <table> * <tr> * <th scope="col">PQL Property</th> <th scope="col">Object Property</th> * </tr> * <tr> * <td>{@code name}</td> * <td>{@link Contact#name}</td> * </tr> * <tr> * <td>{@code email}</td> * <td>{@link Contact#email}</td> * </tr> * <tr> * <td>{@code id}</td> * <td>{@link Contact#id}</td> * </tr> * <tr> * <td>{@code comment}</td> * <td>{@link Contact#comment}</td> * </tr> * <tr> * <td>{@code companyId}</td> * <td>{@link Contact#companyId}</td> * </tr> * <tr> * <td>{@code title}</td> * <td>{@link Contact#title}</td> * </tr> * <tr> * <td>{@code cellPhone}</td> * <td>{@link Contact#cellPhone}</td> * </tr> * <tr> * <td>{@code workPhone}</td> * <td>{@link Contact#workPhone}</td> * </tr> * <tr> * <td>{@code faxPhone}</td> * <td>{@link Contact#faxPhone}</td> * </tr> * <tr> * <td>{@code status}</td> * <td>{@link Contact#status}</td> * </tr> * </table> * * @param filterStatement a Publisher Query Language statement used to filter * a set of contacts * @return the contacts that match the given filter * * * @param statement * @return * returns com.google.api.ads.admanager.jaxws.v202111.ContactPage * @throws ApiException_Exception */
Gets a <code>ContactPage</code> of <code>Contact</code> objects that satisfy the given <code>Statement#query</code>. The following fields are supported for filtering: PQL Property Object Property name <code>Contact#name</code> email <code>Contact#email</code> id <code>Contact#id</code> comment <code>Contact#comment</code> companyId <code>Contact#companyId</code> title <code>Contact#title</code> cellPhone <code>Contact#cellPhone</code> workPhone <code>Contact#workPhone</code> faxPhone <code>Contact#faxPhone</code> status <code>Contact#status</code>
getContactsByStatement
{ "repo_name": "googleads/googleads-java-lib", "path": "modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202111/ContactServiceInterface.java", "license": "apache-2.0", "size": 6574 }
[ "javax.jws.WebParam", "javax.jws.WebResult", "javax.xml.ws.ResponseWrapper" ]
import javax.jws.WebParam; import javax.jws.WebResult; import javax.xml.ws.ResponseWrapper;
import javax.jws.*; import javax.xml.ws.*;
[ "javax.jws", "javax.xml" ]
javax.jws; javax.xml;
466,133
public void removePropertyChangeListener(String propertyName, PropertyChangeListener listener) { if (listener == null) { return; } this.changeSupport.removePropertyChangeListener(propertyName, listener); }
void function(String propertyName, PropertyChangeListener listener) { if (listener == null) { return; } this.changeSupport.removePropertyChangeListener(propertyName, listener); }
/** * Removes a PropertyChangeListener for a specific property on a session. * @param propertyName * @param listener */
Removes a PropertyChangeListener for a specific property on a session
removePropertyChangeListener
{ "repo_name": "MSOpenTech/orc-for-java-shared", "path": "live-auth/src/main/java/com/microsoft/live/LiveConnectSession.java", "license": "mit", "size": 10813 }
[ "java.beans.PropertyChangeListener" ]
import java.beans.PropertyChangeListener;
import java.beans.*;
[ "java.beans" ]
java.beans;
2,023,293
void setReturn(TypeReference value);
void setReturn(TypeReference value);
/** * Sets the value of the '{@link net.mlanoe.language.vhdl.Signature#getReturn <em>Return</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Return</em>' containment reference. * @see #getReturn() * @generated */
Sets the value of the '<code>net.mlanoe.language.vhdl.Signature#getReturn Return</code>' containment reference.
setReturn
{ "repo_name": "mlanoe/x-vhdl", "path": "plugins/net.mlanoe.language.vhdl/src-gen/net/mlanoe/language/vhdl/Signature.java", "license": "gpl-3.0", "size": 2322 }
[ "net.mlanoe.language.vhdl.type.TypeReference" ]
import net.mlanoe.language.vhdl.type.TypeReference;
import net.mlanoe.language.vhdl.type.*;
[ "net.mlanoe.language" ]
net.mlanoe.language;
1,079,576
public Iterator iterator() { return m_linkTable.values().iterator(); }
Iterator function() { return m_linkTable.values().iterator(); }
/** * Returns an iterator over the links in the table.<p> * * The objects iterated are of type <code>{@link CmsLink}</code>. * * @return a string iterator for internal link names */
Returns an iterator over the links in the table. The objects iterated are of type <code><code>CmsLink</code></code>
iterator
{ "repo_name": "comundus/opencms-comundus", "path": "src/main/java/org/opencms/staticexport/CmsLinkTable.java", "license": "lgpl-2.1", "size": 3905 }
[ "java.util.Iterator" ]
import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
1,058,725
@Nonnull public static <DATATYPE> ChangeWithValue <DATATYPE> createUnchanged (@Nullable final DATATYPE aValue) { return new ChangeWithValue <> (EChange.UNCHANGED, aValue); }
static <DATATYPE> ChangeWithValue <DATATYPE> function (@Nullable final DATATYPE aValue) { return new ChangeWithValue <> (EChange.UNCHANGED, aValue); }
/** * Create a new unchanged object with the given value. * * @param <DATATYPE> * The data type that is wrapped together with the change indicator * @param aValue * The value to be used. May be <code>null</code>. * @return Never <code>null</code>. */
Create a new unchanged object with the given value
createUnchanged
{ "repo_name": "phax/ph-commons", "path": "ph-commons/src/main/java/com/helger/commons/state/ChangeWithValue.java", "license": "apache-2.0", "size": 5085 }
[ "javax.annotation.Nullable" ]
import javax.annotation.Nullable;
import javax.annotation.*;
[ "javax.annotation" ]
javax.annotation;
1,687,118
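A small sketch grounded directly in the factory shown above; the payload string is arbitrary:
ChangeWithValue<String> unchanged = ChangeWithValue.createUnchanged("cached-value"); // wraps EChange.UNCHANGED together with the (possibly null) value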
private void writeFlowMod(IOFSwitch sw, short command, int bufferId, OFMatch match, short outPort) { // from openflow 1.0 spec - need to set these on a struct ofp_flow_mod: // struct ofp_flow_mod { // struct ofp_header header; // struct ofp_match match; // uint64_t cookie; // // // uint16_t command; // uint16_t idle_timeout; // uint16_t hard_timeout; // uint16_t priority; // uint32_t buffer_id; // uint16_t out_port; // uint16_t flags; // struct ofp_action_header actions[0]; // }; OFFlowMod flowMod = (OFFlowMod) floodlightProvider.getOFMessageFactory().getMessage(OFType.FLOW_MOD); flowMod.setMatch(match); flowMod.setCookie(LearningSwitch.LEARNING_SWITCH_COOKIE); flowMod.setCommand(command); flowMod.setIdleTimeout(LearningSwitch.IDLE_TIMEOUT_DEFAULT); flowMod.setHardTimeout(LearningSwitch.HARD_TIMEOUT_DEFAULT); flowMod.setPriority(LearningSwitch.PRIORITY_DEFAULT); flowMod.setBufferId(bufferId); flowMod.setOutPort((command == OFFlowMod.OFPFC_DELETE) ? outPort : OFPort.OFPP_NONE.getValue()); flowMod.setFlags((command == OFFlowMod.OFPFC_DELETE) ? 0 : (short) (1 << 0)); // OFPFF_SEND_FLOW_REM // set the ofp_action_header/out actions: // from the openflow 1.0 spec: need to set these on a struct ofp_action_output: // uint16_t type; // uint16_t len; // uint16_t port; // uint16_t max_len; // type/len are set because it is OFActionOutput, // and port, max_len are arguments to this constructor flowMod.setActions(Arrays.asList((OFAction) new OFActionOutput(outPort, (short) 0xffff))); flowMod.setLength((short) (OFFlowMod.MINIMUM_LENGTH + OFActionOutput.MINIMUM_LENGTH)); if (log.isTraceEnabled()) { log.trace("{} {} flow mod {}", new Object[]{ sw, (command == OFFlowMod.OFPFC_DELETE) ? "deleting" : "adding", flowMod }); } counterStore.updatePktOutFMCounterStore(sw, flowMod); // and write it out try { sw.write(flowMod, null); } catch (IOException e) { log.error("Failed to write {} to switch {}", new Object[]{ flowMod, sw }, e); } }
void function(IOFSwitch sw, short command, int bufferId, OFMatch match, short outPort) { OFFlowMod flowMod = (OFFlowMod) floodlightProvider.getOFMessageFactory().getMessage(OFType.FLOW_MOD); flowMod.setMatch(match); flowMod.setCookie(LearningSwitch.LEARNING_SWITCH_COOKIE); flowMod.setCommand(command); flowMod.setIdleTimeout(LearningSwitch.IDLE_TIMEOUT_DEFAULT); flowMod.setHardTimeout(LearningSwitch.HARD_TIMEOUT_DEFAULT); flowMod.setPriority(LearningSwitch.PRIORITY_DEFAULT); flowMod.setBufferId(bufferId); flowMod.setOutPort((command == OFFlowMod.OFPFC_DELETE) ? outPort : OFPort.OFPP_NONE.getValue()); flowMod.setFlags((command == OFFlowMod.OFPFC_DELETE) ? 0 : (short) (1 << 0)); flowMod.setActions(Arrays.asList((OFAction) new OFActionOutput(outPort, (short) 0xffff))); flowMod.setLength((short) (OFFlowMod.MINIMUM_LENGTH + OFActionOutput.MINIMUM_LENGTH)); if (log.isTraceEnabled()) { log.trace(STR, new Object[]{ sw, (command == OFFlowMod.OFPFC_DELETE) ? STR : STR, flowMod }); } counterStore.updatePktOutFMCounterStore(sw, flowMod); try { sw.write(flowMod, null); } catch (IOException e) { log.error(STR, new Object[]{ flowMod, sw }, e); } }
/** * Writes an OFFlowMod to a switch. * @param sw The switch to write the flowmod to. * @param command The FlowMod actions (add, delete, etc). * @param bufferId The buffer ID if the switch has buffered the packet. * @param match The OFMatch structure to write. * @param outPort The switch port to output it to. */
Writes an OFFlowMod to a switch
writeFlowMod
{ "repo_name": "vishalshubham/Multipath-Hedera-system-in-Floodlight-controller", "path": "src/main/java/net/floodlightcontroller/learningswitch/LearningSwitch.java", "license": "apache-2.0", "size": 22894 }
[ "java.io.IOException", "java.util.Arrays", "net.floodlightcontroller.core.IOFSwitch", "org.openflow.protocol.OFFlowMod", "org.openflow.protocol.OFMatch", "org.openflow.protocol.OFPort", "org.openflow.protocol.OFType", "org.openflow.protocol.action.OFAction", "org.openflow.protocol.action.OFActionOutput" ]
import java.io.IOException; import java.util.Arrays; import net.floodlightcontroller.core.IOFSwitch; import org.openflow.protocol.OFFlowMod; import org.openflow.protocol.OFMatch; import org.openflow.protocol.OFPort; import org.openflow.protocol.OFType; import org.openflow.protocol.action.OFAction; import org.openflow.protocol.action.OFActionOutput;
import java.io.*; import java.util.*; import net.floodlightcontroller.core.*; import org.openflow.protocol.*; import org.openflow.protocol.action.*;
[ "java.io", "java.util", "net.floodlightcontroller.core", "org.openflow.protocol" ]
java.io; java.util; net.floodlightcontroller.core; org.openflow.protocol;
737,725
protected void writeRtfContent() throws IOException { writeGroupMark(true); writeAttributes(getRtfAttributes(), null); } } private class RtfCloseGroupMark extends RtfElement { private int breakType = BREAK_NONE; RtfCloseGroupMark(RtfContainer parent, Writer w, int breakType) throws IOException { super(parent, w); this.breakType = breakType; }
void function() throws IOException { writeGroupMark(true); writeAttributes(getRtfAttributes(), null); } } private class RtfCloseGroupMark extends RtfElement { private int breakType = BREAK_NONE; RtfCloseGroupMark(RtfContainer parent, Writer w, int breakType) throws IOException { super(parent, w); this.breakType = breakType; }
/** * write RTF code of all our children * @throws IOException for I/O problems */
write RTF code of all our children
writeRtfContent
{ "repo_name": "pellcorp/fop", "path": "src/java/org/apache/fop/render/rtf/rtflib/rtfdoc/RtfTextrun.java", "license": "apache-2.0", "size": 17760 }
[ "java.io.IOException", "java.io.Writer" ]
import java.io.IOException; import java.io.Writer;
import java.io.*;
[ "java.io" ]
java.io;
1,468,615
public byte[] read(int length) throws IOException { byte[] array = new byte[length]; if (position < limit) { int remainder = limit - position; if (length >= remainder) { System.arraycopy(buffer, position, array, 0, remainder); length -= remainder; position = limit; readIteratively(array, remainder, length); } else { System.arraycopy(buffer, position, array, 0, length); position += length; } } else readIteratively(array, 0, length); return array; }
byte[] function(int length) throws IOException { byte[] array = new byte[length]; if (position < limit) { int remainder = limit - position; if (length >= remainder) { System.arraycopy(buffer, position, array, 0, remainder); length -= remainder; position = limit; readIteratively(array, remainder, length); } else { System.arraycopy(buffer, position, array, 0, length); position += length; } } else readIteratively(array, 0, length); return array; }
/** * Reads length bytes from the stream * * @param length * @return data read */
Reads length bytes from the stream
read
{ "repo_name": "aj-jaswanth/phoenix-proxy-server", "path": "Phoenix Proxy Server/src/in/rgukt/phoenix/core/protocols/BufferedStreamReader.java", "license": "gpl-2.0", "size": 1781 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,573,763
public static final SourceModel.Expr hasLeftEndpoint(SourceModel.Expr r) { return SourceModel.Expr.Application.make( new SourceModel.Expr[] {SourceModel.Expr.Var.make(Functions.hasLeftEndpoint), r}); } public static final QualifiedName hasLeftEndpoint = QualifiedName.make(CAL_Range.MODULE_NAME, "hasLeftEndpoint");
static final SourceModel.Expr function(SourceModel.Expr r) { return SourceModel.Expr.Application.make( new SourceModel.Expr[] {SourceModel.Expr.Var.make(Functions.hasLeftEndpoint), r}); } static final QualifiedName function = QualifiedName.make(CAL_Range.MODULE_NAME, STR);
/** * Returns whether the range has a left endpoint. * @param r (CAL type: <code>Cal.Utilities.Range.Range a</code>) * @return (CAL type: <code>Cal.Core.Prelude.Boolean</code>) */
Returns whether the range has a left endpoint
hasLeftEndpoint
{ "repo_name": "levans/Open-Quark", "path": "src/CAL_Libraries/src/org/openquark/cal/module/Cal/Utilities/CAL_Range.java", "license": "bsd-3-clause", "size": 34986 }
[ "org.openquark.cal.compiler.QualifiedName", "org.openquark.cal.compiler.SourceModel" ]
import org.openquark.cal.compiler.QualifiedName; import org.openquark.cal.compiler.SourceModel;
import org.openquark.cal.compiler.*;
[ "org.openquark.cal" ]
org.openquark.cal;
377,932
public void stopMonitoring() throws IOException { MyHomeSocketFactory.disconnect(monitorSk); }
void function() throws IOException { MyHomeSocketFactory.disconnect(monitorSk); }
/** * Close the monitor session * * @throws IOException * in case of communication error */
Close the monitor session
stopMonitoring
{ "repo_name": "rahulopengts/myhome", "path": "bundles/binding/org.openhab.binding.bticino/src/main/java/com/myhome/fcrisciani/connector/MyHomeJavaConnector.java", "license": "epl-1.0", "size": 10691 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,696,404
@Test public void testAtomicExtractUpdate() throws InterruptedException, ExecutionException { long numUpdates = 1_000_000; StreamingModeExecutionState state = new StreamingModeExecutionState( NameContextsForTests.nameContextForTest(), "testState", null, NoopProfileScope.NOOP); ExecutorService executor = Executors.newFixedThreadPool(2); AtomicBoolean doneWriting = new AtomicBoolean(false); Callable<Long> reader = () -> { long count = 0; boolean isLastRead; do { isLastRead = doneWriting.get(); CounterUpdate update = state.extractUpdate(false); if (update != null) { count += splitIntToLong(update.getInteger()); } } while (!isLastRead); return count; }; Runnable writer = () -> { for (int i = 0; i < numUpdates; i++) { state.takeSample(1L); } doneWriting.set(true); }; // NB: Reader is invoked before writer to ensure they execute concurrently. List<Future<Long>> results = executor.invokeAll( Lists.newArrayList(reader, Executors.callable(writer, 0L)), 2, TimeUnit.SECONDS); long count = results.get(0).get(); assertThat(count, equalTo(numUpdates)); }
void function() throws InterruptedException, ExecutionException { long numUpdates = 1_000_000; StreamingModeExecutionState state = new StreamingModeExecutionState( NameContextsForTests.nameContextForTest(), STR, null, NoopProfileScope.NOOP); ExecutorService executor = Executors.newFixedThreadPool(2); AtomicBoolean doneWriting = new AtomicBoolean(false); Callable<Long> reader = () -> { long count = 0; boolean isLastRead; do { isLastRead = doneWriting.get(); CounterUpdate update = state.extractUpdate(false); if (update != null) { count += splitIntToLong(update.getInteger()); } } while (!isLastRead); return count; }; Runnable writer = () -> { for (int i = 0; i < numUpdates; i++) { state.takeSample(1L); } doneWriting.set(true); }; List<Future<Long>> results = executor.invokeAll( Lists.newArrayList(reader, Executors.callable(writer, 0L)), 2, TimeUnit.SECONDS); long count = results.get(0).get(); assertThat(count, equalTo(numUpdates)); }
/** * Ensure that incrementing and extracting counter updates are correct under concurrent reader and * writer threads. */
Ensure that incrementing and extracting counter updates are correct under concurrent reader and writer threads
testAtomicExtractUpdate
{ "repo_name": "rangadi/incubator-beam", "path": "runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContextTest.java", "license": "apache-2.0", "size": 15689 }
[ "com.google.api.services.dataflow.model.CounterUpdate", "com.google.common.collect.Lists", "java.util.List", "java.util.concurrent.Callable", "java.util.concurrent.ExecutionException", "java.util.concurrent.ExecutorService", "java.util.concurrent.Executors", "java.util.concurrent.Future", "java.util.concurrent.TimeUnit", "java.util.concurrent.atomic.AtomicBoolean", "org.apache.beam.runners.dataflow.worker.StreamingModeExecutionContext", "org.apache.beam.runners.dataflow.worker.counters.DataflowCounterUpdateExtractor", "org.apache.beam.runners.dataflow.worker.profiler.ScopedProfiler", "org.hamcrest.Matchers", "org.junit.Assert" ]
import com.google.api.services.dataflow.model.CounterUpdate; import com.google.common.collect.Lists; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.beam.runners.dataflow.worker.StreamingModeExecutionContext; import org.apache.beam.runners.dataflow.worker.counters.DataflowCounterUpdateExtractor; import org.apache.beam.runners.dataflow.worker.profiler.ScopedProfiler; import org.hamcrest.Matchers; import org.junit.Assert;
import com.google.api.services.dataflow.model.*; import com.google.common.collect.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import org.apache.beam.runners.dataflow.worker.*; import org.apache.beam.runners.dataflow.worker.counters.*; import org.apache.beam.runners.dataflow.worker.profiler.*; import org.hamcrest.*; import org.junit.*;
[ "com.google.api", "com.google.common", "java.util", "org.apache.beam", "org.hamcrest", "org.junit" ]
com.google.api; com.google.common; java.util; org.apache.beam; org.hamcrest; org.junit;
2,381,960
private JPanel getDatabaseSubPanel () { // Create panel double [][] sizes = {{PAD, PREF, PAD, FILL, PAD}, {PAD, PREF, PAD}}; JPanel panel = new JPanel (new TableLayout (sizes)); // Create detail panel sizes = new double [][]{{PAD, 150, PAD, FILL, PAD}, {PAD, PREF, PAD, PREF, PAD, PREF, PAD, PREF, PAD, PREF, PAD, PREF, PAD}}; JPanel detailPanel = new JPanel (new TableLayout (sizes)); detailPanel.setBorder (BorderFactory.createTitledBorder("Details")); // Create text fields JLabel idLabel = new JLabel ("ID"); JLabel driverLabel = new JLabel ("Driver"); JLabel urlLabel = new JLabel ("URL"); JLabel usernameLabel = new JLabel ("Username"); JLabel passwordLabel = new JLabel ("Password"); // Text field driverIDLabelValue = new JLabel (); driverTextField = new JTextField (10); urlTextField = new JTextField (10); usernameTextField = new JTextField (10); passwordField = new JPasswordField (10); detailPanel.add (idLabel, "1,1"); detailPanel.add (driverIDLabelValue, "3,1"); detailPanel.add (driverLabel, "1,3"); detailPanel.add (driverTextField, "3,3"); detailPanel.add (urlLabel, "1,5"); detailPanel.add (urlTextField, "3,5"); detailPanel.add (usernameLabel, "1,7"); detailPanel.add (usernameTextField, "3,7"); detailPanel.add (passwordLabel, "1,9"); detailPanel.add (passwordField, "3,9"); driverIDLabelValue.setBorder(BorderFactory.createLoweredBevelBorder()); // Add test button testDatabaseButton = new JButton ("Test"); detailPanel.add (testDatabaseButton, "1,11"); // Add database list panel connectionListPanel = new ConnectionListPanel (serverProperties); this.connectionList = connectionListPanel.getList(); // extract list // Add list panel to the left and detail panel to the right panel.add (connectionListPanel, "1,1"); panel.add (detailPanel, "3,1"); return panel; }
JPanel function () { double [][] sizes = {{PAD, PREF, PAD, FILL, PAD}, {PAD, PREF, PAD}}; JPanel panel = new JPanel (new TableLayout (sizes)); sizes = new double [][]{{PAD, 150, PAD, FILL, PAD}, {PAD, PREF, PAD, PREF, PAD, PREF, PAD, PREF, PAD, PREF, PAD, PREF, PAD}}; JPanel detailPanel = new JPanel (new TableLayout (sizes)); detailPanel.setBorder (BorderFactory.createTitledBorder(STR)); JLabel idLabel = new JLabel ("ID"); JLabel driverLabel = new JLabel (STR); JLabel urlLabel = new JLabel ("URL"); JLabel usernameLabel = new JLabel (STR); JLabel passwordLabel = new JLabel (STR); driverIDLabelValue = new JLabel (); driverTextField = new JTextField (10); urlTextField = new JTextField (10); usernameTextField = new JTextField (10); passwordField = new JPasswordField (10); detailPanel.add (idLabel, "1,1"); detailPanel.add (driverIDLabelValue, "3,1"); detailPanel.add (driverLabel, "1,3"); detailPanel.add (driverTextField, "3,3"); detailPanel.add (urlLabel, "1,5"); detailPanel.add (urlTextField, "3,5"); detailPanel.add (usernameLabel, "1,7"); detailPanel.add (usernameTextField, "3,7"); detailPanel.add (passwordLabel, "1,9"); detailPanel.add (passwordField, "3,9"); driverIDLabelValue.setBorder(BorderFactory.createLoweredBevelBorder()); testDatabaseButton = new JButton ("Test"); detailPanel.add (testDatabaseButton, "1,11"); connectionListPanel = new ConnectionListPanel (serverProperties); this.connectionList = connectionListPanel.getList(); panel.add (connectionListPanel, "1,1"); panel.add (detailPanel, "3,1"); return panel; }
/** * Return the database sub panel. * * @return */
Return the database sub panel
getDatabaseSubPanel
{ "repo_name": "lsilvestre/Jogre", "path": "server/src/org/jogre/server/administrator/AdminServerPropertiesDialog.java", "license": "gpl-2.0", "size": 50813 }
[ "info.clearthought.layout.TableLayout", "javax.swing.BorderFactory", "javax.swing.JButton", "javax.swing.JLabel", "javax.swing.JPanel", "javax.swing.JPasswordField", "javax.swing.JTextField" ]
import info.clearthought.layout.TableLayout; import javax.swing.BorderFactory; import javax.swing.JButton; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JPasswordField; import javax.swing.JTextField;
import info.clearthought.layout.*; import javax.swing.*;
[ "info.clearthought.layout", "javax.swing" ]
info.clearthought.layout; javax.swing;
157,047
public Boolean getValue(RandomBoolean params) throws ValueGeneratorException { checkParamsNotNull(params); // check not null double probability = params.probability(); if (probability < 0.0 || probability > 1.0) { throw new ValueGeneratorException("Invalid value of probability: " + probability); } return (nextDouble(params) < probability) ? Boolean.TRUE : Boolean.FALSE; }
Boolean function(RandomBoolean params) throws ValueGeneratorException { checkParamsNotNull(params); double probability = params.probability(); if (probability < 0.0 || probability > 1.0) { throw new ValueGeneratorException(STR + probability); } return (nextDouble(params) < probability) ? Boolean.TRUE : Boolean.FALSE; }
/** * Returns random boolean value according to probability in given parameters. * * @param params the parameters of (random) boolean value generator. * @return Random boolean value. * @throws ValueGeneratorException If probability in given parameters is not between 0.0 and 1.0. */
Returns random boolean value according to probability in given parameters
getValue
{ "repo_name": "mrfranta/jop", "path": "jop-impl/src/main/java/cz/zcu/kiv/jop/generator/bool/RandomBooleanGenerator.java", "license": "apache-2.0", "size": 1294 }
[ "cz.zcu.kiv.jop.annotation.generator.bool.RandomBoolean", "cz.zcu.kiv.jop.generator.ValueGeneratorException" ]
import cz.zcu.kiv.jop.annotation.generator.bool.RandomBoolean; import cz.zcu.kiv.jop.generator.ValueGeneratorException;
import cz.zcu.kiv.jop.annotation.generator.bool.*; import cz.zcu.kiv.jop.generator.*;
[ "cz.zcu.kiv" ]
cz.zcu.kiv;
1,895,123
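A minimal sketch of the probability-driven boolean generation this record describes, using a plain java.util.Random instead of the library's checkParamsNotNull/nextDouble helpers (those helpers belong to the real jop API and are not reproduced here):

import java.util.Random;

class ProbabilityBooleanSketch {
    private final Random random = new Random();

    // Returns true with the requested probability; nextDouble() is uniform on
    // [0, 1), so it falls below the threshold in exactly that fraction of calls.
    Boolean nextValue(double probability) {
        if (probability < 0.0 || probability > 1.0) {
            throw new IllegalArgumentException("Invalid value of probability: " + probability);
        }
        return random.nextDouble() < probability ? Boolean.TRUE : Boolean.FALSE;
    }
}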
public String readText() throws IOException { switch (currentToken) { case VALUE_STRING: String text = jsonParser.getText(); return text; case VALUE_FALSE: return "false"; case VALUE_TRUE: return "true"; case VALUE_NULL: return null; case VALUE_NUMBER_FLOAT: case VALUE_NUMBER_INT: return jsonParser.getNumberValue().toString(); case FIELD_NAME: return jsonParser.getText(); default: throw new RuntimeException( "We expected a VALUE token but got: " + currentToken); } }
String function() throws IOException { switch (currentToken) { case VALUE_STRING: String text = jsonParser.getText(); return text; case VALUE_FALSE: return "false"; case VALUE_TRUE: return "true"; case VALUE_NULL: return null; case VALUE_NUMBER_FLOAT: case VALUE_NUMBER_INT: return jsonParser.getNumberValue().toString(); case FIELD_NAME: return jsonParser.getText(); default: throw new RuntimeException( STR + currentToken); } }
/** * Returns the text of the current token, or throws an exception if * the current token does not contain text (ex: '{', '}', etc.). * * @return The text of the current token. * * @throws IOException If reading from the underlying JSON parser fails. */
Returns the text of the current token, or throws an exception if the current token does not contain text (ex: '{', '}', etc.)
readText
{ "repo_name": "XidongHuang/aws-sdk-for-java", "path": "src/main/java/com/amazonaws/transform/JsonUnmarshallerContext.java", "license": "apache-2.0", "size": 8099 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,528,727
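To illustrate the token-to-text mapping that readText performs, here is a hedged stand-alone sketch using the current Jackson 2.x API; the SDK class in this record wraps its own parser type, so the calls below are an analogy rather than the SDK's actual code path:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

public class ReadTextSketch {
    public static void main(String[] args) throws Exception {
        JsonParser p = new JsonFactory().createParser("{\"flag\": true, \"count\": 3}");
        for (JsonToken t = p.nextToken(); t != null; t = p.nextToken()) {
            switch (t) {
                case FIELD_NAME:
                case VALUE_STRING:
                    System.out.println(p.getText());          // textual tokens
                    break;
                case VALUE_TRUE:
                case VALUE_FALSE:
                    System.out.println(p.getBooleanValue());  // prints true / false
                    break;
                case VALUE_NUMBER_INT:
                case VALUE_NUMBER_FLOAT:
                    System.out.println(p.getNumberValue());   // numeric value as text
                    break;
                default:
                    break; // structural tokens ('{', '}', ...) carry no text
            }
        }
    }
}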
private static void visitMethod(Type owner, ClassWriter cw, MethodConfig config) { java.lang.reflect.Method ifaceMethod = config.getMethod(); Type returnType = Type.getType(ifaceMethod.getReturnType()); Type[] args = new Type[ifaceMethod.getParameterTypes().length]; for (int i = 0; i < args.length; ++i) { args[i] = Type.getType(ifaceMethod.getParameterTypes()[i]); } Method method = new Method(ifaceMethod.getName(), returnType, args); GeneratorAdapter mg = new GeneratorAdapter(Opcodes.ACC_PUBLIC, method, null, null, cw); // Factory mg.loadThis(); mg.getField(owner, "delegate", QUERY_DELEGATE_TYPE); // Statement config mg.loadThis(); mg.getField(owner, "configs", STATEMENT_CONFIGS_ARR_TYPE); mg.push(config.getIndex()); mg.arrayLoad(Type.getType(StatementConfig.class)); // Arguments mg.loadArgArray(); mg.invokeInterface(QUERY_DELEGATE_TYPE, QUERY_METHOD); mg.unbox(returnType); mg.returnValue(); mg.endMethod(); // Copy the annotations copyAnnotations(ifaceMethod, null, mg); }
static void function(Type owner, ClassWriter cw, MethodConfig config) { java.lang.reflect.Method ifaceMethod = config.getMethod(); Type returnType = Type.getType(ifaceMethod.getReturnType()); Type[] args = new Type[ifaceMethod.getParameterTypes().length]; for (int i = 0; i < args.length; ++i) { args[i] = Type.getType(ifaceMethod.getParameterTypes()[i]); } Method method = new Method(ifaceMethod.getName(), returnType, args); GeneratorAdapter mg = new GeneratorAdapter(Opcodes.ACC_PUBLIC, method, null, null, cw); mg.loadThis(); mg.getField(owner, STR, QUERY_DELEGATE_TYPE); mg.loadThis(); mg.getField(owner, STR, STATEMENT_CONFIGS_ARR_TYPE); mg.push(config.getIndex()); mg.arrayLoad(Type.getType(StatementConfig.class)); mg.loadArgArray(); mg.invokeInterface(QUERY_DELEGATE_TYPE, QUERY_METHOD); mg.unbox(returnType); mg.returnValue(); mg.endMethod(); copyAnnotations(ifaceMethod, null, mg); }
/** * Generate mapper method. * * @param owner self type * @param cw class writer * @param config method configuration */
Generate mapper method
visitMethod
{ "repo_name": "idubrov/nanorm", "path": "src/main/java/com/google/code/nanorm/internal/introspect/asm/MapperBuilder.java", "license": "apache-2.0", "size": 9316 }
[ "com.google.code.nanorm.internal.config.StatementConfig", "org.objectweb.asm.ClassWriter", "org.objectweb.asm.Opcodes", "org.objectweb.asm.Type", "org.objectweb.asm.commons.GeneratorAdapter", "org.objectweb.asm.commons.Method" ]
import com.google.code.nanorm.internal.config.StatementConfig; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; import org.objectweb.asm.commons.GeneratorAdapter; import org.objectweb.asm.commons.Method;
import com.google.code.nanorm.internal.config.*; import org.objectweb.asm.*; import org.objectweb.asm.commons.*;
[ "com.google.code", "org.objectweb.asm" ]
com.google.code; org.objectweb.asm;
2,348,288
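Finally, a hedged minimal example of the ASM GeneratorAdapter API that visitMethod builds on: it emits one class with a single method "int answer()" returning 42. The real builder above additionally loads delegate/config fields, unboxes the return value and copies annotations; none of that is reproduced here.

import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.commons.GeneratorAdapter;
import org.objectweb.asm.commons.Method;

public class GeneratorAdapterSketch {
    public static byte[] generate() {
        ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
        cw.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC, "demo/Answer", null, "java/lang/Object", null);

        Method m = Method.getMethod("int answer()");
        GeneratorAdapter mg = new GeneratorAdapter(Opcodes.ACC_PUBLIC, m, null, null, cw);
        mg.push(42);        // push the constant onto the operand stack
        mg.returnValue();   // emits IRETURN for an int-returning method
        mg.endMethod();

        cw.visitEnd();
        return cw.toByteArray(); // bytecode for a minimal (constructor-less) class
    }
}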