Dataset columns (string columns show min/max lengths; `id` shows min/max values):

| Column | Type | Min | Max |
|---|---|---|---|
| method | string (length) | 13 | 441k |
| clean_method | string (length) | 7 | 313k |
| doc | string (length) | 17 | 17.3k |
| comment | string (length) | 3 | 1.42k |
| method_name | string (length) | 1 | 273 |
| extra | dict | | |
| imports | list | | |
| imports_info | string (length) | 19 | 34.8k |
| cluster_imports_info | string (length) | 15 | 3.66k |
| libraries | list | | |
| libraries_info | string (length) | 6 | 661 |
| id | int64 | 0 | 2.92M |
private void remConf() throws ConfirmModSchemaException { try { // Remove options group, asking user to confirm full sync if necessary mCol.getDecks().remConf(mOptions.getLong("id")); // Run the CPU intensive re-sort operation in a background thread DeckTask.launchDeckTask(DeckTask.TASK_TYPE_CONF_REMOVE, mConfChangeHandler, new DeckTask.TaskData(new Object[] { mOptions })); mDeck.put("conf", 1); } catch (JSONException e) { throw new RuntimeException(e); } } }
void function() throws ConfirmModSchemaException { try { mCol.getDecks().remConf(mOptions.getLong("id")); DeckTask.launchDeckTask(DeckTask.TASK_TYPE_CONF_REMOVE, mConfChangeHandler, new DeckTask.TaskData(new Object[] { mOptions })); mDeck.put("conf", 1); } catch (JSONException e) { throw new RuntimeException(e); } } }
/** * Remove the currently selected options group * @throws ConfirmModSchemaException */
Remove the currently selected options group
remConf
{ "repo_name": "mikeAopeneng/joyo-kanji", "path": "KanjiDroid/src/main/java/website/openeng/anki/DeckOptions.java", "license": "gpl-2.0", "size": 33380 }
[ "org.json.JSONException", "website.openeng.anki.exception.ConfirmModSchemaException", "website.openeng.async.DeckTask" ]
import org.json.JSONException; import website.openeng.anki.exception.ConfirmModSchemaException; import website.openeng.async.DeckTask;
import org.json.*; import website.openeng.anki.exception.*; import website.openeng.async.*;
[ "org.json", "website.openeng.anki", "website.openeng.async" ]
org.json; website.openeng.anki; website.openeng.async;
2,303,955
Pair<Integer, Long> stopRecordingDroppedFrames();
Pair<Integer, Long> stopRecordingDroppedFrames();
/** * Returns the number of frames dropped since starting **/
Returns the number of frames dropped since starting
stopRecordingDroppedFrames
{ "repo_name": "haowenbiao/AndroidProject", "path": "lottie-android-master/LottieSample/src/main/java/com/airbnb/lottie/samples/ILottieApplication.java", "license": "apache-2.0", "size": 268 }
[ "android.support.v4.util.Pair" ]
import android.support.v4.util.Pair;
import android.support.v4.util.*;
[ "android.support" ]
android.support;
1,389,237
private Animator createDialogSlideAnimator(boolean isEnter) { final float animHeight = -1f * mContainer.getHeight(); ObjectAnimator translateAnim; if (isEnter) { mContainer.setTranslationY(animHeight); translateAnim = ObjectAnimator.ofFloat(mContainer, View.TRANSLATION_Y, 0f); translateAnim.setInterpolator(BakedBezierInterpolator.FADE_IN_CURVE); } else { translateAnim = ObjectAnimator.ofFloat(mContainer, View.TRANSLATION_Y, animHeight); translateAnim.setInterpolator(BakedBezierInterpolator.FADE_OUT_CURVE); } translateAnim.setDuration(FADE_DURATION); return translateAnim; }
Animator function(boolean isEnter) { final float animHeight = -1f * mContainer.getHeight(); ObjectAnimator translateAnim; if (isEnter) { mContainer.setTranslationY(animHeight); translateAnim = ObjectAnimator.ofFloat(mContainer, View.TRANSLATION_Y, 0f); translateAnim.setInterpolator(BakedBezierInterpolator.FADE_IN_CURVE); } else { translateAnim = ObjectAnimator.ofFloat(mContainer, View.TRANSLATION_Y, animHeight); translateAnim.setInterpolator(BakedBezierInterpolator.FADE_OUT_CURVE); } translateAnim.setDuration(FADE_DURATION); return translateAnim; }
/** * Create an animator to slide in the entire dialog from the top of the screen. */
Create an animator to slide in the entire dialog from the top of the screen
createDialogSlideAnimator
{ "repo_name": "Just-D/chromium-1", "path": "chrome/android/java/src/org/chromium/chrome/browser/WebsiteSettingsPopup.java", "license": "bsd-3-clause", "size": 42606 }
[ "android.animation.Animator", "android.animation.ObjectAnimator", "android.view.View", "org.chromium.ui.interpolators.BakedBezierInterpolator" ]
import android.animation.Animator; import android.animation.ObjectAnimator; import android.view.View; import org.chromium.ui.interpolators.BakedBezierInterpolator;
import android.animation.*; import android.view.*; import org.chromium.ui.interpolators.*;
[ "android.animation", "android.view", "org.chromium.ui" ]
android.animation; android.view; org.chromium.ui;
2,476,771
protected String getNamespace(HasMetadata entity) { String answer = KubernetesHelper.getNamespace(entity); if (Strings.isNullOrBlank(answer)) { answer = getNamespace(); } // lest make sure the namespace exists applyNamespace(answer); return answer; }
String function(HasMetadata entity) { String answer = KubernetesHelper.getNamespace(entity); if (Strings.isNullOrBlank(answer)) { answer = getNamespace(); } applyNamespace(answer); return answer; }
/** * Returns the namespace defined in the entity or the configured namespace */
Returns the namespace defined in the entity or the configured namespace
getNamespace
{ "repo_name": "chirino/fabric8v2", "path": "components/kubernetes-api/src/main/java/io/fabric8/kubernetes/api/Controller.java", "license": "apache-2.0", "size": 70148 }
[ "io.fabric8.kubernetes.api.model.HasMetadata", "io.fabric8.utils.Strings" ]
import io.fabric8.kubernetes.api.model.HasMetadata; import io.fabric8.utils.Strings;
import io.fabric8.kubernetes.api.model.*; import io.fabric8.utils.*;
[ "io.fabric8.kubernetes", "io.fabric8.utils" ]
io.fabric8.kubernetes; io.fabric8.utils;
1,320,680
@Override public void zoomRangeAxes(double lowerPercent, double upperPercent, PlotRenderingInfo info, Point2D source) { // delegate 'info' and 'source' argument checks... CategoryPlot subplot = findSubplot(info, source); if (subplot != null) { subplot.zoomRangeAxes(lowerPercent, upperPercent, info, source); } else { // if the source point doesn't fall within a subplot, we do the // zoom on all subplots... Iterator iterator = getSubplots().iterator(); while (iterator.hasNext()) { subplot = (CategoryPlot) iterator.next(); subplot.zoomRangeAxes(lowerPercent, upperPercent, info, source); } } }
void function(double lowerPercent, double upperPercent, PlotRenderingInfo info, Point2D source) { CategoryPlot subplot = findSubplot(info, source); if (subplot != null) { subplot.zoomRangeAxes(lowerPercent, upperPercent, info, source); } else { Iterator iterator = getSubplots().iterator(); while (iterator.hasNext()) { subplot = (CategoryPlot) iterator.next(); subplot.zoomRangeAxes(lowerPercent, upperPercent, info, source); } } }
/** * Zooms in on the range axes. * * @param lowerPercent the lower bound. * @param upperPercent the upper bound. * @param info the plot rendering info (<code>null</code> not permitted). * @param source the source point (<code>null</code> not permitted). */
Zooms in on the range axes
zoomRangeAxes
{ "repo_name": "sebkur/JFreeChart", "path": "src/main/java/org/jfree/chart/plot/CombinedDomainCategoryPlot.java", "license": "lgpl-3.0", "size": 25498 }
[ "java.awt.geom.Point2D", "java.util.Iterator" ]
import java.awt.geom.Point2D; import java.util.Iterator;
import java.awt.geom.*; import java.util.*;
[ "java.awt", "java.util" ]
java.awt; java.util;
2,279,538
@SuppressWarnings("unchecked") public MBeanInfoWrapper readMBeanInfo(InputStream in) throws ConversionException, IOException, ClassNotFoundException { JSONObject json = parseObject(in); MBeanInfoWrapper ret = new MBeanInfoWrapper(); if (USE_BASE64_FOR_MBEANINFO) { Object o = readSerialized(json.get(N_SERIALIZED)); if (!(o instanceof MBeanInfo)) { throwConversionException("readMBeanInfo() receives an instance that's not a MBeanInfo.", json.get(N_SERIALIZED)); } ret.mbeanInfo = (MBeanInfo) o; ret.attributesURL = readStringInternal(json.get(N_ATTRIBUTES_URL)); o = readSerialized(json.get(OM_ATTRIBUTES)); if (!(o instanceof HashMap)) { throwConversionException("readMBeanInfo() receives an instance that's not a HashMap.", json.get(OM_ATTRIBUTES)); } ret.attributeURLs = (Map<String, String>) o; o = readSerialized(json.get(OM_OPERATIONS)); if (!(o instanceof HashMap)) { throwConversionException("readMBeanInfo() receives an instance that's not a HashMap.", json.get(OM_OPERATIONS)); } ret.operationURLs = (Map<String, String>) o; return ret; } ret.attributeURLs = new HashMap<String, String>(); ret.operationURLs = new HashMap<String, String>(); String className = readStringInternal(json.get(N_CLASSNAME)); String description = readStringInternal(json.get(N_DESCRIPTION)); Descriptor descriptor = readDescriptor(json.get(N_DESCRIPTOR)); MBeanAttributeInfo[] attributes = readAttributes(json.get(N_ATTRIBUTES), ret.attributeURLs); String attributeURL = readStringInternal(json.get(N_ATTRIBUTES_URL)); MBeanConstructorInfo[] constructors = readConstructors(json.get(N_CONSTRUCTORS)); MBeanNotificationInfo[] notifications = readNotifications(json.get(N_NOTIFICATIONS)); MBeanOperationInfo[] operations = readOperations(json.get(N_OPERATIONS), ret.operationURLs); ret.attributesURL = attributeURL; Object o = json.get(N_SERIALIZED); if (o != null) { o = readSerialized(o); if (!(o instanceof MBeanInfo)) { throwConversionException("readMBeanInfo() receives an instance that's not a MBeanInfo.", json.get(N_SERIALIZED)); } ret.mbeanInfo = (MBeanInfo) o; } else { ret.mbeanInfo = new MBeanInfo(className, description, attributes, constructors, operations, notifications, descriptor); } return ret; }
@SuppressWarnings(STR) MBeanInfoWrapper function(InputStream in) throws ConversionException, IOException, ClassNotFoundException { JSONObject json = parseObject(in); MBeanInfoWrapper ret = new MBeanInfoWrapper(); if (USE_BASE64_FOR_MBEANINFO) { Object o = readSerialized(json.get(N_SERIALIZED)); if (!(o instanceof MBeanInfo)) { throwConversionException(STR, json.get(N_SERIALIZED)); } ret.mbeanInfo = (MBeanInfo) o; ret.attributesURL = readStringInternal(json.get(N_ATTRIBUTES_URL)); o = readSerialized(json.get(OM_ATTRIBUTES)); if (!(o instanceof HashMap)) { throwConversionException(STR, json.get(OM_ATTRIBUTES)); } ret.attributeURLs = (Map<String, String>) o; o = readSerialized(json.get(OM_OPERATIONS)); if (!(o instanceof HashMap)) { throwConversionException(STR, json.get(OM_OPERATIONS)); } ret.operationURLs = (Map<String, String>) o; return ret; } ret.attributeURLs = new HashMap<String, String>(); ret.operationURLs = new HashMap<String, String>(); String className = readStringInternal(json.get(N_CLASSNAME)); String description = readStringInternal(json.get(N_DESCRIPTION)); Descriptor descriptor = readDescriptor(json.get(N_DESCRIPTOR)); MBeanAttributeInfo[] attributes = readAttributes(json.get(N_ATTRIBUTES), ret.attributeURLs); String attributeURL = readStringInternal(json.get(N_ATTRIBUTES_URL)); MBeanConstructorInfo[] constructors = readConstructors(json.get(N_CONSTRUCTORS)); MBeanNotificationInfo[] notifications = readNotifications(json.get(N_NOTIFICATIONS)); MBeanOperationInfo[] operations = readOperations(json.get(N_OPERATIONS), ret.operationURLs); ret.attributesURL = attributeURL; Object o = json.get(N_SERIALIZED); if (o != null) { o = readSerialized(o); if (!(o instanceof MBeanInfo)) { throwConversionException(STR, json.get(N_SERIALIZED)); } ret.mbeanInfo = (MBeanInfo) o; } else { ret.mbeanInfo = new MBeanInfo(className, description, attributes, constructors, operations, notifications, descriptor); } return ret; }
/** * Decode a JSON document to retrieve an MBeanInfoWrapper instance. * * Note that all descriptors are of class ImmutableDescriptor. * * @param in The stream to read JSON from * @return The decoded MBeanInfoWrapper instance * @throws ConversionException If JSON uses unexpected structure/format * @throws IOException If an I/O error occurs or if JSON is ill-formed. * @throws ClassNotFoundException If needed class can't be found. * @see #writeMBeanInfo(OutputStream, MBeanInfoWrapper) */
Decode a JSON document to retrieve an MBeanInfoWrapper instance. Note that all descriptors are of class ImmutableDescriptor
readMBeanInfo
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.ws.jmx.connector.client.rest/src/com/ibm/ws/jmx/connector/converter/JSONConverter.java", "license": "epl-1.0", "size": 184410 }
[ "com.ibm.json.java.JSONObject", "com.ibm.ws.jmx.connector.datatypes.ConversionException", "com.ibm.ws.jmx.connector.datatypes.MBeanInfoWrapper", "java.io.IOException", "java.io.InputStream", "java.util.HashMap", "java.util.Map", "javax.management.Descriptor", "javax.management.MBeanAttributeInfo", "javax.management.MBeanConstructorInfo", "javax.management.MBeanInfo", "javax.management.MBeanNotificationInfo", "javax.management.MBeanOperationInfo" ]
import com.ibm.json.java.JSONObject; import com.ibm.ws.jmx.connector.datatypes.ConversionException; import com.ibm.ws.jmx.connector.datatypes.MBeanInfoWrapper; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Map; import javax.management.Descriptor; import javax.management.MBeanAttributeInfo; import javax.management.MBeanConstructorInfo; import javax.management.MBeanInfo; import javax.management.MBeanNotificationInfo; import javax.management.MBeanOperationInfo;
import com.ibm.json.java.*; import com.ibm.ws.jmx.connector.datatypes.*; import java.io.*; import java.util.*; import javax.management.*;
[ "com.ibm.json", "com.ibm.ws", "java.io", "java.util", "javax.management" ]
com.ibm.json; com.ibm.ws; java.io; java.util; javax.management;
2,671,631
@Override public synchronized Document mergeBackend() { if (getBackend() != null && getBackend().getStorage() != null && getMergeDocument() != null) { Document newDoc = getMergeDocument(); for (Map.Entry<String, BaseBean> lEntry : getBackend().getStorage() .entrySet()) { // check if bean should be merged if (isBeanActive(lEntry.getKey())) { newDoc =getBeanTransformer().mergeToISO(lEntry.getValue(), newDoc); } } return newDoc; } return null; }
synchronized Document function() { if (getBackend() != null && getBackend().getStorage() != null && getMergeDocument() != null) { Document newDoc = getMergeDocument(); for (Map.Entry<String, BaseBean> lEntry : getBackend().getStorage() .entrySet()) { if (isBeanActive(lEntry.getKey())) { newDoc =getBeanTransformer().mergeToISO(lEntry.getValue(), newDoc); } } return newDoc; } return null; }
/** * Merges the current bean values with the XML stored in mMergeDocument * * @return Document with merged values or null if on of the input is empty */
Merges the current bean values with the XML stored in mMergeDocument
mergeBackend
{ "repo_name": "nuest/Sensor_SmartEditor", "path": "smartsensoreditor-api/src/main/java/org/n52/smartsensoreditor/service/BackendManagerServiceSML.java", "license": "apache-2.0", "size": 6135 }
[ "de.conterra.smarteditor.beans.BaseBean", "java.util.Map", "org.w3c.dom.Document" ]
import de.conterra.smarteditor.beans.BaseBean; import java.util.Map; import org.w3c.dom.Document;
import de.conterra.smarteditor.beans.*; import java.util.*; import org.w3c.dom.*;
[ "de.conterra.smarteditor", "java.util", "org.w3c.dom" ]
de.conterra.smarteditor; java.util; org.w3c.dom;
196,718
protected void sequence_UiMobileHorizontalLayoutAssigment(EObject context, UiHorizontalLayoutAssigment semanticObject) { genericSequencer.createSequence(context, semanticObject); }
void function(EObject context, UiHorizontalLayoutAssigment semanticObject) { genericSequencer.createSequence(context, semanticObject); }
/** * Constraint: * (element=UiMobileEmbeddable alignment=UiAlignment?) */
Constraint: (element=UiMobileEmbeddable alignment=UiAlignment?)
sequence_UiMobileHorizontalLayoutAssigment
{ "repo_name": "lunifera/lunifera-ecview-addons", "path": "org.lunifera.ecview.dsl/src-gen/org/lunifera/ecview/dsl/serializer/UIGrammarSemanticSequencer.java", "license": "epl-1.0", "size": 151691 }
[ "org.eclipse.emf.ecore.EObject", "org.lunifera.ecview.semantic.uimodel.UiHorizontalLayoutAssigment" ]
import org.eclipse.emf.ecore.EObject; import org.lunifera.ecview.semantic.uimodel.UiHorizontalLayoutAssigment;
import org.eclipse.emf.ecore.*; import org.lunifera.ecview.semantic.uimodel.*;
[ "org.eclipse.emf", "org.lunifera.ecview" ]
org.eclipse.emf; org.lunifera.ecview;
2,916,223
@FIXVersion(introduced="4.0") @TagNumRef(tagNum=TagNum.Side, required = true) public void setSide(Side side) { this.side = side; }
@FIXVersion(introduced="4.0") @TagNumRef(tagNum=TagNum.Side, required = true) void function(Side side) { this.side = side; }
/** * Message field setter. * @param side field value */
Message field setter
setSide
{ "repo_name": "marvisan/HadesFIX", "path": "Model/src/main/java/net/hades/fix/message/OrderModificationRequestMsg.java", "license": "gpl-3.0", "size": 149491 }
[ "net.hades.fix.message.anno.FIXVersion", "net.hades.fix.message.anno.TagNumRef", "net.hades.fix.message.type.Side", "net.hades.fix.message.type.TagNum" ]
import net.hades.fix.message.anno.FIXVersion; import net.hades.fix.message.anno.TagNumRef; import net.hades.fix.message.type.Side; import net.hades.fix.message.type.TagNum;
import net.hades.fix.message.anno.*; import net.hades.fix.message.type.*;
[ "net.hades.fix" ]
net.hades.fix;
1,390,187
private CredentialComboBox getCred() { if (cred == null) { cred = new CredentialComboBox(); } return cred; }
CredentialComboBox function() { if (cred == null) { cred = new CredentialComboBox(); } return cred; }
/** * This method initializes cred * * @return javax.swing.JComboBox */
This method initializes cred
getCred
{ "repo_name": "NCIP/cagrid-core", "path": "caGrid/projects/gaards-ui/src/org/cagrid/gaards/ui/cds/SessionPanel.java", "license": "bsd-3-clause", "size": 4593 }
[ "org.cagrid.gaards.ui.common.CredentialComboBox" ]
import org.cagrid.gaards.ui.common.CredentialComboBox;
import org.cagrid.gaards.ui.common.*;
[ "org.cagrid.gaards" ]
org.cagrid.gaards;
1,881,415
void reset(ResetFrame frame, Consumer<Result<Void>> result);
void reset(ResetFrame frame, Consumer<Result<Void>> result);
/** * <p>Sends the given RST_STREAM {@code frame}.</p> * * @param frame The RST_FRAME to send. * @param result The result that gets notified when the frame has been sent. */
Sends the given RST_STREAM frame
reset
{ "repo_name": "hypercube1024/firefly", "path": "firefly-net/src/main/java/com/fireflysource/net/http/common/v2/stream/Stream.java", "license": "apache-2.0", "size": 9769 }
[ "com.fireflysource.common.sys.Result", "com.fireflysource.net.http.common.v2.frame.ResetFrame", "java.util.function.Consumer" ]
import com.fireflysource.common.sys.Result; import com.fireflysource.net.http.common.v2.frame.ResetFrame; import java.util.function.Consumer;
import com.fireflysource.common.sys.*; import com.fireflysource.net.http.common.v2.frame.*; import java.util.function.*;
[ "com.fireflysource.common", "com.fireflysource.net", "java.util" ]
com.fireflysource.common; com.fireflysource.net; java.util;
2,451,409
public static void setAnonymous(ClusterSet clusterSet) { logger.info("------ setAnonymous ------"); for (String idx : clusterSet) { Cluster cluster = clusterSet.getCluster(idx); String name = cluster.getName(); if (name.matches("[SC][0-9]+")) { String newName = name.replaceFirst("S", "speaker#"); cluster.setName(newName); logger.info("SPEAKER remplace name: " + name + " with new Name:" + newName); } } ClusterSet headClusterSet = clusterSet.getHeadClusterSet(); if (headClusterSet != null) { for (String idx : headClusterSet) { Cluster cluster = headClusterSet.getCluster(idx); String name = cluster.getName(); if (name.matches("[SC][0-9]+")) { String newName = name.replaceFirst("C", "speaker#"); cluster.setName(newName); logger.info("HEAD remplace name: " + name + " with new Name:" + newName); } } } }
static void function(ClusterSet clusterSet) { logger.info(STR); for (String idx : clusterSet) { Cluster cluster = clusterSet.getCluster(idx); String name = cluster.getName(); if (name.matches(STR)) { String newName = name.replaceFirst("S", STR); cluster.setName(newName); logger.info(STR + name + STR + newName); } } ClusterSet headClusterSet = clusterSet.getHeadClusterSet(); if (headClusterSet != null) { for (String idx : headClusterSet) { Cluster cluster = headClusterSet.getCluster(idx); String name = cluster.getName(); if (name.matches(STR)) { String newName = name.replaceFirst("C", STR); cluster.setName(newName); logger.info(STR + name + STR + newName); } } } }
/** * Sets the anonymous. * * @param clusterSet the new anonymous */
Sets the anonymous
setAnonymous
{ "repo_name": "Adirockzz95/GenderDetect", "path": "src/src/fr/lium/experimental/spkDiarization/programs/SpeakerIdenificationDecision10.java", "license": "gpl-3.0", "size": 40757 }
[ "fr.lium.spkDiarization.libClusteringData.Cluster", "fr.lium.spkDiarization.libClusteringData.ClusterSet" ]
import fr.lium.spkDiarization.libClusteringData.Cluster; import fr.lium.spkDiarization.libClusteringData.ClusterSet;
import fr.lium.*;
[ "fr.lium" ]
fr.lium;
2,109,584
public Builder<BE, E> hop(Filter... filters) { selectExtender.path().with(filters); return this; }
Builder<BE, E> function(Filter... filters) { selectExtender.path().with(filters); return this; }
/** * Appends the provided set of filters to the current select candidates. * The filters are applied as path fragments. * * @param filters the set of filters to append as path fragments * @return this builder */
Appends the provided set of filters to the current select candidates. The filters are applied as path fragments
hop
{ "repo_name": "jpkrohling/hawkular-inventory", "path": "hawkular-inventory-api/src/main/java/org/hawkular/inventory/base/TraversalContext.java", "license": "apache-2.0", "size": 19277 }
[ "org.hawkular.inventory.api.filters.Filter" ]
import org.hawkular.inventory.api.filters.Filter;
import org.hawkular.inventory.api.filters.*;
[ "org.hawkular.inventory" ]
org.hawkular.inventory;
1,180,066
public TestingEventBuses.Event assertNext(final Predicate<TestingEventBuses.Event> predicate, Function<TestingEventBuses.Event, String> messageGen ) throws InterruptedException, TimeoutException { TestingEventBuses.Event nextEvent = _events.pollFirst(_defaultTimeoutValue, _defaultTimeoutUnit); if (null == nextEvent) { throw new TimeoutException(); } if (!predicate.apply(nextEvent)) { throw new AssertionError(messageGen.apply(nextEvent)); } return nextEvent; }
TestingEventBuses.Event function(final Predicate<TestingEventBuses.Event> predicate, Function<TestingEventBuses.Event, String> messageGen ) throws InterruptedException, TimeoutException { TestingEventBuses.Event nextEvent = _events.pollFirst(_defaultTimeoutValue, _defaultTimeoutUnit); if (null == nextEvent) { throw new TimeoutException(); } if (!predicate.apply(nextEvent)) { throw new AssertionError(messageGen.apply(nextEvent)); } return nextEvent; }
/** Gets the next event from the queue and validates that it satisfies a given predicate. Blocking * assert. The event is removed from the internal queue regardless if the predicate has been * satisfied. * @param predicate the predicate to apply on the next event * @param assert error message generator * @return the event if the predicate is satisfied * @throws AssertionError if the predicate is not satisfied */
Gets the next event from the queue and validates that it satisfies a given predicate. Blocking assert. The event is removed from the internal queue regardless if the predicate has been satisfied
assertNext
{ "repo_name": "yukuai518/gobblin", "path": "gobblin-core/src/main/java/gobblin/writer/test/TestingEventBusAsserter.java", "license": "apache-2.0", "size": 6414 }
[ "com.google.common.base.Function", "com.google.common.base.Predicate", "java.util.concurrent.TimeoutException" ]
import com.google.common.base.Function; import com.google.common.base.Predicate; import java.util.concurrent.TimeoutException;
import com.google.common.base.*; import java.util.concurrent.*;
[ "com.google.common", "java.util" ]
com.google.common; java.util;
337,064
hi.show();}</pre></noscript> * * @return LocationManager Object */ public LocationManager getLocationManager() { return impl.getLocationManager(); }
hi.show();}</pre></noscript> * * @return LocationManager Object */ LocationManager function() { return impl.getLocationManager(); }
/** * This method returns the platform Location Manager used for geofencing. This allows tracking the * user location in the background. Usage: * * <script src="https://gist.github.com/codenameone/b0fa5280bde905a8f0cd.js"></script> <noscript><pre>{@code public class GeofenceListenerImpl implements GeofenceListener { public void onExit(String id) { System.out.println("Exited "+id); } public void onEntered(String id) { System.out.println("Entered "+id); } } Form hi = new Form("Hi World"); hi.addComponent(new Label("Hi World")); Location loc = new Location(); loc.setLatitude(51.5033630); loc.setLongitude(-0.1276250); Geofence gf = new Geofence("test", loc, 100, 100000); LocationManager.getLocationManager().addGeoFencing(GeofenceListenerImpl.class, gf); hi.show();}</pre></noscript> * * @return LocationManager Object */
This method returns the platform Location Manager used for geofencing. This allows tracking the user location in the background. Usage:
getLocationManager
{ "repo_name": "codenameone/CodenameOne", "path": "CodenameOne/src/com/codename1/ui/Display.java", "license": "gpl-2.0", "size": 192339 }
[ "com.codename1.location.LocationManager" ]
import com.codename1.location.LocationManager;
import com.codename1.location.*;
[ "com.codename1.location" ]
com.codename1.location;
508,942
public static List<String> getLatestSyncLogFiles(Channel c) { String logPath = Config.get().getString(ConfigDefaults.SPACEWALK_REPOSYNC_LOG_PATH, "/var/log/rhn/reposync/"); File dir = new File(logPath); List<String> possibleList = new ArrayList<String>(); String[] dirList = dir.list(); if (dirList != null) { for (String file : dirList) { if (file.startsWith(c.getLabel()) && !file.endsWith(".gz")) { possibleList.add(logPath + file); } } Collections.sort(possibleList); } return possibleList; }
static List<String> function(Channel c) { String logPath = Config.get().getString(ConfigDefaults.SPACEWALK_REPOSYNC_LOG_PATH, STR); File dir = new File(logPath); List<String> possibleList = new ArrayList<String>(); String[] dirList = dir.list(); if (dirList != null) { for (String file : dirList) { if (file.startsWith(c.getLabel()) && !file.endsWith(".gz")) { possibleList.add(logPath + file); } } Collections.sort(possibleList); } return possibleList; }
/** * get the latest log file for spacewalk-repo-sync * @param c channel * @return the string of the filename (fully qualified) */
get the latest log file for spacewalk-repo-sync
getLatestSyncLogFiles
{ "repo_name": "dmacvicar/spacewalk", "path": "java/code/src/com/redhat/rhn/manager/channel/ChannelManager.java", "license": "gpl-2.0", "size": 105505 }
[ "com.redhat.rhn.common.conf.Config", "com.redhat.rhn.common.conf.ConfigDefaults", "com.redhat.rhn.domain.channel.Channel", "java.io.File", "java.util.ArrayList", "java.util.Collections", "java.util.List" ]
import com.redhat.rhn.common.conf.Config; import com.redhat.rhn.common.conf.ConfigDefaults; import com.redhat.rhn.domain.channel.Channel; import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.List;
import com.redhat.rhn.common.conf.*; import com.redhat.rhn.domain.channel.*; import java.io.*; import java.util.*;
[ "com.redhat.rhn", "java.io", "java.util" ]
com.redhat.rhn; java.io; java.util;
1,312,042
public List<String> authorizedResources() { return this.authorizedResources; }
List<String> function() { return this.authorizedResources; }
/** * Get the authorizedResources property: List of Resource referred into query. * * @return the authorizedResources value. */
Get the authorizedResources property: List of Resource referred into query
authorizedResources
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanagerhybrid/azure-resourcemanager-monitor/src/main/java/com/azure/resourcemanager/monitor/models/Source.java", "license": "mit", "size": 3789 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
467,858
protected void addError(int code, String key) { errorCodes = (int[]) ArrayUtil.resizeArray(errorCodes, errorCodes.length + 1); errorKeys = (String[]) ArrayUtil.resizeArray(errorKeys, errorKeys.length + 1); errorCodes[errorCodes.length - 1] = code; errorKeys[errorKeys.length - 1] = key; }
void function(int code, String key) { errorCodes = (int[]) ArrayUtil.resizeArray(errorCodes, errorCodes.length + 1); errorKeys = (String[]) ArrayUtil.resizeArray(errorKeys, errorKeys.length + 1); errorCodes[errorCodes.length - 1] = code; errorKeys[errorKeys.length - 1] = key; }
/** * Adds the error code and the key to the list of errors. This list * is populated during construction or addition of elements and is used * outside this class to act upon the errors. */
Adds the error code and the key to the list of errors. This list is populated during construction or addition of elements and is used outside this class to act upon the errors
addError
{ "repo_name": "Julien35/dev-courses", "path": "tutoriel-spring-mvc/lib/hsqldb/src/org/hsqldb/persist/HsqlProperties.java", "license": "mit", "size": 17510 }
[ "org.hsqldb.lib.ArrayUtil" ]
import org.hsqldb.lib.ArrayUtil;
import org.hsqldb.lib.*;
[ "org.hsqldb.lib" ]
org.hsqldb.lib;
1,710,282
public static long getFileSize(String path) { if (StringUtils.isBlank(path)) { return -1; } File file = new File(path); return (file.exists() && file.isFile() ? file.length() : -1); }
static long function(String path) { if (StringUtils.isBlank(path)) { return -1; } File file = new File(path); return (file.exists() && file.isFile() ? file.length() : -1); }
/** * get file size * <ul> * <li>if path is null or empty, return -1</li> * <li>if path exist and it is a file, return file size, else return -1</li> * <ul> * * @param path * @return returns the length of this file in bytes. returns -1 if the file does not exist. */
get file size if path is null or empty, return -1 if path exist and it is a file, return file size, else return -1
getFileSize
{ "repo_name": "DesignQu/MVPFrames", "path": "Common/src/main/java/com/tool/common/utils/FileUtils.java", "license": "apache-2.0", "size": 26780 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
2,658,305
public ServiceFuture<Void> registerAsync(String userName, String registrationCode, final ServiceCallback<Void> serviceCallback) { return ServiceFuture.fromResponse(registerWithServiceResponseAsync(userName, registrationCode), serviceCallback); }
ServiceFuture<Void> function(String userName, String registrationCode, final ServiceCallback<Void> serviceCallback) { return ServiceFuture.fromResponse(registerWithServiceResponseAsync(userName, registrationCode), serviceCallback); }
/** * Register a user to a managed lab. * * @param userName The name of the user. * @param registrationCode The registration code of the lab. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */
Register a user to a managed lab
registerAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/labservices/mgmt-v2018_10_15/src/main/java/com/microsoft/azure/management/labservices/v2018_10_15/implementation/GlobalUsersInner.java", "license": "mit", "size": 77728 }
[ "com.microsoft.rest.ServiceCallback", "com.microsoft.rest.ServiceFuture" ]
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
2,744,732
private Collection<Protocol> protocolCollection; public Collection<Protocol> getProtocolCollection(){ return protocolCollection; }
Collection<Protocol> protocolCollection; public Collection<Protocol> function(){ return protocolCollection; }
/** * Retreives the value of protocolCollection attribue * @return protocolCollection **/
Retreives the value of protocolCollection attribue
getProtocolCollection
{ "repo_name": "NCIP/cagrid2", "path": "cagrid-mms/cagrid-mms-cadsr-impl/src/main/java/gov/nih/nci/cadsr/domain/Form.java", "license": "bsd-3-clause", "size": 2672 }
[ "java.util.Collection" ]
import java.util.Collection;
import java.util.*;
[ "java.util" ]
java.util;
2,532,489
public Connection getConnection(boolean createIfNotPresent) throws DalDbException;
Connection function(boolean createIfNotPresent) throws DalDbException;
/** * Return the current JDBC Connection for this DalDatabase if it exists or <code>createIfNotPresent</code> * is true. Otherwise, return <code>null</code>. * @param createIfNotPresent * @return Connection or null * @throws DalDbException */
Return the current JDBC Connection for this DalDatabase if it exists or <code>createIfNotPresent</code> is true. Otherwise, return <code>null</code>
getConnection
{ "repo_name": "kddart/interopServer-DAL", "path": "src/main/com/diversityarrays/dal/db/SqlDalDatabase.java", "license": "gpl-3.0", "size": 1954 }
[ "java.sql.Connection" ]
import java.sql.Connection;
import java.sql.*;
[ "java.sql" ]
java.sql;
1,678,553
Uri parse (String path) { if (path.startsWith("res:")) { return getUriForResourcePath(path); } else if (path.startsWith("file:///")) { return getUriFromPath(path); } else if (path.startsWith("file://")) { return getUriFromAsset(path); } else if (path.startsWith("http")){ return getUriFromRemote(path); } return Uri.EMPTY; }
Uri parse (String path) { if (path.startsWith("res:")) { return getUriForResourcePath(path); } else if (path.startsWith(STRfile: return getUriFromAsset(path); } else if (path.startsWith("http")){ return getUriFromRemote(path); } return Uri.EMPTY; }
/** * The URI for a path. * * @param path * The given path */
The URI for a path
parse
{ "repo_name": "nozelrosario/Dcare", "path": "plugins/de.appplant.cordova.plugin.local-notification/src/android/notification/AssetUtil.java", "license": "apache-2.0", "size": 12251 }
[ "android.net.Uri" ]
import android.net.Uri;
import android.net.*;
[ "android.net" ]
android.net;
1,295,581
private static void subscribeToVirtChannel(Server server, User user, ValidatorResult result) { Channel virtChannel = ChannelManager.subscribeToChildChannelByOSProduct( user, server, ChannelManager.VT_OS_PRODUCT); log.debug("virtChannel search by OS product found: " + virtChannel); // Otherwise, try just searching by package name: (libvirt in this case) if (virtChannel == null) { log.debug("Couldnt find a virt channel by OS/Product mappings, " + "trying package"); try { virtChannel = ChannelManager.subscribeToChildChannelWithPackageName( user, server, ChannelManager.VIRT_CHANNEL_PACKAGE_NAME); // If we couldn't find a virt channel, warn the user but continue: if (virtChannel == null) { log.warn("no virt channel"); result.addError(new ValidatorError( "system.entitle.novirtchannel")); } } catch (MultipleChannelsWithPackageException e) { log.warn("Found multiple child channels with package: " + ChannelManager.VIRT_CHANNEL_PACKAGE_NAME); result.addWarning(new ValidatorWarning( "system.entitle.multiplechannelswithpackage", ChannelManager.VIRT_CHANNEL_PACKAGE_NAME)); } } }
static void function(Server server, User user, ValidatorResult result) { Channel virtChannel = ChannelManager.subscribeToChildChannelByOSProduct( user, server, ChannelManager.VT_OS_PRODUCT); log.debug(STR + virtChannel); if (virtChannel == null) { log.debug(STR + STR); try { virtChannel = ChannelManager.subscribeToChildChannelWithPackageName( user, server, ChannelManager.VIRT_CHANNEL_PACKAGE_NAME); if (virtChannel == null) { log.warn(STR); result.addError(new ValidatorError( STR)); } } catch (MultipleChannelsWithPackageException e) { log.warn(STR + ChannelManager.VIRT_CHANNEL_PACKAGE_NAME); result.addWarning(new ValidatorWarning( STR, ChannelManager.VIRT_CHANNEL_PACKAGE_NAME)); } } }
/** * Subscribe the system to the Red Hat Virtualization channel if necessary. * * This method should only ever be called in Satellite. * * @param server Server to schedule install for. * @param user User performing the operation. * @param result Validation result we'll be returning for the UI to render. */
Subscribe the system to the Red Hat Virtualization channel if necessary. This method should only ever be called in Satellite
subscribeToVirtChannel
{ "repo_name": "renner/spacewalk", "path": "java/code/src/com/redhat/rhn/manager/system/SystemManager.java", "license": "gpl-2.0", "size": 132498 }
[ "com.redhat.rhn.common.validator.ValidatorError", "com.redhat.rhn.common.validator.ValidatorResult", "com.redhat.rhn.common.validator.ValidatorWarning", "com.redhat.rhn.domain.channel.Channel", "com.redhat.rhn.domain.server.Server", "com.redhat.rhn.domain.user.User", "com.redhat.rhn.manager.channel.ChannelManager", "com.redhat.rhn.manager.channel.MultipleChannelsWithPackageException" ]
import com.redhat.rhn.common.validator.ValidatorError; import com.redhat.rhn.common.validator.ValidatorResult; import com.redhat.rhn.common.validator.ValidatorWarning; import com.redhat.rhn.domain.channel.Channel; import com.redhat.rhn.domain.server.Server; import com.redhat.rhn.domain.user.User; import com.redhat.rhn.manager.channel.ChannelManager; import com.redhat.rhn.manager.channel.MultipleChannelsWithPackageException;
import com.redhat.rhn.common.validator.*; import com.redhat.rhn.domain.channel.*; import com.redhat.rhn.domain.server.*; import com.redhat.rhn.domain.user.*; import com.redhat.rhn.manager.channel.*;
[ "com.redhat.rhn" ]
com.redhat.rhn;
1,058,410
public void alert(String givenValue){ final AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); builder.setTitle(R.string.invalid_input); builder.setMessage(givenValue); builder.setPositiveButton(R.string.ok, null); builder.show(); }
void function(String givenValue){ final AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); builder.setTitle(R.string.invalid_input); builder.setMessage(givenValue); builder.setPositiveButton(R.string.ok, null); builder.show(); }
/** * Sets up a simple alert pop-up that shows up when the user tries to insert invalid * text to a preference field. * * @param givenValue */
Sets up a simple alert pop-up that shows up when the user tries to insert invalid text to a preference field
alert
{ "repo_name": "BarCodeKey/BarCodeKey", "path": "app/src/main/java/app/preferences/SettingsFragment.java", "license": "mit", "size": 9161 }
[ "android.app.AlertDialog" ]
import android.app.AlertDialog;
import android.app.*;
[ "android.app" ]
android.app;
404,896
private void updateJobState(final Ticket ticket, final ExecutionState executionState) { final JobInfo jobInfo = getJobInfo(ticket); final QueueStage stage = getQueueStage(executionState); boolean postprocess = false; boolean updated = false; synchronized(jobInfo) { final QueueStage currentStage = jobInfo.queueStage; // set stage if its more "advanced" in its life cycle than the current stage if(currentStage == null || currentStage.equals(QueueStage.PENDING) || (currentStage.equals(QueueStage.RUNNING) && stage.equals(QueueStage.POSTPROCESSING))) { updated = true; setQueueStage(jobInfo, stage); } // Check if its time to postprocess if(jobInfo.queueStage.equals(QueueStage.POSTPROCESSING) && !jobInfo.postprocessed) { jobInfo.postprocessed = true; postprocess = true; } if(jobInfo.progressTracker == null && jobInfo.jobProcessor instanceof ProgressTrackable) { try { jobInfo.progressTracker = ((ProgressTrackable) jobInfo.jobProcessor).getProgressTracker(); } catch(final RuntimeException t) { ExceptionUtils.logQuietly(LOG, t, "Exception thrown while attempting to get ProgressTracker for job " + jobInfo.jobProcessor); } } }
void function(final Ticket ticket, final ExecutionState executionState) { final JobInfo jobInfo = getJobInfo(ticket); final QueueStage stage = getQueueStage(executionState); boolean postprocess = false; boolean updated = false; synchronized(jobInfo) { final QueueStage currentStage = jobInfo.queueStage; if(currentStage == null currentStage.equals(QueueStage.PENDING) (currentStage.equals(QueueStage.RUNNING) && stage.equals(QueueStage.POSTPROCESSING))) { updated = true; setQueueStage(jobInfo, stage); } if(jobInfo.queueStage.equals(QueueStage.POSTPROCESSING) && !jobInfo.postprocessed) { jobInfo.postprocessed = true; postprocess = true; } if(jobInfo.progressTracker == null && jobInfo.jobProcessor instanceof ProgressTrackable) { try { jobInfo.progressTracker = ((ProgressTrackable) jobInfo.jobProcessor).getProgressTracker(); } catch(final RuntimeException t) { ExceptionUtils.logQuietly(LOG, t, STR + jobInfo.jobProcessor); } } }
/** * ExecutionState's may be skipped, may come out of order. jobInfo.queueStage and jobInfo.jobProcessor may be not initialized yet. */
ExecutionState's may be skipped, may come out of order. jobInfo.queueStage and jobInfo.jobProcessor may be not initialized yet
updateJobState
{ "repo_name": "jmchilton/TINT", "path": "projects/TropixJobQueue/src/main/edu/umn/msi/tropix/common/jobqueue/impl/JobProcessorQueueImpl.java", "license": "epl-1.0", "size": 21510 }
[ "edu.umn.msi.tropix.common.jobqueue.QueueStage", "edu.umn.msi.tropix.common.jobqueue.execution.ExecutionState", "edu.umn.msi.tropix.common.jobqueue.progress.ProgressTrackable", "edu.umn.msi.tropix.common.jobqueue.ticket.Ticket", "edu.umn.msi.tropix.common.logging.ExceptionUtils" ]
import edu.umn.msi.tropix.common.jobqueue.QueueStage; import edu.umn.msi.tropix.common.jobqueue.execution.ExecutionState; import edu.umn.msi.tropix.common.jobqueue.progress.ProgressTrackable; import edu.umn.msi.tropix.common.jobqueue.ticket.Ticket; import edu.umn.msi.tropix.common.logging.ExceptionUtils;
import edu.umn.msi.tropix.common.jobqueue.*; import edu.umn.msi.tropix.common.jobqueue.execution.*; import edu.umn.msi.tropix.common.jobqueue.progress.*; import edu.umn.msi.tropix.common.jobqueue.ticket.*; import edu.umn.msi.tropix.common.logging.*;
[ "edu.umn.msi" ]
edu.umn.msi;
2,313,238
public ConcurrentHashMap<String, LogicalDevice> getLogicalDevices() { return logicalDevices; }
ConcurrentHashMap<String, LogicalDevice> function() { return logicalDevices; }
/** * Returns the list of logical devices. * * @return the logicalDevices */
Returns the list of logical devices
getLogicalDevices
{ "repo_name": "cschneider/openhab", "path": "bundles/binding/org.openhab.binding.rwesmarthome/src/main/java/org/openhab/binding/rwesmarthome/internal/communicator/RWESmarthomeSession.java", "license": "epl-1.0", "size": 10160 }
[ "java.util.concurrent.ConcurrentHashMap", "org.openhab.binding.rwesmarthome.internal.model.LogicalDevice" ]
import java.util.concurrent.ConcurrentHashMap; import org.openhab.binding.rwesmarthome.internal.model.LogicalDevice;
import java.util.concurrent.*; import org.openhab.binding.rwesmarthome.internal.model.*;
[ "java.util", "org.openhab.binding" ]
java.util; org.openhab.binding;
619,117
private String message(String pattern, Object ... params) { return path() + "::" + MessageFormat.format(pattern, params); }
String function(String pattern, Object ... params) { return path() + "::" + MessageFormat.format(pattern, params); }
/** * Format an assertion message, including the current path. * @param pattern * @param params */
Format an assertion message, including the current path
message
{ "repo_name": "rnc/apiman", "path": "test/common/src/main/java/io/apiman/test/common/json/JsonCompare.java", "license": "apache-2.0", "size": 18428 }
[ "java.text.MessageFormat" ]
import java.text.MessageFormat;
import java.text.*;
[ "java.text" ]
java.text;
2,407,230
public Object objectToData(Object object) { return object; } } private static class SupplierKeyBinding extends TupleBinding { private SupplierKeyBinding() { }
Object function(Object object) { return object; } } private static class SupplierKeyBinding extends TupleBinding { private SupplierKeyBinding() { }
/** * Return the entity as the stored data. There is nothing to do here * since the entity's key fields are transient. */
Return the entity as the stored data. There is nothing to do here since the entity's key fields are transient
objectToData
{ "repo_name": "zheguang/BerkeleyDB", "path": "examples/java/src/collections/ship/sentity/SampleViews.java", "license": "agpl-3.0", "size": 13415 }
[ "com.sleepycat.bind.tuple.TupleBinding" ]
import com.sleepycat.bind.tuple.TupleBinding;
import com.sleepycat.bind.tuple.*;
[ "com.sleepycat.bind" ]
com.sleepycat.bind;
1,237,337
protected String buildDocumentTitle(String title) { if (this.getVendorDetail() == null) { return title; } Integer vendorHeaderGeneratedIdentifier = this.getVendorDetail().getVendorHeaderGeneratedIdentifier(); VendorService vendorService = SpringContext.getBean(VendorService.class); Object[] indicators = new String[2]; boolean isEmployeeVendor = vendorService.isVendorInstitutionEmployee(vendorHeaderGeneratedIdentifier); indicators[0] = isEmployeeVendor ? AdHocPaymentIndicator.EMPLOYEE_VENDOR : AdHocPaymentIndicator.OTHER; boolean isVendorForeign = vendorService.isVendorForeign(vendorHeaderGeneratedIdentifier); indicators[1] = isVendorForeign ? AdHocPaymentIndicator.ALIEN_VENDOR : AdHocPaymentIndicator.OTHER; for (Object indicator : indicators) { if (!AdHocPaymentIndicator.OTHER.equals(indicator)) { String titlePattern = title + " [{0}:{1}]"; return MessageFormat.format(titlePattern, indicators); } } return title; }
String function(String title) { if (this.getVendorDetail() == null) { return title; } Integer vendorHeaderGeneratedIdentifier = this.getVendorDetail().getVendorHeaderGeneratedIdentifier(); VendorService vendorService = SpringContext.getBean(VendorService.class); Object[] indicators = new String[2]; boolean isEmployeeVendor = vendorService.isVendorInstitutionEmployee(vendorHeaderGeneratedIdentifier); indicators[0] = isEmployeeVendor ? AdHocPaymentIndicator.EMPLOYEE_VENDOR : AdHocPaymentIndicator.OTHER; boolean isVendorForeign = vendorService.isVendorForeign(vendorHeaderGeneratedIdentifier); indicators[1] = isVendorForeign ? AdHocPaymentIndicator.ALIEN_VENDOR : AdHocPaymentIndicator.OTHER; for (Object indicator : indicators) { if (!AdHocPaymentIndicator.OTHER.equals(indicator)) { String titlePattern = title + STR; return MessageFormat.format(titlePattern, indicators); } } return title; }
/** * build document title based on the properties of current document * * @param the default document title * @return the combine information of the given title and additional payment indicators */
build document title based on the properties of current document
buildDocumentTitle
{ "repo_name": "quikkian-ua-devops/will-financials", "path": "kfs-purap/src/main/java/org/kuali/kfs/module/purap/document/PurchasingAccountsPayableDocumentBase.java", "license": "agpl-3.0", "size": 52309 }
[ "java.text.MessageFormat", "org.kuali.kfs.sys.KFSConstants", "org.kuali.kfs.sys.context.SpringContext", "org.kuali.kfs.vnd.document.service.VendorService" ]
import java.text.MessageFormat; import org.kuali.kfs.sys.KFSConstants; import org.kuali.kfs.sys.context.SpringContext; import org.kuali.kfs.vnd.document.service.VendorService;
import java.text.*; import org.kuali.kfs.sys.*; import org.kuali.kfs.sys.context.*; import org.kuali.kfs.vnd.document.service.*;
[ "java.text", "org.kuali.kfs" ]
java.text; org.kuali.kfs;
19,099
@Test public void testLoadReplicaDuringDecommissioning() { EasyMock.expect(throttler.canCreateReplicant(EasyMock.anyString())).andReturn(true).anyTimes(); final LoadQueuePeon mockPeon1 = createEmptyPeon(); final LoadQueuePeon mockPeon2 = createOneCallPeonMock(); final LoadQueuePeon mockPeon3 = createOneCallPeonMock(); final LoadQueuePeon mockPeon4 = createOneCallPeonMock(); LoadRule rule = createLoadRule(ImmutableMap.of("tier1", 2, "tier2", 2)); final DataSegment segment = createDataSegment("foo"); throttler.registerReplicantCreation(EasyMock.eq("tier2"), EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().times(2); ServerHolder holder1 = createServerHolder("tier1", mockPeon1, true); ServerHolder holder2 = createServerHolder("tier1", mockPeon2, false); ServerHolder holder3 = createServerHolder("tier2", mockPeon3, false); ServerHolder holder4 = createServerHolder("tier2", mockPeon4, false); EasyMock.expect(mockBalancerStrategy.findNewSegmentHomeReplicator(segment, ImmutableList.of(holder2))) .andReturn(holder2); EasyMock.expect(mockBalancerStrategy.findNewSegmentHomeReplicator(segment, ImmutableList.of(holder4, holder3))) .andReturn(holder3); EasyMock.expect(mockBalancerStrategy.findNewSegmentHomeReplicator(segment, ImmutableList.of(holder4))) .andReturn(holder4); EasyMock.replay(throttler, mockPeon1, mockPeon2, mockPeon3, mockPeon4, mockBalancerStrategy); DruidCluster druidCluster = DruidClusterBuilder .newBuilder() .addTier("tier1", holder1, holder2) .addTier("tier2", holder3, holder4) .build(); CoordinatorStats stats = rule.run(null, makeCoordinatorRuntimeParams(druidCluster, segment), segment); Assert.assertEquals(1L, stats.getTieredStat(LoadRule.ASSIGNED_COUNT, "tier1")); Assert.assertEquals(2L, stats.getTieredStat(LoadRule.ASSIGNED_COUNT, "tier2")); EasyMock.verify(throttler, mockPeon1, mockPeon2, mockPeon3, mockPeon4, mockBalancerStrategy); }
void function() { EasyMock.expect(throttler.canCreateReplicant(EasyMock.anyString())).andReturn(true).anyTimes(); final LoadQueuePeon mockPeon1 = createEmptyPeon(); final LoadQueuePeon mockPeon2 = createOneCallPeonMock(); final LoadQueuePeon mockPeon3 = createOneCallPeonMock(); final LoadQueuePeon mockPeon4 = createOneCallPeonMock(); LoadRule rule = createLoadRule(ImmutableMap.of("tier1", 2, "tier2", 2)); final DataSegment segment = createDataSegment("foo"); throttler.registerReplicantCreation(EasyMock.eq("tier2"), EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().times(2); ServerHolder holder1 = createServerHolder("tier1", mockPeon1, true); ServerHolder holder2 = createServerHolder("tier1", mockPeon2, false); ServerHolder holder3 = createServerHolder("tier2", mockPeon3, false); ServerHolder holder4 = createServerHolder("tier2", mockPeon4, false); EasyMock.expect(mockBalancerStrategy.findNewSegmentHomeReplicator(segment, ImmutableList.of(holder2))) .andReturn(holder2); EasyMock.expect(mockBalancerStrategy.findNewSegmentHomeReplicator(segment, ImmutableList.of(holder4, holder3))) .andReturn(holder3); EasyMock.expect(mockBalancerStrategy.findNewSegmentHomeReplicator(segment, ImmutableList.of(holder4))) .andReturn(holder4); EasyMock.replay(throttler, mockPeon1, mockPeon2, mockPeon3, mockPeon4, mockBalancerStrategy); DruidCluster druidCluster = DruidClusterBuilder .newBuilder() .addTier("tier1", holder1, holder2) .addTier("tier2", holder3, holder4) .build(); CoordinatorStats stats = rule.run(null, makeCoordinatorRuntimeParams(druidCluster, segment), segment); Assert.assertEquals(1L, stats.getTieredStat(LoadRule.ASSIGNED_COUNT, "tier1")); Assert.assertEquals(2L, stats.getTieredStat(LoadRule.ASSIGNED_COUNT, "tier2")); EasyMock.verify(throttler, mockPeon1, mockPeon2, mockPeon3, mockPeon4, mockBalancerStrategy); }
/** * 2 tiers, 2 servers each, 1 server of the second tier is decommissioning. * Should not load a segment to the server that is decommssioning. */
2 tiers, 2 servers each, 1 server of the second tier is decommissioning. Should not load a segment to the server that is decommssioning
testLoadReplicaDuringDecommissioning
{ "repo_name": "monetate/druid", "path": "server/src/test/java/org/apache/druid/server/coordinator/rules/LoadRuleTest.java", "license": "apache-2.0", "size": 36167 }
[ "com.google.common.collect.ImmutableList", "com.google.common.collect.ImmutableMap", "org.apache.druid.server.coordinator.CoordinatorStats", "org.apache.druid.server.coordinator.DruidCluster", "org.apache.druid.server.coordinator.DruidClusterBuilder", "org.apache.druid.server.coordinator.LoadQueuePeon", "org.apache.druid.server.coordinator.ServerHolder", "org.apache.druid.timeline.DataSegment", "org.easymock.EasyMock", "org.junit.Assert" ]
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.apache.druid.server.coordinator.CoordinatorStats; import org.apache.druid.server.coordinator.DruidCluster; import org.apache.druid.server.coordinator.DruidClusterBuilder; import org.apache.druid.server.coordinator.LoadQueuePeon; import org.apache.druid.server.coordinator.ServerHolder; import org.apache.druid.timeline.DataSegment; import org.easymock.EasyMock; import org.junit.Assert;
import com.google.common.collect.*; import org.apache.druid.server.coordinator.*; import org.apache.druid.timeline.*; import org.easymock.*; import org.junit.*;
[ "com.google.common", "org.apache.druid", "org.easymock", "org.junit" ]
com.google.common; org.apache.druid; org.easymock; org.junit;
1,775,357
private Polygon getPolygon(double minX, double minY, double maxX, double maxY){ Point2D.Double origin = label.getOrigin(); double x[] = new double[] {minX, minX, maxX, maxX}; double y[] = new double[] {minY, maxY, maxY, minY}; //rotating the points if necessary. This is much faster than using JTS-methods (AffineTransformation) double rot = label.getStyling().rotation; if( rot != 0 ) rotatePoints(x,y,4,origin.x,origin.y,(Math.toRadians(rot))); Coordinate c1 = new Coordinate(x[0],y[0]); Coordinate c2 = new Coordinate(x[1],y[1]); Coordinate c3 = new Coordinate(x[2],y[2]); Coordinate c4 = new Coordinate(x[3],y[3]); return jtsFactory.createPolygon( new Coordinate[]{c1,c2,c3,c4,c1} ); }
Polygon function(double minX, double minY, double maxX, double maxY){ Point2D.Double origin = label.getOrigin(); double x[] = new double[] {minX, minX, maxX, maxX}; double y[] = new double[] {minY, maxY, maxY, minY}; double rot = label.getStyling().rotation; if( rot != 0 ) rotatePoints(x,y,4,origin.x,origin.y,(Math.toRadians(rot))); Coordinate c1 = new Coordinate(x[0],y[0]); Coordinate c2 = new Coordinate(x[1],y[1]); Coordinate c3 = new Coordinate(x[2],y[2]); Coordinate c4 = new Coordinate(x[3],y[3]); return jtsFactory.createPolygon( new Coordinate[]{c1,c2,c3,c4,c1} ); }
/** * Creates a JTS-Polygon from min/max values and returns it. */
Creates a JTS-Polygon from min/max values and returns it
getPolygon
{ "repo_name": "deegree/deegree3", "path": "deegree-core/deegree-core-rendering-2d/src/main/java/org/deegree/rendering/r2d/labelplacement/PointLabelPositionOptions.java", "license": "lgpl-2.1", "size": 10550 }
[ "java.awt.geom.Point2D", "org.locationtech.jts.geom.Coordinate", "org.locationtech.jts.geom.Polygon" ]
import java.awt.geom.Point2D; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Polygon;
import java.awt.geom.*; import org.locationtech.jts.geom.*;
[ "java.awt", "org.locationtech.jts" ]
java.awt; org.locationtech.jts;
2,252,211
public void removeCachePool(String poolName) throws IOException { assert namesystem.hasWriteLock(); try { CachePoolInfo.validateName(poolName); CachePool pool = cachePools.remove(poolName); if (pool == null) { throw new InvalidRequestException( "Cannot remove non-existent cache pool " + poolName); } // Remove all directives in this pool. Iterator<CacheDirective> iter = pool.getDirectiveList().iterator(); while (iter.hasNext()) { CacheDirective directive = iter.next(); directivesByPath.removeAll(directive.getPath()); directivesById.remove(directive.getId()); iter.remove(); } setNeedsRescan(); } catch (IOException e) { LOG.info("removeCachePool of " + poolName + " failed: ", e); throw e; } LOG.info("removeCachePool of " + poolName + " successful."); }
void function(String poolName) throws IOException { assert namesystem.hasWriteLock(); try { CachePoolInfo.validateName(poolName); CachePool pool = cachePools.remove(poolName); if (pool == null) { throw new InvalidRequestException( STR + poolName); } Iterator<CacheDirective> iter = pool.getDirectiveList().iterator(); while (iter.hasNext()) { CacheDirective directive = iter.next(); directivesByPath.removeAll(directive.getPath()); directivesById.remove(directive.getId()); iter.remove(); } setNeedsRescan(); } catch (IOException e) { LOG.info(STR + poolName + STR, e); throw e; } LOG.info(STR + poolName + STR); }
/** * Remove a cache pool. * * Only the superuser should be able to call this function. * * @param poolName * The name for the cache pool to remove. */
Remove a cache pool. Only the superuser should be able to call this function
removeCachePool
{ "repo_name": "apurtell/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java", "license": "apache-2.0", "size": 46979 }
[ "java.io.IOException", "java.util.Iterator", "org.apache.hadoop.fs.InvalidRequestException", "org.apache.hadoop.hdfs.protocol.CacheDirective", "org.apache.hadoop.hdfs.protocol.CachePoolInfo" ]
import java.io.IOException; import java.util.Iterator; import org.apache.hadoop.fs.InvalidRequestException; import org.apache.hadoop.hdfs.protocol.CacheDirective; import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import java.io.*; import java.util.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.protocol.*;
[ "java.io", "java.util", "org.apache.hadoop" ]
java.io; java.util; org.apache.hadoop;
1,897,752
public static Map<String, Object> getDocumentProperties(DocumentLibraryPage documentLibraryPage, String filename) { DocumentDetailsPage docDetailsPage = documentLibraryPage.selectFile(filename).render(); return docDetailsPage.getProperties(); }
static Map<String, Object> function(DocumentLibraryPage documentLibraryPage, String filename) { DocumentDetailsPage docDetailsPage = documentLibraryPage.selectFile(filename).render(); return docDetailsPage.getProperties(); }
/** * assume that we are in the DocumentLibrary root * Return the properties details of a particular file from DocumentLibary * * @param documentLibraryPage * @param filename * @return */
Assume that we are in the DocumentLibrary root. Return the property details of a particular file from the DocumentLibrary
getDocumentProperties
{ "repo_name": "nguyentienlong/community-edition", "path": "projects/qa-share/src/main/java/org/alfresco/share/util/DocumentLibraryUtil.java", "license": "lgpl-3.0", "size": 2570 }
[ "java.util.Map", "org.alfresco.po.share.site.document.DocumentDetailsPage", "org.alfresco.po.share.site.document.DocumentLibraryPage" ]
import java.util.Map; import org.alfresco.po.share.site.document.DocumentDetailsPage; import org.alfresco.po.share.site.document.DocumentLibraryPage;
import java.util.*; import org.alfresco.po.share.site.document.*;
[ "java.util", "org.alfresco.po" ]
java.util; org.alfresco.po;
2,851,135
public SearchResultsCollection getResultSlice( DBconnection conn, ScrollParams scrollParams, String orderBy, String query_id) throws SQLException { CloneSummary[] searchResults = null; CloneSummary clone = null; ArrayList found = null; SearchResultsCollection searchCollection = new SearchResultsCollection(); StringBuffer query = new StringBuffer(); int floor = scrollParams.getFloor(); int ceiling = scrollParams.getCeiling(); int rowCounter = 0; // create query to select results from temp table in requested order query.append( "SELECT DISTINCT " + getTableColumns( "result" ) + " " + "FROM RESULT_CLONE WHERE QUERY_ID = '" + query_id + "' "); if ( orderBy != null && orderBy.equalsIgnoreCase( "name" ) ) { query.append( " ORDER BY name " ); } else if ( orderBy != null && orderBy.equalsIgnoreCase( "position" ) ) { query.append( " ORDER BY chromosome, position " ); } else if ( orderBy != null && orderBy.equalsIgnoreCase( "type" ) ) { query.append( " ORDER BY vector_type" ); } else { query.append( " ORDER BY name " ); } conn.setQuery( query.toString() ); ResultSet results = conn.getResultSet(); while ( results.next() ) { if ( rowCounter >= floor && rowCounter <= ceiling ) { clone = getCloneRow( results ); if ( found == null ) { found = new ArrayList(); } found.add( clone ); } rowCounter++; } conn.releaseResources(); if ( found != null && !found.isEmpty() ) { searchResults = makeArray( conn, found ); searchCollection.setResults( searchResults ); searchCollection.setResultSize( rowCounter ); searchCollection.setFloor( floor + 1 ); searchCollection.setCeiling( floor + found.size() ); } // set JSP URL for displaying results searchCollection.setResultsPage( getSummaryPage() ); return searchCollection; }
SearchResultsCollection function( DBconnection conn, ScrollParams scrollParams, String orderBy, String query_id) throws SQLException { CloneSummary[] searchResults = null; CloneSummary clone = null; ArrayList found = null; SearchResultsCollection searchCollection = new SearchResultsCollection(); StringBuffer query = new StringBuffer(); int floor = scrollParams.getFloor(); int ceiling = scrollParams.getCeiling(); int rowCounter = 0; query.append( STR + getTableColumns( STR ) + " " + STR + query_id + STR); if ( orderBy != null && orderBy.equalsIgnoreCase( "name" ) ) { query.append( STR ); } else if ( orderBy != null && orderBy.equalsIgnoreCase( STR ) ) { query.append( STR ); } else if ( orderBy != null && orderBy.equalsIgnoreCase( "type" ) ) { query.append( STR ); } else { query.append( STR ); } conn.setQuery( query.toString() ); ResultSet results = conn.getResultSet(); while ( results.next() ) { if ( rowCounter >= floor && rowCounter <= ceiling ) { clone = getCloneRow( results ); if ( found == null ) { found = new ArrayList(); } found.add( clone ); } rowCounter++; } conn.releaseResources(); if ( found != null && !found.isEmpty() ) { searchResults = makeArray( conn, found ); searchCollection.setResults( searchResults ); searchCollection.setResultSize( rowCounter ); searchCollection.setFloor( floor + 1 ); searchCollection.setCeiling( floor + found.size() ); } searchCollection.setResultsPage( getSummaryPage() ); return searchCollection; }
/** * Retrieves requested slice of total result set * * @param conn An active connection to the database * @param scrollParams ScrollParams object containing data defining floor * and ceiling of result set * @param sessionID User's session id. Used to retrieve data from person * specific temp table * @param String orderBy Field to order search results by * @return clone search results as an <code>SearchResultsCollection</code> * containing an array of <code>CloneSummary</code> objects and data on * the complete result set * @throws SQLException thrown if a database error occurs */
Retrieves requested slice of total result set
getResultSlice
{ "repo_name": "tair/tairwebapp", "path": "src/org/tair/search/dna/CloneSearcher.java", "license": "gpl-3.0", "size": 30422 }
[ "java.sql.ResultSet", "java.sql.SQLException", "java.util.ArrayList", "org.tair.tfc.DBconnection", "org.tair.utilities.ScrollParams", "org.tair.utilities.SearchResultsCollection" ]
import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import org.tair.tfc.DBconnection; import org.tair.utilities.ScrollParams; import org.tair.utilities.SearchResultsCollection;
import java.sql.*; import java.util.*; import org.tair.tfc.*; import org.tair.utilities.*;
[ "java.sql", "java.util", "org.tair.tfc", "org.tair.utilities" ]
java.sql; java.util; org.tair.tfc; org.tair.utilities;
1,651,961
protected void assertNode(String message, String expected, RexNode node) { String actual; if (node.isA(SqlKind.CAST) || node.isA(SqlKind.NEW_SPECIFICATION)) { // toString contains type (see RexCall.toString) actual = node.toString(); } else { actual = node + ":" + node.getType() + (node.getType().isNullable() ? "" : " NOT NULL"); } assertEquals(expected, actual, message); }
void function(String message, String expected, RexNode node) { String actual; if (node.isA(SqlKind.CAST) || node.isA(SqlKind.NEW_SPECIFICATION)) { actual = node.toString(); } else { actual = node + ":" + node.getType() + (node.getType().isNullable() ? "" : " NOT NULL"); } assertEquals(expected, actual, message); }
/** * Asserts that a given node has expected string representation with account * of node type. * * @param message extra message that clarifies where the node came from * @param expected expected string representation of the node * @param node node to check */
Asserts that a given node has the expected string representation, taking account of the node type
assertNode
{ "repo_name": "vlsi/calcite", "path": "core/src/test/java/org/apache/calcite/rex/RexProgramTestBase.java", "license": "apache-2.0", "size": 7481 }
[ "org.apache.calcite.sql.SqlKind", "org.junit.jupiter.api.Assertions" ]
import org.apache.calcite.sql.SqlKind; import org.junit.jupiter.api.Assertions;
import org.apache.calcite.sql.*; import org.junit.jupiter.api.*;
[ "org.apache.calcite", "org.junit.jupiter" ]
org.apache.calcite; org.junit.jupiter;
196,306
void deleteUser(final DataLoadListener listener);
void deleteUser(final DataLoadListener listener);
/** * Delete user. * * @param listener the listener */
Delete user
deleteUser
{ "repo_name": "yonadev/yona-app-android", "path": "app/src/main/java/nu/yona/app/api/manager/AuthenticateManager.java", "license": "mpl-2.0", "size": 2853 }
[ "nu.yona.app.listener.DataLoadListener" ]
import nu.yona.app.listener.DataLoadListener;
import nu.yona.app.listener.*;
[ "nu.yona.app" ]
nu.yona.app;
1,908,346
public com.google.common.util.concurrent.ListenableFuture<com.google.container.v1.Operation> rollbackNodePoolUpgrade( com.google.container.v1.RollbackNodePoolUpgradeRequest request) { return futureUnaryCall( getChannel().newCall(getRollbackNodePoolUpgradeMethodHelper(), getCallOptions()), request); }
com.google.common.util.concurrent.ListenableFuture<com.google.container.v1.Operation> function( com.google.container.v1.RollbackNodePoolUpgradeRequest request) { return futureUnaryCall( getChannel().newCall(getRollbackNodePoolUpgradeMethodHelper(), getCallOptions()), request); }
/** * <pre> * Roll back the previously Aborted or Failed NodePool upgrade. * This will be an no-op if the last upgrade successfully completed. * </pre> */
<code> Roll back the previously Aborted or Failed NodePool upgrade. This will be a no-op if the last upgrade successfully completed. </code>
rollbackNodePoolUpgrade
{ "repo_name": "pongad/api-client-staging", "path": "generated/java/grpc-google-cloud-container-v1/src/main/java/com/google/container/v1/ClusterManagerGrpc.java", "license": "bsd-3-clause", "size": 147597 }
[ "io.grpc.stub.ClientCalls" ]
import io.grpc.stub.ClientCalls;
import io.grpc.stub.*;
[ "io.grpc.stub" ]
io.grpc.stub;
2,509,973
public Builder addAllExpand(List<String> elements) { if (this.expand == null) { this.expand = new ArrayList<>(); } this.expand.addAll(elements); return this; }
Builder function(List<String> elements) { if (this.expand == null) { this.expand = new ArrayList<>(); } this.expand.addAll(elements); return this; }
/** * Add all elements to `expand` list. A list is initialized for the first `add/addAll` call, and * subsequent calls adds additional elements to the original list. See {@link * ValueListListParams#expand} for the field documentation. */
Add all elements to `expand` list. A list is initialized for the first `add/addAll` call, and subsequent calls adds additional elements to the original list. See <code>ValueListListParams#expand</code> for the field documentation
addAllExpand
{ "repo_name": "stripe/stripe-java", "path": "src/main/java/com/stripe/param/radar/ValueListListParams.java", "license": "mit", "size": 10366 }
[ "java.util.ArrayList", "java.util.List" ]
import java.util.ArrayList; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
991,549
@Nonnull public java.util.concurrent.CompletableFuture<ManagedMobileLobApp> putAsync(@Nonnull final ManagedMobileLobApp newManagedMobileLobApp) { return sendAsync(HttpMethod.PUT, newManagedMobileLobApp); }
java.util.concurrent.CompletableFuture<ManagedMobileLobApp> function(@Nonnull final ManagedMobileLobApp newManagedMobileLobApp) { return sendAsync(HttpMethod.PUT, newManagedMobileLobApp); }
/** * Creates a ManagedMobileLobApp with a new object * * @param newManagedMobileLobApp the object to create/update * @return a future with the result */
Creates a ManagedMobileLobApp with a new object
putAsync
{ "repo_name": "microsoftgraph/msgraph-sdk-java", "path": "src/main/java/com/microsoft/graph/requests/ManagedMobileLobAppRequest.java", "license": "mit", "size": 7017 }
[ "com.microsoft.graph.http.HttpMethod", "com.microsoft.graph.models.ManagedMobileLobApp", "javax.annotation.Nonnull" ]
import com.microsoft.graph.http.HttpMethod; import com.microsoft.graph.models.ManagedMobileLobApp; import javax.annotation.Nonnull;
import com.microsoft.graph.http.*; import com.microsoft.graph.models.*; import javax.annotation.*;
[ "com.microsoft.graph", "javax.annotation" ]
com.microsoft.graph; javax.annotation;
2,184,542
private void testFailureLogFormatHelper(boolean checkIP, ApplicationId appId, ApplicationAttemptId attemptId, ContainerId containerId) { String fLog = RMAuditLogger.createFailureLog(USER, OPERATION, PERM, TARGET, DESC, appId, attemptId, containerId); StringBuilder expLog = new StringBuilder(); expLog.append("USER=test\t"); if (checkIP) { InetAddress ip = Server.getRemoteIp(); expLog.append(Keys.IP.name() + "=" + ip.getHostAddress() + "\t"); } expLog.append("OPERATION=oper\tTARGET=tgt\tRESULT=FAILURE\t"); expLog.append("DESCRIPTION=description of an audit log"); expLog.append("\tPERMISSIONS=admin group"); if (appId != null) { expLog.append("\tAPPID=app_1"); } if (attemptId != null) { expLog.append("\tAPPATTEMPTID=app_attempt_1"); } if (containerId != null) { expLog.append("\tCONTAINERID=container_1"); } assertEquals(expLog.toString(), fLog); }
void function(boolean checkIP, ApplicationId appId, ApplicationAttemptId attemptId, ContainerId containerId) { String fLog = RMAuditLogger.createFailureLog(USER, OPERATION, PERM, TARGET, DESC, appId, attemptId, containerId); StringBuilder expLog = new StringBuilder(); expLog.append(STR); if (checkIP) { InetAddress ip = Server.getRemoteIp(); expLog.append(Keys.IP.name() + "=" + ip.getHostAddress() + "\t"); } expLog.append(STR); expLog.append(STR); expLog.append(STR); if (appId != null) { expLog.append(STR); } if (attemptId != null) { expLog.append(STR); } if (containerId != null) { expLog.append(STR); } assertEquals(expLog.toString(), fLog); }
/** * Test the AuditLog format for failure events. */
Test the AuditLog format for failure events
testFailureLogFormatHelper
{ "repo_name": "tseen/Federated-HDFS", "path": "tseenliu/FedHDFS-hadoop-src/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAuditLogger.java", "license": "apache-2.0", "size": 8620 }
[ "java.net.InetAddress", "org.apache.hadoop.ipc.Server", "org.apache.hadoop.yarn.api.records.ApplicationAttemptId", "org.apache.hadoop.yarn.api.records.ApplicationId", "org.apache.hadoop.yarn.api.records.ContainerId", "org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger", "org.junit.Assert" ]
import java.net.InetAddress; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger; import org.junit.Assert;
import java.net.*; import org.apache.hadoop.ipc.*; import org.apache.hadoop.yarn.api.records.*; import org.apache.hadoop.yarn.server.resourcemanager.*; import org.junit.*;
[ "java.net", "org.apache.hadoop", "org.junit" ]
java.net; org.apache.hadoop; org.junit;
1,679,185
public static void register(String algorithmURI, String implementingClass) throws AlgorithmAlreadyRegisteredException { // check whether URI is already registered Class registeredClass = getImplementingClass(algorithmURI); if (registeredClass != null) { Object exArgs[] = { algorithmURI, registeredClass }; throw new AlgorithmAlreadyRegisteredException( "algorithm.alreadyRegistered", exArgs); } try { _canonicalizerHash.put(algorithmURI, Class.forName(implementingClass)); } catch (ClassNotFoundException e) { throw new RuntimeException("c14n class not found"); } }
static void function(String algorithmURI, String implementingClass) throws AlgorithmAlreadyRegisteredException { Class registeredClass = getImplementingClass(algorithmURI); if (registeredClass != null) { Object exArgs[] = { algorithmURI, registeredClass }; throw new AlgorithmAlreadyRegisteredException( STR, exArgs); } try { _canonicalizerHash.put(algorithmURI, Class.forName(implementingClass)); } catch (ClassNotFoundException e) { throw new RuntimeException(STR); } }
/** * Method register * * @param algorithmURI * @param implementingClass * @throws AlgorithmAlreadyRegisteredException */
Method register
register
{ "repo_name": "haikuowuya/android_system_code", "path": "src/com/sun/org/apache/xml/internal/security/c14n/Canonicalizer.java", "license": "apache-2.0", "size": 11878 }
[ "com.sun.org.apache.xml.internal.security.exceptions.AlgorithmAlreadyRegisteredException" ]
import com.sun.org.apache.xml.internal.security.exceptions.AlgorithmAlreadyRegisteredException;
import com.sun.org.apache.xml.internal.security.exceptions.*;
[ "com.sun.org" ]
com.sun.org;
1,974,102
@Override public List<int[]> getInitialSetups(T owner) { List<int[]> result = new ArrayList<>(); List<PackData> vpd = owner.getDataSetups(); for (PackData pd:vpd) { result.add(pd.getBits()); owner.printBits(pd.getBits()); } return result; }
List<int[]> function(T owner) { List<int[]> result = new ArrayList<>(); List<PackData> vpd = owner.getDataSetups(); for (PackData pd:vpd) { result.add(pd.getBits()); owner.printBits(pd.getBits()); } return result; }
/** * Provides the initial gene setup. * * @param owner the owning the algorithm * @return the genes (0s and 1s) */
Provides the initial gene setup
getInitialSetups
{ "repo_name": "waikato-datamining/adams-base", "path": "adams-weka/src/main/java/adams/opt/genetic/initialsetups/PackDataInitialSetupsProvider.java", "license": "gpl-3.0", "size": 2057 }
[ "java.util.ArrayList", "java.util.List" ]
import java.util.ArrayList; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,193,931
public static List<String> convertStringArrayToList(String[] data) { List<String> result = new ArrayList<String>(); for (int i = 0; i < data.length; i++) { result.add(data[i]); }// end for return result; }
static List<String> function(String[] data) { List<String> result = new ArrayList<String>(); for (int i = 0; i < data.length; i++) { result.add(data[i]); } return result; }
/** * Convenience method to converts an array of <code>String</code> to a * <code>List&lt;String&gt;</code>. * * @param data * an array of <code>String</code> * @return a new <code>List&lt;String&gt;</code> */
Convenience method that converts an array of <code>String</code> to a <code>List&lt;String&gt;</code>
convertStringArrayToList
{ "repo_name": "freeVM/freeVM", "path": "enhanced/java/classlib/modules/lang-management/src/main/java/org/apache/harmony/lang/management/ManagementUtils.java", "license": "apache-2.0", "size": 64134 }
[ "java.util.ArrayList", "java.util.List" ]
import java.util.ArrayList; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
659,995
public EventhubInner withCaptureDescription(CaptureDescription captureDescription) { if (this.innerProperties() == null) { this.innerProperties = new EventhubProperties(); } this.innerProperties().withCaptureDescription(captureDescription); return this; }
EventhubInner function(CaptureDescription captureDescription) { if (this.innerProperties() == null) { this.innerProperties = new EventhubProperties(); } this.innerProperties().withCaptureDescription(captureDescription); return this; }
/** * Set the captureDescription property: Properties of capture description. * * @param captureDescription the captureDescription value to set. * @return the EventhubInner object itself. */
Set the captureDescription property: Properties of capture description
withCaptureDescription
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-eventhubs/src/main/java/com/azure/resourcemanager/eventhubs/fluent/models/EventhubInner.java", "license": "mit", "size": 6666 }
[ "com.azure.resourcemanager.eventhubs.models.CaptureDescription" ]
import com.azure.resourcemanager.eventhubs.models.CaptureDescription;
import com.azure.resourcemanager.eventhubs.models.*;
[ "com.azure.resourcemanager" ]
com.azure.resourcemanager;
265,817
public MibValueSymbol getParent() { ObjectIdentifierValue oid; if (value instanceof ObjectIdentifierValue) { oid = ((ObjectIdentifierValue) value).getParent(); if (oid != null) { return oid.getSymbol(); } } return null; }
MibValueSymbol function() { ObjectIdentifierValue oid; if (value instanceof ObjectIdentifierValue) { oid = ((ObjectIdentifierValue) value).getParent(); if (oid != null) { return oid.getSymbol(); } } return null; }
/** * Returns the parent symbol in the OID tree. This is a * convenience method for value symbols that have object * identifier values. * * @return the parent symbol in the OID tree, or * null for none or if not applicable * * @see net.percederberg.mibble.value.ObjectIdentifierValue * * @since 2.5 */
Returns the parent symbol in the OID tree. This is a convenience method for value symbols that have object identifier values
getParent
{ "repo_name": "richb-hanover/mibble-2.9.2", "path": "src/java/net/percederberg/mibble/MibValueSymbol.java", "license": "gpl-2.0", "size": 10580 }
[ "net.percederberg.mibble.value.ObjectIdentifierValue" ]
import net.percederberg.mibble.value.ObjectIdentifierValue;
import net.percederberg.mibble.value.*;
[ "net.percederberg.mibble" ]
net.percederberg.mibble;
2,052,213
public boolean checkAxisMismatch() { int left = _path.getAxis(); int right = ((Step)_step).getAxis(); if (((left == Axis.ANCESTOR) || (left == Axis.ANCESTORORSELF)) && ((right == Axis.CHILD) || (right == Axis.DESCENDANT) || (right == Axis.DESCENDANTORSELF) || (right == Axis.PARENT) || (right == Axis.PRECEDING) || (right == Axis.PRECEDINGSIBLING))) return true; if ((left == Axis.CHILD) && (right == Axis.ANCESTOR) || (right == Axis.ANCESTORORSELF) || (right == Axis.PARENT) || (right == Axis.PRECEDING)) return true; if ((left == Axis.DESCENDANT) || (left == Axis.DESCENDANTORSELF)) return true; if (((left == Axis.FOLLOWING) || (left == Axis.FOLLOWINGSIBLING)) && ((right == Axis.FOLLOWING) || (right == Axis.PARENT) || (right == Axis.PRECEDING) || (right == Axis.PRECEDINGSIBLING))) return true; if (((left == Axis.PRECEDING) || (left == Axis.PRECEDINGSIBLING)) && ((right == Axis.DESCENDANT) || (right == Axis.DESCENDANTORSELF) || (right == Axis.FOLLOWING) || (right == Axis.FOLLOWINGSIBLING) || (right == Axis.PARENT) || (right == Axis.PRECEDING) || (right == Axis.PRECEDINGSIBLING))) return true; if ((right == Axis.FOLLOWING) && (left == Axis.CHILD)) { // Special case for '@*/following::*' expressions. The resulting // iterator is initialised with the parent's first child, and this // can cause duplicates in the output if the parent has more than // one attribute that matches the left step. if (_path instanceof Step) { int type = ((Step)_path).getNodeType(); if (type == DTM.ATTRIBUTE_NODE) return true; } } return false; }
boolean function() { int left = _path.getAxis(); int right = ((Step)_step).getAxis(); if (((left == Axis.ANCESTOR) || (left == Axis.ANCESTORORSELF)) && ((right == Axis.CHILD) || (right == Axis.DESCENDANT) || (right == Axis.DESCENDANTORSELF) || (right == Axis.PARENT) || (right == Axis.PRECEDING) || (right == Axis.PRECEDINGSIBLING))) return true; if ((left == Axis.CHILD) && (right == Axis.ANCESTOR) || (right == Axis.ANCESTORORSELF) || (right == Axis.PARENT) || (right == Axis.PRECEDING)) return true; if ((left == Axis.DESCENDANT) || (left == Axis.DESCENDANTORSELF)) return true; if (((left == Axis.FOLLOWING) || (left == Axis.FOLLOWINGSIBLING)) && ((right == Axis.FOLLOWING) || (right == Axis.PARENT) || (right == Axis.PRECEDING) || (right == Axis.PRECEDINGSIBLING))) return true; if (((left == Axis.PRECEDING) || (left == Axis.PRECEDINGSIBLING)) && ((right == Axis.DESCENDANT) || (right == Axis.DESCENDANTORSELF) || (right == Axis.FOLLOWING) || (right == Axis.FOLLOWINGSIBLING) || (right == Axis.PARENT) || (right == Axis.PRECEDING) || (right == Axis.PRECEDINGSIBLING))) return true; if ((right == Axis.FOLLOWING) && (left == Axis.CHILD)) { if (_path instanceof Step) { int type = ((Step)_path).getNodeType(); if (type == DTM.ATTRIBUTE_NODE) return true; } } return false; }
/** * This method is used to determine if this parent location path is a * combination of two step's with axes that will create duplicate or * unordered nodes. */
This method is used to determine if this parent location path is a combination of two step's with axes that will create duplicate or unordered nodes
checkAxisMismatch
{ "repo_name": "rokn/Count_Words_2015", "path": "testing/openjdk2/jaxp/src/com/sun/org/apache/xalan/internal/xsltc/compiler/ParentLocationPath.java", "license": "mit", "size": 9715 }
[ "com.sun.org.apache.xml.internal.dtm.Axis" ]
import com.sun.org.apache.xml.internal.dtm.Axis;
import com.sun.org.apache.xml.internal.dtm.*;
[ "com.sun.org" ]
com.sun.org;
1,510,911
void setPropertyReplacers(List<PropertyReplacer> propertyReplacers);
void setPropertyReplacers(List<PropertyReplacer> propertyReplacers);
/** * Setter for the components property substitutions * * @param propertyReplacers */
Setter for the component's property substitutions
setPropertyReplacers
{ "repo_name": "jruchcolo/rice-cd", "path": "rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/uif/component/Component.java", "license": "apache-2.0", "size": 45039 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
625,826
public static MethodDeclaration findParentMethodDeclaration(ASTNode node) { while (node != null) { if (node instanceof MethodDeclaration) { return (MethodDeclaration) node; } else if (node instanceof BodyDeclaration || node instanceof AnonymousClassDeclaration || node instanceof LambdaExpression) { return null; } node= node.getParent(); } return null; }
static MethodDeclaration function(ASTNode node) { while (node != null) { if (node instanceof MethodDeclaration) { return (MethodDeclaration) node; } else if (node instanceof BodyDeclaration || node instanceof AnonymousClassDeclaration || node instanceof LambdaExpression) { return null; } node= node.getParent(); } return null; }
/** * The node's enclosing method declaration or <code>null</code> if * the node is not inside a method and is not a method declaration itself. * * @param node a node * @return the enclosing method declaration or <code>null</code> */
The node's enclosing method declaration or <code>null</code> if the node is not inside a method and is not a method declaration itself
findParentMethodDeclaration
{ "repo_name": "eclipse/flux", "path": "org.eclipse.flux.jdt.service/jdt ui/org/eclipse/jdt/internal/ui/text/correction/ASTResolving.java", "license": "bsd-3-clause", "size": 45693 }
[ "org.eclipse.jdt.core.dom.ASTNode", "org.eclipse.jdt.core.dom.AnonymousClassDeclaration", "org.eclipse.jdt.core.dom.BodyDeclaration", "org.eclipse.jdt.core.dom.LambdaExpression", "org.eclipse.jdt.core.dom.MethodDeclaration" ]
import org.eclipse.jdt.core.dom.ASTNode; import org.eclipse.jdt.core.dom.AnonymousClassDeclaration; import org.eclipse.jdt.core.dom.BodyDeclaration; import org.eclipse.jdt.core.dom.LambdaExpression; import org.eclipse.jdt.core.dom.MethodDeclaration;
import org.eclipse.jdt.core.dom.*;
[ "org.eclipse.jdt" ]
org.eclipse.jdt;
256,076
public static Image getImage(InputStream is) throws IOException { PngImage png = new PngImage(is); return png.getImage(); }
static Image function(InputStream is) throws IOException { PngImage png = new PngImage(is); return png.getImage(); }
/** Reads a PNG from a stream. * @param is the stream * @throws IOException on error * @return the image */
Reads a PNG from a stream
getImage
{ "repo_name": "shitalm/jsignpdf2", "path": "src/main/java/com/lowagie/text/pdf/codec/PngImage.java", "license": "gpl-2.0", "size": 35889 }
[ "com.lowagie.text.Image", "java.io.IOException", "java.io.InputStream" ]
import com.lowagie.text.Image; import java.io.IOException; import java.io.InputStream;
import com.lowagie.text.*; import java.io.*;
[ "com.lowagie.text", "java.io" ]
com.lowagie.text; java.io;
1,057,071
private static Collection<Object> normalizeCollection(Collection<?> collection) throws IllegalArgumentException { if (collection.size() == 0) { return Collections.emptyList(); } else { final List<Object> lst = new ArrayList<>(collection.size()); for (final Object it : collection) { final Object normalized = normalizeType(it); lst.add(normalized); if (normalized.getClass() != lst.get(0).getClass()) { throw new IllegalArgumentException( "Invalid configuration property. Heterogeneous collection value!"); } } return lst; } }
static Collection<Object> function(Collection<?> collection) throws IllegalArgumentException { if (collection.size() == 0) { return Collections.emptyList(); } else { final List<Object> lst = new ArrayList<>(collection.size()); for (final Object it : collection) { final Object normalized = normalizeType(it); lst.add(normalized); if (normalized.getClass() != lst.get(0).getClass()) { throw new IllegalArgumentException( STR); } } return lst; } }
/** * Normalizes a collection. * * @param collection the collection that entries should be normalized * @return a collection that contains the normalized entries * @throws IllegalArgumentException if the type of the normalized values differ or an invalid type has been given */
Normalizes a collection
normalizeCollection
{ "repo_name": "AchimHentschel/smarthome", "path": "bundles/config/org.eclipse.smarthome.config.core/src/main/java/org/eclipse/smarthome/config/core/ConfigUtil.java", "license": "epl-1.0", "size": 6892 }
[ "java.util.ArrayList", "java.util.Collection", "java.util.Collections", "java.util.List" ]
import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,782,784
public HttpSessionContext getSessionContext() { if (sessionContext == null) sessionContext = new StandardSessionContext(); return (sessionContext); } // ----------------------------------------------HttpSession Public Methods
HttpSessionContext function() { if (sessionContext == null) sessionContext = new StandardSessionContext(); return (sessionContext); }
/** * Return the session context with which this session is associated. * * @deprecated As of Version 2.1, this method is deprecated and has no * replacement. It will be removed in a future version of the * Java Servlet API. */
Return the session context with which this session is associated
getSessionContext
{ "repo_name": "yuyupapa/OpenSource", "path": "apache-tomcat-6.0.48/java/org/apache/catalina/session/StandardSession.java", "license": "apache-2.0", "size": 60890 }
[ "javax.servlet.http.HttpSessionContext" ]
import javax.servlet.http.HttpSessionContext;
import javax.servlet.http.*;
[ "javax.servlet" ]
javax.servlet;
2,654,173
public Path createSnapshot(Path path, String snapshotName) throws IOException { throw new UnsupportedOperationException(getClass().getSimpleName() + " doesn't support createSnapshot"); }
Path function(Path path, String snapshotName) throws IOException { throw new UnsupportedOperationException(getClass().getSimpleName() + STR); }
/** * Create a snapshot * @param path The directory where snapshots will be taken. * @param snapshotName The name of the snapshot * @return the snapshot path. */
Create a snapshot
createSnapshot
{ "repo_name": "Microsoft-CISL/hadoop-prototype", "path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java", "license": "apache-2.0", "size": 116772 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
215,867
public static Authorizations toAuthorizations(byte[] protoBytes) throws DeserializationException { if (protoBytes == null) return null; ClientProtos.Authorizations.Builder builder = ClientProtos.Authorizations.newBuilder(); ClientProtos.Authorizations proto = null; try { proto = builder.mergeFrom(protoBytes).build(); } catch (InvalidProtocolBufferException e) { throw new DeserializationException(e); } return toAuthorizations(proto); }
static Authorizations function(byte[] protoBytes) throws DeserializationException { if (protoBytes == null) return null; ClientProtos.Authorizations.Builder builder = ClientProtos.Authorizations.newBuilder(); ClientProtos.Authorizations proto = null; try { proto = builder.mergeFrom(protoBytes).build(); } catch (InvalidProtocolBufferException e) { throw new DeserializationException(e); } return toAuthorizations(proto); }
/** * Convert a protocol buffer Authorizations bytes to a client Authorizations * * @param protoBytes * @return the converted client Authorizations * @throws DeserializationException */
Convert a protocol buffer Authorizations bytes to a client Authorizations
toAuthorizations
{ "repo_name": "lshmouse/hbase", "path": "hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java", "license": "apache-2.0", "size": 118965 }
[ "com.google.protobuf.InvalidProtocolBufferException", "org.apache.hadoop.hbase.exceptions.DeserializationException", "org.apache.hadoop.hbase.protobuf.generated.ClientProtos", "org.apache.hadoop.hbase.security.visibility.Authorizations" ]
import com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.security.visibility.Authorizations;
import com.google.protobuf.*; import org.apache.hadoop.hbase.exceptions.*; import org.apache.hadoop.hbase.protobuf.generated.*; import org.apache.hadoop.hbase.security.visibility.*;
[ "com.google.protobuf", "org.apache.hadoop" ]
com.google.protobuf; org.apache.hadoop;
186,510
T setContents(String data, Charset charset);
T setContents(String data, Charset charset);
/** * Set the contents of this {@link WriteableResource} to the given {@link String} using the specified encoding. */
Set the contents of this <code>WriteableResource</code> to the given <code>String</code> using the specified encoding
setContents
{ "repo_name": "forge/core", "path": "resources/api/src/main/java/org/jboss/forge/addon/resource/WriteableResource.java", "license": "epl-1.0", "size": 1944 }
[ "java.nio.charset.Charset" ]
import java.nio.charset.Charset;
import java.nio.charset.*;
[ "java.nio" ]
java.nio;
702,625
static int checkPositiveAndMakeMultipleOf32(int bits) { checkArgument(bits > 0, "Number of bits must be positive"); return (bits + 31) & ~31; } // TODO(kevinb): Maybe expose this class via a static Hashing method? @VisibleForTesting static final class ConcatenatedHashFunction extends AbstractCompositeHashFunction { private final int bits; ConcatenatedHashFunction(HashFunction... functions) { super(functions); int bitSum = 0; for (HashFunction function : functions) { bitSum += function.bits(); } this.bits = bitSum; }
static int checkPositiveAndMakeMultipleOf32(int bits) { checkArgument(bits > 0, STR); return (bits + 31) & ~31; } static final class ConcatenatedHashFunction extends AbstractCompositeHashFunction { private final int bits; ConcatenatedHashFunction(HashFunction... functions) { super(functions); int bitSum = 0; for (HashFunction function : functions) { bitSum += function.bits(); } this.bits = bitSum; }
/** * Checks that the passed argument is positive, and ceils it to a multiple of 32. */
Checks that the passed argument is positive, and ceils it to a multiple of 32
checkPositiveAndMakeMultipleOf32
{ "repo_name": "mike10004/appengine-imaging", "path": "gaecompat-awt-imaging/src/common/com/gaecompat/repackaged/com/google/common/hash/Hashing.java", "license": "apache-2.0", "size": 15843 }
[ "com.gaecompat.repackaged.com.google.common.base.Preconditions" ]
import com.gaecompat.repackaged.com.google.common.base.Preconditions;
import com.gaecompat.repackaged.com.google.common.base.*;
[ "com.gaecompat.repackaged" ]
com.gaecompat.repackaged;
2,524,051
@SuppressWarnings("unchecked") public static FieldElement createFromParameters( Map<String, ?> params, FieldElementCollection collection, WorldLayers worlds) { if (!params.containsKey(CLASS_PROPERTY)) { throw new IllegalArgumentException("class not specified for element: " + params); } Class<? extends FieldElement> elementClass = null; // if package not specified, use this package String className = (String) params.get(CLASS_PROPERTY); if (className.indexOf('.') == -1) { className = "com.dozingcatsoftware.vectorpinball.elements." + className; } try { elementClass = (Class<? extends FieldElement>) Class.forName(className); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } FieldElement self; try { self = elementClass.getConstructor().newInstance(); } catch (Exception e) { throw new RuntimeException(e); } // TODO: Have `initialize` take WorldLayers instead of a single World. int layer = params.containsKey(LAYER_PROPERTY) ? ((Number)params.get(LAYER_PROPERTY)).intValue() : 0; self.initialize(params, collection, worlds.existingOrNewWorldForLayer(layer)); return self; }
@SuppressWarnings(STR) static FieldElement function( Map<String, ?> params, FieldElementCollection collection, WorldLayers worlds) { if (!params.containsKey(CLASS_PROPERTY)) { throw new IllegalArgumentException(STR + params); } Class<? extends FieldElement> elementClass = null; String className = (String) params.get(CLASS_PROPERTY); if (className.indexOf('.') == -1) { className = STR + className; } try { elementClass = (Class<? extends FieldElement>) Class.forName(className); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } FieldElement self; try { self = elementClass.getConstructor().newInstance(); } catch (Exception e) { throw new RuntimeException(e); } int layer = params.containsKey(LAYER_PROPERTY) ? ((Number)params.get(LAYER_PROPERTY)).intValue() : 0; self.initialize(params, collection, worlds.existingOrNewWorldForLayer(layer)); return self; }
/** * Creates and returns a FieldElement object from the given map of parameters. The class to * instantiate is given by the "class" property of the parameter map. Calls the no-argument * constructor of the default or custom class, and then calls initialize() passing the * parameter map and World. */
Creates and returns a FieldElement object from the given map of parameters. The class to instantiate is given by the "class" property of the parameter map. Calls the no-argument constructor of the default or custom class, and then calls initialize() passing the parameter map and World
createFromParameters
{ "repo_name": "dozingcat/Vector-Pinball-Editor", "path": "src/com/dozingcatsoftware/vectorpinball/elements/FieldElement.java", "license": "gpl-3.0", "size": 10299 }
[ "com.dozingcatsoftware.vectorpinball.model.WorldLayers", "java.util.Map" ]
import com.dozingcatsoftware.vectorpinball.model.WorldLayers; import java.util.Map;
import com.dozingcatsoftware.vectorpinball.model.*; import java.util.*;
[ "com.dozingcatsoftware.vectorpinball", "java.util" ]
com.dozingcatsoftware.vectorpinball; java.util;
2,091,349
@IntRange(from = 0) public int getStartIndex() { return mStartIndex; }
@IntRange(from = 0) int function() { return mStartIndex; }
/** * Returns start index of the selected part of text. */
Returns start index of the selected part of text
getStartIndex
{ "repo_name": "AndroidX/androidx", "path": "textclassifier/textclassifier/src/main/java/androidx/textclassifier/TextSelection.java", "license": "apache-2.0", "size": 17092 }
[ "androidx.annotation.IntRange" ]
import androidx.annotation.IntRange;
import androidx.annotation.*;
[ "androidx.annotation" ]
androidx.annotation;
1,125,234
@Override public void writeToNBT(NBTTagCompound tCompound) { super.writeToNBT(tCompound); for (int i = 0; i < 24; i++) { if (inventory[i] != null) { NBTTagCompound tc = new NBTTagCompound(); inventory[i].writeToNBT(tc); tCompound.setTag("inventory" + i, tc); } } tCompound.setByte("filterColor", (byte) filterColor.ordinal()); tCompound.setByte("mode", (byte) mode); tCompound.setByte("priority", (byte) priority); }
void function(NBTTagCompound tCompound) { super.writeToNBT(tCompound); for (int i = 0; i < 24; i++) { if (inventory[i] != null) { NBTTagCompound tc = new NBTTagCompound(); inventory[i].writeToNBT(tc); tCompound.setTag(STR + i, tc); } } tCompound.setByte(STR, (byte) filterColor.ordinal()); tCompound.setByte("mode", (byte) mode); tCompound.setByte(STR, (byte) priority); }
/** * This function gets called whenever the world/chunk is saved */
This function gets called whenever the world/chunk is saved
writeToNBT
{ "repo_name": "raulsmail/GlowPower", "path": "src/main/java/com/bluepowermod/tileentities/tier3/TileManager.java", "license": "gpl-3.0", "size": 11054 }
[ "net.minecraft.nbt.NBTTagCompound" ]
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.*;
[ "net.minecraft.nbt" ]
net.minecraft.nbt;
2,470,669
public Object readList(AbstractHessianInput in, int length, String type) throws HessianProtocolException, IOException { Deserializer deserializer = getDeserializer(type); if (deserializer != null) return deserializer.readList(in, length); else return new CollectionDeserializer(ArrayList.class).readList(in, length); }
Object function(AbstractHessianInput in, int length, String type) throws HessianProtocolException, IOException { Deserializer deserializer = getDeserializer(type); if (deserializer != null) return deserializer.readList(in, length); else return new CollectionDeserializer(ArrayList.class).readList(in, length); }
/** * Reads the object as a list. */
Reads the object as a list
readList
{ "repo_name": "mingbotang/dubbo", "path": "hessian-lite/src/main/java/com/alibaba/com/caucho/hessian/io/SerializerFactory.java", "license": "apache-2.0", "size": 25417 }
[ "java.io.IOException", "java.util.ArrayList" ]
import java.io.IOException; import java.util.ArrayList;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
1,840,267
public static <T> T findBy(final Collection<T> collection, final Filter<T> filter) { for (T element : collection) { if (filter.accept(element)) { return element; } } return null; }
static <T> T function(final Collection<T> collection, final Filter<T> filter) { for (T element : collection) { if (filter.accept(element)) { return element; } } return null; }
/** * Iterates the Collection and finds all object elements that match the Filter criteria. * <p/> * @param <T> the class type of the Collection elements. * @param collection the Collection of elements to iterate and filter. * @param filter the Filter applied to the Collection of elements in search of the matching element. * @return a single element from the Collection that match the criteria of the Filter. If multiple elements match * the Filter criteria, then this method will return the first one. If no element of the Collection matches * the criteria of the Filter, then this method returns null. */
Iterates the Collection and finds all object elements that match the Filter criteria.
findBy
{ "repo_name": "nchandrappa/incubator-geode", "path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/util/CollectionUtils.java", "license": "apache-2.0", "size": 9119 }
[ "com.gemstone.gemfire.internal.lang.Filter", "java.util.Collection" ]
import com.gemstone.gemfire.internal.lang.Filter; import java.util.Collection;
import com.gemstone.gemfire.internal.lang.*; import java.util.*;
[ "com.gemstone.gemfire", "java.util" ]
com.gemstone.gemfire; java.util;
1,934,682
public boolean validateHighLowDate(String missingDateMessage) { lowDate = validateDate(lowDateStr, null, missingDateMessage ); highDate = validateDate(highDateStr, lowDateStr, missingDateMessage); if (lowDate == null || highDate == null ) { return false; } if (highDate.getTime() < lowDate.getTime()) { Date tmpDate = highDate; highDate = lowDate; lowDate = tmpDate; String tmpString = highDateStr; highDateStr = lowDateStr; lowDateStr = tmpString; } return true; }
boolean function(String missingDateMessage) { lowDate = validateDate(lowDateStr, null, missingDateMessage ); highDate = validateDate(highDateStr, lowDateStr, missingDateMessage); if (lowDate == null || highDate == null ) { return false; } if (highDate.getTime() < lowDate.getTime()) { Date tmpDate = highDate; highDate = lowDate; lowDate = tmpDate; String tmpString = highDateStr; highDateStr = lowDateStr; lowDateStr = tmpString; } return true; }
/** * <ol> * <li>High date picks up low date if it ain't filled in, * <li>they can't both be empty * <li>they have to be well formed. * * @return true if valid, false otherwise */
High date picks up low date if it ain't filled in; they can't both be empty; they have to be well formed
validateHighLowDate
{ "repo_name": "kalwar/openelisglobal-core", "path": "app/src/us/mn/state/health/lims/reports/action/implementation/Report.java", "license": "mpl-2.0", "size": 21163 }
[ "java.sql.Date" ]
import java.sql.Date;
import java.sql.*;
[ "java.sql" ]
java.sql;
345,439
protected final EditCoordinate getEC() { return OTFUtility.getEditCoordinate(); }
final EditCoordinate function() { return OTFUtility.getEditCoordinate(); }
/** * Returns the edit coordinate. * * @return the edit coordinate */
Returns the edit coordinate
getEC
{ "repo_name": "vaskaloidis/va-isaac-gui", "path": "import-export/src/main/java/gov/va/isaac/models/util/CommonBase.java", "license": "apache-2.0", "size": 1951 }
[ "gov.va.isaac.util.OTFUtility", "org.ihtsdo.otf.tcc.api.coordinate.EditCoordinate" ]
import gov.va.isaac.util.OTFUtility; import org.ihtsdo.otf.tcc.api.coordinate.EditCoordinate;
import gov.va.isaac.util.*; import org.ihtsdo.otf.tcc.api.coordinate.*;
[ "gov.va.isaac", "org.ihtsdo.otf" ]
gov.va.isaac; org.ihtsdo.otf;
1,009,697
private static String generateSqlFilter(Table table, ExpressionTree tree, List<Object> params, List<String> joins, boolean dbHasJoinCastBug) throws MetaException { assert table != null; if (tree.getRoot() == null) { return ""; } PartitionFilterGenerator visitor = new PartitionFilterGenerator( table, params, joins, dbHasJoinCastBug); tree.accept(visitor); if (visitor.filterBuffer.hasError()) { LOG.info("Unable to push down SQL filter: " + visitor.filterBuffer.getErrorMessage()); return null; } // Some joins might be null (see processNode for LeafNode), clean them up. for (int i = 0; i < joins.size(); ++i) { if (joins.get(i) != null) continue; joins.remove(i--); } return "(" + visitor.filterBuffer.getFilter() + ")"; }
static String function(Table table, ExpressionTree tree, List<Object> params, List<String> joins, boolean dbHasJoinCastBug) throws MetaException { assert table != null; if (tree.getRoot() == null) { return ""; } PartitionFilterGenerator visitor = new PartitionFilterGenerator( table, params, joins, dbHasJoinCastBug); tree.accept(visitor); if (visitor.filterBuffer.hasError()) { LOG.info(STR + visitor.filterBuffer.getErrorMessage()); return null; } for (int i = 0; i < joins.size(); ++i) { if (joins.get(i) != null) continue; joins.remove(i--); } return "(" + visitor.filterBuffer.getFilter() + ")"; }
/** * Generate the ANSI SQL92 filter for the given expression tree * @param table the table being queried * @param params the ordered parameters for the resulting expression * @param joins the joins necessary for the resulting expression * @return the string representation of the expression tree */
Generate the ANSI SQL92 filter for the given expression tree
generateSqlFilter
{ "repo_name": "scalingdata/Impala", "path": "thirdparty/hive-1.2.1.2.3.0.0-2557/src/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java", "license": "apache-2.0", "size": 73915 }
[ "java.util.List", "org.apache.hadoop.hive.metastore.api.MetaException", "org.apache.hadoop.hive.metastore.api.Table", "org.apache.hadoop.hive.metastore.parser.ExpressionTree" ]
import java.util.List; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.parser.ExpressionTree;
import java.util.*; import org.apache.hadoop.hive.metastore.api.*; import org.apache.hadoop.hive.metastore.parser.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
2,381,313
public EntityResolver getEntityResolver() { return platformUnmarshaller.getEntityResolver(); }
EntityResolver function() { return platformUnmarshaller.getEntityResolver(); }
/** * Get the EntityResolver set on this XMLUnmarshaller * @return the EntityResolver set on this XMLUnmarshaller */
Get the EntityResolver set on this XMLUnmarshaller
getEntityResolver
{ "repo_name": "RallySoftware/eclipselink.runtime", "path": "foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/oxm/XMLUnmarshaller.java", "license": "epl-1.0", "size": 37054 }
[ "org.xml.sax.EntityResolver" ]
import org.xml.sax.EntityResolver;
import org.xml.sax.*;
[ "org.xml.sax" ]
org.xml.sax;
2,322,347
@Before public void ensureNoJobIsLingering() throws Exception { JobManagerCommunicationUtils.waitUntilNoJobIsRunning(flink.getLeaderGateway(timeout)); } // ------------------------------------------------------------------------ // Suite of Tests // // The tests here are all not activated (by an @Test tag), but need // to be invoked from the extending classes. That way, the classes can // select which tests to run. // ------------------------------------------------------------------------
void function() throws Exception { JobManagerCommunicationUtils.waitUntilNoJobIsRunning(flink.getLeaderGateway(timeout)); }
/** * Makes sure that no job is on the JobManager any more from any previous tests that use * the same mini cluster. Otherwise, missing slots may happen. */
Makes sure that no job is on the JobManager any more from any previous tests that use the same mini cluster. Otherwise, missing slots may happen
ensureNoJobIsLingering
{ "repo_name": "DieBauer/flink", "path": "flink-connectors/flink-connector-kafka-base/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaConsumerTestBase.java", "license": "apache-2.0", "size": 85104 }
[ "org.apache.flink.streaming.connectors.kafka.testutils.JobManagerCommunicationUtils" ]
import org.apache.flink.streaming.connectors.kafka.testutils.JobManagerCommunicationUtils;
import org.apache.flink.streaming.connectors.kafka.testutils.*;
[ "org.apache.flink" ]
org.apache.flink;
2,272,208
public int nextInt() throws IOException { int p = peeked; if (p == PEEKED_NONE) { p = doPeek(); } int result; if (p == PEEKED_LONG) { result = (int) peekedLong; if (peekedLong != result) { // Make sure no precision was lost casting to 'int'. throw new NumberFormatException("Expected an int but was " + peekedLong + locationString()); } peeked = PEEKED_NONE; pathIndices[stackSize - 1]++; return result; } if (p == PEEKED_NUMBER) { peekedString = new String(buffer, pos, peekedNumberLength); pos += peekedNumberLength; } else if (p == PEEKED_SINGLE_QUOTED || p == PEEKED_DOUBLE_QUOTED || p == PEEKED_UNQUOTED) { if (p == PEEKED_UNQUOTED) { peekedString = nextUnquotedValue(); } else { peekedString = nextQuotedValue(p == PEEKED_SINGLE_QUOTED ? '\'' : '"'); } try { result = Integer.parseInt(peekedString); peeked = PEEKED_NONE; pathIndices[stackSize - 1]++; return result; } catch (NumberFormatException ignored) { // Fall back to parse as a double below. } } else { throw new IllegalStateException("Expected an int but was " + peek() + locationString()); } peeked = PEEKED_BUFFERED; double asDouble = Double.parseDouble(peekedString); // don't catch this NumberFormatException. result = (int) asDouble; if (result != asDouble) { // Make sure no precision was lost casting to 'int'. throw new NumberFormatException("Expected an int but was " + peekedString + locationString()); } peekedString = null; peeked = PEEKED_NONE; pathIndices[stackSize - 1]++; return result; }
int function() throws IOException { int p = peeked; if (p == PEEKED_NONE) { p = doPeek(); } int result; if (p == PEEKED_LONG) { result = (int) peekedLong; if (peekedLong != result) { throw new NumberFormatException(STR + peekedLong + locationString()); } peeked = PEEKED_NONE; pathIndices[stackSize - 1]++; return result; } if (p == PEEKED_NUMBER) { peekedString = new String(buffer, pos, peekedNumberLength); pos += peekedNumberLength; } else if (p == PEEKED_SINGLE_QUOTED || p == PEEKED_DOUBLE_QUOTED || p == PEEKED_UNQUOTED) { if (p == PEEKED_UNQUOTED) { peekedString = nextUnquotedValue(); } else { peekedString = nextQuotedValue(p == PEEKED_SINGLE_QUOTED ? '\'' : '"'); } try { result = Integer.parseInt(peekedString); peeked = PEEKED_NONE; pathIndices[stackSize - 1]++; return result; } catch (NumberFormatException ignored) { } } else { throw new IllegalStateException(STR + peek() + locationString()); } peeked = PEEKED_BUFFERED; double asDouble = Double.parseDouble(peekedString); result = (int) asDouble; if (result != asDouble) { throw new NumberFormatException(STR + peekedString + locationString()); } peekedString = null; peeked = PEEKED_NONE; pathIndices[stackSize - 1]++; return result; }
/** * Returns the {@link JsonToken#NUMBER int} value of the next token, * consuming it. If the next token is a string, this method will attempt to * parse it as an int. If the next token's numeric value cannot be exactly * represented by a Java {@code int}, this method throws. * * @throws IllegalStateException if the next token is not a literal value. * @throws NumberFormatException if the next literal value cannot be parsed * as a number, or exactly represented as an int. */
Returns the <code>JsonToken#NUMBER int</code> value of the next token, consuming it. If the next token is a string, this method will attempt to parse it as an int. If the next token's numeric value cannot be exactly represented by a Java int, this method throws
nextInt
{ "repo_name": "google/sagetv", "path": "third_party/GSON/java/sage/epg/sd/gson/stream/JsonReader.java", "license": "apache-2.0", "size": 49785 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,178,215
public List<Issue> getEntries() { return issues; }
List<Issue> function() { return issues; }
/** * Returns a page is issues. * @return */
Returns a page of issues
getEntries
{ "repo_name": "heroicefforts/Viable", "path": "viable-android/src/net/heroicefforts/viable/android/rep/it/gdata/IssuesFeed.java", "license": "gpl-3.0", "size": 1155 }
[ "java.util.List", "net.heroicefforts.viable.android.dao.Issue" ]
import java.util.List; import net.heroicefforts.viable.android.dao.Issue;
import java.util.*; import net.heroicefforts.viable.android.dao.*;
[ "java.util", "net.heroicefforts.viable" ]
java.util; net.heroicefforts.viable;
2,468,357
// [START display_result] private static void displayQueryResults(Bigquery bigquery, String projectId, Job completedJob) throws IOException { GetQueryResultsResponse queryResult = bigquery.jobs() .getQueryResults( projectId, completedJob .getJobReference() .getJobId() ).execute(); List<TableRow> rows = queryResult.getRows(); System.out.print("\nQuery Results:\n------------\n"); for (TableRow row : rows) { for (TableCell field : row.getF()) { System.out.printf("%-50s", field.getV()); } System.out.println(); } } // [END display_result]
static void function(Bigquery bigquery, String projectId, Job completedJob) throws IOException { GetQueryResultsResponse queryResult = bigquery.jobs() .getQueryResults( projectId, completedJob .getJobReference() .getJobId() ).execute(); List<TableRow> rows = queryResult.getRows(); System.out.print(STR); for (TableRow row : rows) { for (TableCell field : row.getF()) { System.out.printf("%-50s", field.getV()); } System.out.println(); } }
/** * Makes an API call to the BigQuery API * * @param bigquery an authorized BigQuery client * @param projectId a string containing the current project ID * @param completedJob to the completed Job * @throws IOException */
Makes an API call to the BigQuery API
displayQueryResults
{ "repo_name": "googlearchive/bigquery-samples-java", "path": "src/main/java/com/google/cloud/bigquery/samples/BigQueryJavaGettingStarted.java", "license": "apache-2.0", "size": 11021 }
[ "com.google.api.services.bigquery.Bigquery", "com.google.api.services.bigquery.model.GetQueryResultsResponse", "com.google.api.services.bigquery.model.Job", "com.google.api.services.bigquery.model.TableCell", "com.google.api.services.bigquery.model.TableRow", "java.io.IOException", "java.util.List" ]
import com.google.api.services.bigquery.Bigquery; import com.google.api.services.bigquery.model.GetQueryResultsResponse; import com.google.api.services.bigquery.model.Job; import com.google.api.services.bigquery.model.TableCell; import com.google.api.services.bigquery.model.TableRow; import java.io.IOException; import java.util.List;
import com.google.api.services.bigquery.*; import com.google.api.services.bigquery.model.*; import java.io.*; import java.util.*;
[ "com.google.api", "java.io", "java.util" ]
com.google.api; java.io; java.util;
1,351,404
public void av_dict_copy(Pointer<Pointer<?>> dst, Pointer<?> src, int flags) { Lib.av_dict_copy(dst, src, flags); }
void function(Pointer<Pointer<?>> dst, Pointer<?> src, int flags) { Lib.av_dict_copy(dst, src, flags); }
/** * Copy entries from one AVDictionary struct into another. * * NOTE: * Metadata is read using the AV_DICT_IGNORE_SUFFIX flag. * * @param dst pointer to a pointer to a AVDictionary struct. If dst is null, * this function will allocate a struct for you and put it in dst. * @param src pointer to source AVDictionary struct * @param flags flags to use when setting entries in dst */
Copy entries from one AVDictionary struct into another. Metadata is read using the AV_DICT_IGNORE_SUFFIX flag
av_dict_copy
{ "repo_name": "operutka/jlibav", "path": "jlibav/src/main/java/org/libav/avutil/bridge/AVUtilLibrary.java", "license": "lgpl-3.0", "size": 31604 }
[ "org.bridj.Pointer" ]
import org.bridj.Pointer;
import org.bridj.*;
[ "org.bridj" ]
org.bridj;
2,390,642
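A small usage sketch (not from the library) that copies one libav dictionary into a freshly allocated one via the wrapper above; obtaining the AVUtilLibrary instance is library-specific, Pointer.allocatePointer() is assumed to allocate a pointer-to-pointer cell, and the flag value 0 simply means no special copy flags:

import org.bridj.Pointer;
import org.libav.avutil.bridge.AVUtilLibrary;

class DictCopySketch {
    // Copies all entries of src into a new dictionary and returns a pointer to it.
    static Pointer<?> copyAll(AVUtilLibrary avutil, Pointer<?> src) {
        // Pointer-to-pointer cell that starts out null; libav allocates *dst as needed.
        Pointer<Pointer<?>> dst = Pointer.allocatePointer();
        avutil.av_dict_copy(dst, src, 0);
        return dst.get(); // may be null if src had no entries
    }
}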
HttpStatus getErrorCode();
HttpStatus getErrorCode();
/** * Return error code of PNG picture (200 : no error, 400 : height or width less than or equal to 0, 404 : Country code does not exist) * * @return errorCode */
Return error code of PNG picture (200 : no error, 400 : height or width less than or equal to 0, 404 : Country code does not exist)
getErrorCode
{ "repo_name": "sgrillon14/flags-rest-service", "path": "project/src/main/java/com/sgrillon/flags/service/PngContainer.java", "license": "agpl-3.0", "size": 2355 }
[ "org.springframework.http.HttpStatus" ]
import org.springframework.http.HttpStatus;
import org.springframework.http.*;
[ "org.springframework.http" ]
org.springframework.http;
917,305
private int addOneForTwo() { List<Integer> solutions = new ArrayList<Integer>(); for (int i = 0; i < state.length; i++) { int[][] nextState = copy(state); tryMove(nextState, player, i); if (hasTwoInRow(nextState, player) == true) { solutions.add(i); } } // TODO It is better to form three in a row where it will be possible to // form four in a row. if (solutions.size() == 0) { return -1; } else { Collections.shuffle(solutions); return solutions.get(0); } }
int function() { List<Integer> solutions = new ArrayList<Integer>(); for (int i = 0; i < state.length; i++) { int[][] nextState = copy(state); tryMove(nextState, player, i); if (hasTwoInRow(nextState, player) == true) { solutions.add(i); } } if (solutions.size() == 0) { return -1; } else { Collections.shuffle(solutions); return solutions.get(0); } }
/** * Add one piece in order to form two pieces in a row. * * @return Index of a column to play or -1 if rule is not applicable. */
Add one piece in order to form two pieces in a row
addOneForTwo
{ "repo_name": "VelbazhdSoftwareLLC/Complica4", "path": "client/src/eu/veldsoft/complica4/model/ia/SimpleRulesArtificialIntelligence.java", "license": "gpl-3.0", "size": 12465 }
[ "java.util.ArrayList", "java.util.Collections", "java.util.List" ]
import java.util.ArrayList; import java.util.Collections; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
469,846
public static byte[] getBytes(File file) throws IOException { return getBytes(new FileInputStream(file)); }
static byte[] function(File file) throws IOException { return getBytes(new FileInputStream(file)); }
/** * Reads the content of the File and returns it as a byte[]. * * @param file the file whose content we want to read * @return a byte[] containing the content of the file * @throws IOException if an IOException occurs. * @since 1.7.1 */
Reads the content of the File and returns it as a byte[]
getBytes
{ "repo_name": "xien777/yajsw", "path": "yajsw/wrapper/src/main/java/org/codehaus/groovy/runtime/DefaultGroovyMethods.java", "license": "lgpl-2.1", "size": 704150 }
[ "java.io.File", "java.io.FileInputStream", "java.io.IOException" ]
import java.io.File; import java.io.FileInputStream; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,416,102
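A short usage sketch for the helper above; the file path is a placeholder:

import java.io.File;
import java.io.IOException;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;

class GetBytesSketch {
    public static void main(String[] args) throws IOException {
        // Reads the whole file into memory as a byte array.
        byte[] data = DefaultGroovyMethods.getBytes(new File("example.bin"));
        System.out.println("Read " + data.length + " bytes");
    }
}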
public Attribute getLookupAttribute(String lookupAttrName) { return stream(getLookupAttributes()) .filter(lookupAttr -> lookupAttr.getName().equals(lookupAttrName)) .findFirst() .orElse(null); }
Attribute function(String lookupAttrName) { return stream(getLookupAttributes()) .filter(lookupAttr -> lookupAttr.getName().equals(lookupAttrName)) .findFirst() .orElse(null); }
/** * Get lookup attribute by name (case insensitive), returns null if not found * * @param lookupAttrName lookup attribute name * @return lookup attribute or <tt>null</tt> */
Get lookup attribute by name (case insensitive), returns null if not found
getLookupAttribute
{ "repo_name": "sidohaakma/molgenis", "path": "molgenis-data/src/main/java/org/molgenis/data/meta/model/EntityType.java", "license": "lgpl-3.0", "size": 24680 }
[ "com.google.common.collect.Streams" ]
import com.google.common.collect.Streams;
import com.google.common.collect.*;
[ "com.google.common" ]
com.google.common;
1,856,778
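A brief usage sketch (not from the project); the Attribute import location is assumed to match EntityType's package:

import org.molgenis.data.meta.model.Attribute;
import org.molgenis.data.meta.model.EntityType;

class LookupAttributeSketch {
    // True when the entity type defines a lookup attribute with the given name.
    static boolean hasLookupAttribute(EntityType entityType, String name) {
        Attribute attribute = entityType.getLookupAttribute(name);
        return attribute != null; // null means no matching lookup attribute
    }
}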
public Tree<String> extractBestViterbiParse(int gState, int gp, int start, int end, List<String> sentence, boolean unaryAllowed) { // find sources of inside score // no backtraces so we can speed up the parsing for its primary use double bestScore = (unaryAllowed) ? iScorePostU[start][end][gState][gp] : iScorePreU[start][end][gState][gp]; String goalStr = (String) tagNumberer.object(gState); // System.out.println("Looking for "+goalStr+" from "+start+" to "+end+" with score "+ // bestScore+"."); if (end - start == 1) { // if the goal state is a preterminal state, then it can't transform // into // anything but the word below it // if (lexicon.getAllTags().contains(gState)) { if (!grammar.isGrammarTag[gState]) { List<Tree<String>> child = new ArrayList<Tree<String>>(); child.add(new Tree<String>(sentence.get(start))); return new Tree<String>(goalStr, child); } // if the goal state is not a preterminal state, then find a way to // transform it into one else { double veryBestScore = Double.NEGATIVE_INFINITY; int newIndex = -1; UnaryRule[] unaries = grammar .getClosedViterbiUnaryRulesByParent(gState); for (int r = 0; r < unaries.length; r++) { UnaryRule ur = unaries[r]; int cState = ur.childState; double[][] scores = ur.getScores2(); for (int cp = 0; cp < scores.length; cp++) { if (scores[cp] == null) continue; double ruleScore = iScorePreU[start][end][cState][cp] * scores[cp][gp]; if ((ruleScore >= veryBestScore) && (gState != cState || gp != cp) && (!grammar.isGrammarTag[ur.getChildState()])) { // && lexicon.getAllTags().contains(cState)) { veryBestScore = ruleScore; newIndex = cState; } } } List<Tree<String>> child1 = new ArrayList<Tree<String>>(); child1.add(new Tree<String>(sentence.get(start))); String goalStr1 = (String) tagNumberer.object(newIndex); if (goalStr1 == null) System.out.println("goalStr1==null with newIndex==" + newIndex + " goalStr==" + goalStr); List<Tree<String>> child = new ArrayList<Tree<String>>(); child.add(new Tree<String>(goalStr1, child1)); return new Tree<String>(goalStr, child); } } // check binaries first for (int split = start + 1; split < end; split++) { // for (Iterator binaryI = grammar.bRuleIteratorByParent(gState, // gp); binaryI.hasNext();) { // BinaryRule br = (BinaryRule) binaryI.next(); BinaryRule[] parentRules = grammar.splitRulesWithP(gState); for (int i = 0; i < parentRules.length; i++) { BinaryRule br = parentRules[i]; int lState = br.leftChildState; if (iScorePostU[start][split][lState] == null) continue; int rState = br.rightChildState; if (iScorePostU[split][end][rState] == null) continue; // new: iterate over substates double[][][] scores = br.getScores2(); for (int lp = 0; lp < scores.length; lp++) { for (int rp = 0; rp < scores[lp].length; rp++) { if (scores[lp][rp] == null) continue; double score = ScalingTools.scaleToScale( scores[lp][rp][gp] * iScorePostU[start][split][lState][lp] * iScorePostU[split][end][rState][rp], iScale[start][split][lState] + iScale[split][end][rState], iScale[start][end][gState]); if (matches(score, bestScore)) { // build binary split Tree<String> leftChildTree = extractBestViterbiParse( lState, lp, start, split, sentence, true); Tree<String> rightChildTree = extractBestViterbiParse( rState, rp, split, end, sentence, true); List<Tree<String>> children = new ArrayList<Tree<String>>(); children.add(leftChildTree); children.add(rightChildTree); Tree<String> result = new Tree<String>(goalStr, children); // System.out.println("Binary node: "+result); // result.setScore(score); return result; } } } } } // check unaries 
// for (Iterator unaryI = grammar.uRuleIteratorByParent(gState, gp); // unaryI.hasNext();) { // UnaryRule ur = (UnaryRule) unaryI.next(); UnaryRule[] unaries = grammar .getClosedViterbiUnaryRulesByParent(gState); for (int r = 0; r < unaries.length; r++) { UnaryRule ur = unaries[r]; int cState = ur.childState; if (iScorePostU[start][end][cState] == null) continue; // new: iterate over substates double[][] scores = ur.getScores2(); for (int cp = 0; cp < scores.length; cp++) { if (scores[cp] == null) continue; double score = ScalingTools.scaleToScale(scores[cp][gp] * iScorePreU[start][end][cState][cp], iScale[start][end][cState], iScale[start][end][gState]); if ((cState != ur.parentState || cp != gp) && matches(score, bestScore)) { // build unary Tree<String> childTree = extractBestViterbiParse(cState, cp, start, end, sentence, false); List<Tree<String>> children = new ArrayList<Tree<String>>(); children.add(childTree); Tree<String> result = new Tree<String>(goalStr, children); // System.out.println("Unary node: "+result); // result.setScore(score); return result; } } } System.err .println("Warning: could not find the optimal way to build state " + goalStr + " spanning from " + start + " to " + end + "."); return null; }
Tree<String> function(int gState, int gp, int start, int end, List<String> sentence, boolean unaryAllowed) { double bestScore = (unaryAllowed) ? iScorePostU[start][end][gState][gp] : iScorePreU[start][end][gState][gp]; String goalStr = (String) tagNumberer.object(gState); if (end - start == 1) { if (!grammar.isGrammarTag[gState]) { List<Tree<String>> child = new ArrayList<Tree<String>>(); child.add(new Tree<String>(sentence.get(start))); return new Tree<String>(goalStr, child); } else { double veryBestScore = Double.NEGATIVE_INFINITY; int newIndex = -1; UnaryRule[] unaries = grammar .getClosedViterbiUnaryRulesByParent(gState); for (int r = 0; r < unaries.length; r++) { UnaryRule ur = unaries[r]; int cState = ur.childState; double[][] scores = ur.getScores2(); for (int cp = 0; cp < scores.length; cp++) { if (scores[cp] == null) continue; double ruleScore = iScorePreU[start][end][cState][cp] * scores[cp][gp]; if ((ruleScore >= veryBestScore) && (gState != cState gp != cp) && (!grammar.isGrammarTag[ur.getChildState()])) { veryBestScore = ruleScore; newIndex = cState; } } } List<Tree<String>> child1 = new ArrayList<Tree<String>>(); child1.add(new Tree<String>(sentence.get(start))); String goalStr1 = (String) tagNumberer.object(newIndex); if (goalStr1 == null) System.out.println(STR + newIndex + STR + goalStr); List<Tree<String>> child = new ArrayList<Tree<String>>(); child.add(new Tree<String>(goalStr1, child1)); return new Tree<String>(goalStr, child); } } for (int split = start + 1; split < end; split++) { BinaryRule[] parentRules = grammar.splitRulesWithP(gState); for (int i = 0; i < parentRules.length; i++) { BinaryRule br = parentRules[i]; int lState = br.leftChildState; if (iScorePostU[start][split][lState] == null) continue; int rState = br.rightChildState; if (iScorePostU[split][end][rState] == null) continue; double[][][] scores = br.getScores2(); for (int lp = 0; lp < scores.length; lp++) { for (int rp = 0; rp < scores[lp].length; rp++) { if (scores[lp][rp] == null) continue; double score = ScalingTools.scaleToScale( scores[lp][rp][gp] * iScorePostU[start][split][lState][lp] * iScorePostU[split][end][rState][rp], iScale[start][split][lState] + iScale[split][end][rState], iScale[start][end][gState]); if (matches(score, bestScore)) { Tree<String> leftChildTree = extractBestViterbiParse( lState, lp, start, split, sentence, true); Tree<String> rightChildTree = extractBestViterbiParse( rState, rp, split, end, sentence, true); List<Tree<String>> children = new ArrayList<Tree<String>>(); children.add(leftChildTree); children.add(rightChildTree); Tree<String> result = new Tree<String>(goalStr, children); return result; } } } } } UnaryRule[] unaries = grammar .getClosedViterbiUnaryRulesByParent(gState); for (int r = 0; r < unaries.length; r++) { UnaryRule ur = unaries[r]; int cState = ur.childState; if (iScorePostU[start][end][cState] == null) continue; double[][] scores = ur.getScores2(); for (int cp = 0; cp < scores.length; cp++) { if (scores[cp] == null) continue; double score = ScalingTools.scaleToScale(scores[cp][gp] * iScorePreU[start][end][cState][cp], iScale[start][end][cState], iScale[start][end][gState]); if ((cState != ur.parentState cp != gp) && matches(score, bestScore)) { Tree<String> childTree = extractBestViterbiParse(cState, cp, start, end, sentence, false); List<Tree<String>> children = new ArrayList<Tree<String>>(); children.add(childTree); Tree<String> result = new Tree<String>(goalStr, children); return result; } } } System.err .println(STR + goalStr + STR + start + STR + 
end + "."); return null; }
/** * Return the single best parse. Note that the returned tree may be missing * intermediate nodes in a unary chain because it parses with a unary-closed * grammar. */
Return the single best parse. Note that the returned tree may be missing intermediate nodes in a unary chain because it parses with a unary-closed grammar
extractBestViterbiParse
{ "repo_name": "text-machine-lab/CliRel", "path": "model/kim/berkeleyparser/src/edu/berkeley/nlp/PCFGLA/ConstrainedTwoChartsParser.java", "license": "apache-2.0", "size": 84171 }
[ "edu.berkeley.nlp.syntax.Tree", "edu.berkeley.nlp.util.ScalingTools", "java.util.ArrayList", "java.util.List" ]
import edu.berkeley.nlp.syntax.Tree; import edu.berkeley.nlp.util.ScalingTools; import java.util.ArrayList; import java.util.List;
import edu.berkeley.nlp.syntax.*; import edu.berkeley.nlp.util.*; import java.util.*;
[ "edu.berkeley.nlp", "java.util" ]
edu.berkeley.nlp; java.util;
2,269,315
@Test public void testSerialization() { TimeSeriesCollection c1 = new TimeSeriesCollection(createSeries()); TimeSeriesCollection c2 = (TimeSeriesCollection) TestUtils.serialised(c1); assertEquals(c1, c2); }
void function() { TimeSeriesCollection c1 = new TimeSeriesCollection(createSeries()); TimeSeriesCollection c2 = (TimeSeriesCollection) TestUtils.serialised(c1); assertEquals(c1, c2); }
/** * Serialize an instance, restore it, and check for equality. */
Serialize an instance, restore it, and check for equality
testSerialization
{ "repo_name": "oskopek/jfreechart-fse", "path": "src/test/java/org/jfree/data/time/TimeSeriesCollectionTest.java", "license": "lgpl-2.1", "size": 16958 }
[ "org.jfree.chart.TestUtils", "org.junit.Assert" ]
import org.jfree.chart.TestUtils; import org.junit.Assert;
import org.jfree.chart.*; import org.junit.*;
[ "org.jfree.chart", "org.junit" ]
org.jfree.chart; org.junit;
1,364,139
static public OutputStream createInputFile(MiniCluster cluster, String fileName) throws IOException { FileSystem fs = cluster.getFileSystem(); if (fs.exists(new Path(fileName))) { throw new IOException("File " + fileName + " already exists on the minicluster"); } return fs.create(new Path(fileName)); }
static OutputStream function(MiniCluster cluster, String fileName) throws IOException { FileSystem fs = cluster.getFileSystem(); if (fs.exists(new Path(fileName))) { throw new IOException(STR + fileName + STR); } return fs.create(new Path(fileName)); }
/** * Helper to create a DFS file on the MiniCluster DFS. This returns an * OutputStream that can be used in test cases to write data. * * @param cluster * reference to the MiniCluster where the file should be created * @param fileName * pathname of the file to be created * @return OutputStream to write any data to the file created on the * MiniCluster. * @throws IOException */
Helper to create a DFS file on the MiniCluster DFS. This returns an OutputStream that can be used in test cases to write data
createInputFile
{ "repo_name": "dmeister/pig-cll-gz", "path": "test/org/apache/pig/test/Util.java", "license": "apache-2.0", "size": 37825 }
[ "java.io.IOException", "java.io.OutputStream", "org.apache.hadoop.fs.FileSystem", "org.apache.hadoop.fs.Path" ]
import java.io.IOException; import java.io.OutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path;
import java.io.*; import org.apache.hadoop.fs.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
762,775
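A usage sketch for writing test input through the helper above; the path and row contents are placeholders:

import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import org.apache.pig.test.MiniCluster;
import org.apache.pig.test.Util;

class MiniClusterInputSketch {
    // Creates a new DFS file on the test MiniCluster and writes two tab-separated rows.
    static void writeTestInput(MiniCluster cluster) throws IOException {
        OutputStream os = Util.createInputFile(cluster, "/tmp/test-input.txt");
        PrintStream ps = new PrintStream(os);
        ps.println("row1\t1");
        ps.println("row2\t2");
        ps.close(); // flushes the data to the MiniCluster DFS
    }
}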
public void addAll(Collection<Pin> pins) { this.pins.addAll(pins); if (pinMap != null) for (Pin p : pins) pinMap.put(p, this); }
void function(Collection<Pin> pins) { this.pins.addAll(pins); if (pinMap != null) for (Pin p : pins) pinMap.put(p, this); }
/** * Add all given pins to the net. * Used during custom component connection. * * @param pins the pins */
Add all given pins to the net. Used during custom component connection
addAll
{ "repo_name": "hneemann/Digital", "path": "src/main/java/de/neemann/digital/draw/model/Net.java", "license": "gpl-3.0", "size": 9517 }
[ "de.neemann.digital.draw.elements.Pin", "java.util.Collection" ]
import de.neemann.digital.draw.elements.Pin; import java.util.Collection;
import de.neemann.digital.draw.elements.*; import java.util.*;
[ "de.neemann.digital", "java.util" ]
de.neemann.digital; java.util;
2,238,275
protected void percolateDownMaxHeap(final int index) { final Activation element = elements[index]; int hole = index; while ((hole * 2) <= size) { int child = hole * 2; // if we have a right child and that child can not be percolated // up then move onto other child if (child != size && compare(elements[child + 1], elements[child]) > 0) { child++; } // if we found resting place of bubble then terminate search if (compare(elements[child], element) <= 0) { break; } setElement( hole, elements[child] ); hole = child; } setElement( hole, element); }
void function(final int index) { final Activation element = elements[index]; int hole = index; while ((hole * 2) <= size) { int child = hole * 2; if (child != size && compare(elements[child + 1], elements[child]) > 0) { child++; } if (compare(elements[child], element) <= 0) { break; } setElement( hole, elements[child] ); hole = child; } setElement( hole, element); }
/** * Percolates element down heap from the position given by the index. * <p> * Assumes it is a maximum heap. * * @param index the index of the element */
Percolates element down heap from the position given by the index. Assumes it is a maximum heap
percolateDownMaxHeap
{ "repo_name": "jomarko/drools", "path": "drools-core/src/main/java/org/drools/core/util/BinaryHeapQueue.java", "license": "apache-2.0", "size": 10118 }
[ "org.drools.core.spi.Activation" ]
import org.drools.core.spi.Activation;
import org.drools.core.spi.*;
[ "org.drools.core" ]
org.drools.core;
821,414
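A standalone illustration of the same sift-down idea on a plain int array (this is the generic max-heap algorithm, not Drools API); the heap is 1-based, i.e. valid elements live in heap[1..size]:

class SiftDownSketch {
    // Moves heap[index] down until both children are no larger than it.
    static void percolateDownMaxHeap(int[] heap, int size, int index) {
        int element = heap[index];
        int hole = index;
        while (hole * 2 <= size) {
            int child = hole * 2;
            // Pick the larger of the two children, if a right child exists.
            if (child != size && heap[child + 1] > heap[child]) {
                child++;
            }
            // Stop once the larger child no longer beats the element being placed.
            if (heap[child] <= element) {
                break;
            }
            heap[hole] = heap[child];
            hole = child;
        }
        heap[hole] = element;
    }
}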
public static void loadFromJSONArrayChild(List<AbstractExpression> starter, JSONObject parent, String label, StmtTableScan tableScan) throws JSONException { if( parent.isNull(label) ) { return; } JSONArray jarray = parent.getJSONArray(label); loadFromJSONArray(starter, jarray, tableScan); }
static void function(List<AbstractExpression> starter, JSONObject parent, String label, StmtTableScan tableScan) throws JSONException { if( parent.isNull(label) ) { return; } JSONArray jarray = parent.getJSONArray(label); loadFromJSONArray(starter, jarray, tableScan); }
/** * For TVEs, only the column index and table index are serialized. In order to match an expression, * more information is needed to recover the table name, table alias and column name. * By adding @param tableScan, the TVE will load the table name, table alias and column name for the TVE. * @param starter * @param parent * @param label * @param tableScan * @throws JSONException */
For TVEs, only the column index and table index are serialized. In order to match an expression, more information is needed to recover the table name, table alias and column name. By adding @param tableScan, the TVE will load the table name, table alias and column name for the TVE
loadFromJSONArrayChild
{ "repo_name": "wolffcm/voltdb", "path": "src/frontend/org/voltdb/expressions/AbstractExpression.java", "license": "agpl-3.0", "size": 37638 }
[ "java.util.List", "org.json_voltpatches.JSONArray", "org.json_voltpatches.JSONException", "org.json_voltpatches.JSONObject", "org.voltdb.planner.parseinfo.StmtTableScan" ]
import java.util.List; import org.json_voltpatches.JSONArray; import org.json_voltpatches.JSONException; import org.json_voltpatches.JSONObject; import org.voltdb.planner.parseinfo.StmtTableScan;
import java.util.*; import org.json_voltpatches.*; import org.voltdb.planner.parseinfo.*;
[ "java.util", "org.json_voltpatches", "org.voltdb.planner" ]
java.util; org.json_voltpatches; org.voltdb.planner;
398,198
public EarliestCheckpointMapSnapshot earliestCheckpointsMapSnapshot() { Map<UUID, Map<Integer, GroupStateSnapshot>> data = new HashMap<>(); synchronized (earliestCp) { Collection<CheckpointEntry> values = earliestCp.values(); for (CheckpointEntry cp : values) { UUID checkpointId = cp.checkpointId(); if (data.containsKey(checkpointId)) continue; Map<Integer, GroupState> map = cp.groupStates(); if (map != null) { Map<Integer, GroupStateSnapshot> groupStates = new HashMap<>(); map.forEach((k, v) -> groupStates.put(k, new GroupStateSnapshot( v.partitionIds(), v.partitionCounters(), v.size() )) ); data.put(checkpointId, groupStates); } } } Set<UUID> ids = histMap.values().stream().map(CheckpointEntry::checkpointId).collect(Collectors.toSet()); return new EarliestCheckpointMapSnapshot(ids, data); }
EarliestCheckpointMapSnapshot function() { Map<UUID, Map<Integer, GroupStateSnapshot>> data = new HashMap<>(); synchronized (earliestCp) { Collection<CheckpointEntry> values = earliestCp.values(); for (CheckpointEntry cp : values) { UUID checkpointId = cp.checkpointId(); if (data.containsKey(checkpointId)) continue; Map<Integer, GroupState> map = cp.groupStates(); if (map != null) { Map<Integer, GroupStateSnapshot> groupStates = new HashMap<>(); map.forEach((k, v) -> groupStates.put(k, new GroupStateSnapshot( v.partitionIds(), v.partitionCounters(), v.size() )) ); data.put(checkpointId, groupStates); } } } Set<UUID> ids = histMap.values().stream().map(CheckpointEntry::checkpointId).collect(Collectors.toSet()); return new EarliestCheckpointMapSnapshot(ids, data); }
/** * Creates a snapshot of {@link #earliestCp} map. * Guarded by checkpoint read lock. * * @return Snapshot of a map. */
Creates a snapshot of <code>#earliestCp</code> map. Guarded by checkpoint read lock
earliestCheckpointsMapSnapshot
{ "repo_name": "NSAmelchev/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/checkpoint/CheckpointHistory.java", "license": "apache-2.0", "size": 29146 }
[ "java.util.Collection", "java.util.HashMap", "java.util.Map", "java.util.Set", "java.util.stream.Collectors", "org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointEntry", "org.apache.ignite.internal.processors.cache.persistence.checkpoint.EarliestCheckpointMapSnapshot" ]
import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointEntry; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.EarliestCheckpointMapSnapshot;
import java.util.*; import java.util.stream.*; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.*;
[ "java.util", "org.apache.ignite" ]
java.util; org.apache.ignite;
1,114,526
public ConceptDescription getDescription(Locale locale) { return getDescription(locale, false); }
ConceptDescription function(Locale locale) { return getDescription(locale, false); }
/** * Finds the description of the concept in the given locale. Returns null if none found. * * @param locale * @return ConceptDescription attributed to the Concept in the given locale */
Finds the description of the concept in the given locale. Returns null if none found
getDescription
{ "repo_name": "MitchellBot/openmrs-core", "path": "api/src/main/java/org/openmrs/Concept.java", "license": "mpl-2.0", "size": 52648 }
[ "java.util.Locale" ]
import java.util.Locale;
import java.util.*;
[ "java.util" ]
java.util;
1,156,668
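A usage sketch (not from OpenMRS) that falls back to a placeholder string when no localized description exists; it assumes ConceptDescription exposes its text via getDescription():

import java.util.Locale;
import org.openmrs.Concept;
import org.openmrs.ConceptDescription;

class ConceptDescriptionSketch {
    static String describe(Concept concept, Locale locale) {
        ConceptDescription description = concept.getDescription(locale);
        // Null is returned by getDescription(locale) when no description matches the locale.
        return description != null ? description.getDescription() : "(no description)";
    }
}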
public static char readChar(DataInputStream dis) throws IOException { return (char) dis.readByte(); }
static char function(DataInputStream dis) throws IOException { return (char) dis.readByte(); }
/** * Reads a single char from the stream * * @param dis the stream to read * @return the next character on the stream * * @throws IOException if an error occurs */
Reads a single char from the stream
readChar
{ "repo_name": "edwardtoday/PolyU_MScST", "path": "COMP5517/JavaSpeech/freetts-1.2.2-src/freetts-1.2.2/tools/FestVoxToFreeTTS/FindSTS.java", "license": "mit", "size": 30039 }
[ "java.io.DataInputStream", "java.io.IOException" ]
import java.io.DataInputStream; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
534,523
public void handleFileUpload(FileUploadEvent event) throws Exception { FacesContext fc = FacesContext.getCurrentInstance(); UploadedFile file = event.getFile(); if (! validFile(file, fc)) { return; } String context = getContext(); if ((context == null) || (action == null)) { UtilImpl.LOGGER.warning("FileUpload - Malformed URL on Upload Action - context, binding, or action is null"); FacesMessage msg = new FacesMessage("Failure", "Malformed URL"); fc.addMessage(null, msg); return; } ExternalContext ec = fc.getExternalContext(); HttpServletRequest request = (HttpServletRequest) ec.getRequest(); HttpServletResponse response = (HttpServletResponse) ec.getResponse(); AbstractWebContext webContext = StateUtil.getCachedConversation(context, request, response); if (webContext == null) { UtilImpl.LOGGER.warning("FileUpload - Malformed URL on Upload Action - context does not exist"); FacesMessage msg = new FacesMessage("Failure", "Malformed URL"); FacesContext.getCurrentInstance().addMessage(null, msg); return; } // NB Persistence has been set with the restore processing inside the SkyvePhaseListener Persistence persistence = CORE.getPersistence(); try { AbstractRepository repository = AbstractRepository.get(); User user = persistence.getUser(); CustomerImpl customer = (CustomerImpl) user.getCustomer(); Bean currentBean = webContext.getCurrentBean(); Bean bean = currentBean; String binding = getBinding(); if (binding != null) { bean = (Bean) BindUtil.get(bean, binding); } Module module = customer.getModule(bean.getBizModule()); Document document = module.getDocument(customer, bean.getBizDocument()); if (! user.canExecuteAction(document, action)) { throw new SecurityException(action, user.getName()); } UploadAction<Bean> uploadAction = repository.getUploadAction(customer, document, action, true); UploadException exception = new UploadException(); MimeType mimeType = null; try { MimeType.valueOf(file.getContentType()); } catch (@SuppressWarnings("unused") Exception e) { // do nothing } try{ @SuppressWarnings("resource") UploadAction.UploadedFile bizFile = new UploadAction.UploadedFile(FilenameUtils.getName(file.getFileName()), file.getInputStream(), mimeType); boolean vetoed = customer.interceptBeforeUploadAction(document, action, bean, bizFile, webContext); if (! 
vetoed) { bean = uploadAction.upload(bean, bizFile, exception, webContext); if (binding == null) { webContext.setCurrentBean(bean); } else { BindUtil.set(currentBean, binding, bean); } customer.interceptAfterUploadAction(document, action, bean, bizFile, webContext); // throw if we have errors found, to ensure rollback if (exception.hasErrors()) { throw exception; } } } catch (UploadException e) { e.printStackTrace(); persistence.rollback(); exception = e; } catch (IOException e) { // hide any file system paths from file operation problems encountered throw new DomainException("File Upload could not be processed", e); } // only put conversation in cache if we have been successful in executing StateUtil.cacheConversation(webContext); if (exception.hasProblems()) { for (Problem error : exception.getErrors()) { problems.add(error); } for (Problem warning : exception.getWarnings()) { problems.add(warning); } if (exception.hasErrors()) { String message = "The import did <b>NOT</b> complete successfully.<br/>" + "No data has changed as a result of this import.<br/>" + "Please review the errors and warnings displayed before closing this window.<br/>" + "The above list includes only the first 50 errors and warnings, there may be more.<br/>" + "If the nature of the problem is not clear from the message, it may be because it is caused by another issue being compounded.<br/>" + "In this case, you may find that fixing one or two problems you can easily identify, may resolve a number of related issues."; FacesMessage msg = new FacesMessage(FacesMessage.SEVERITY_ERROR, "Unsuccessful", message); fc.addMessage(null, msg); } else { String message = "The import completed successfully with warnings.<br/>" + "Please review the warnings displayed before closing this window.<br/>" + "The above list includes only the first 50 errors and warnings, there may be more.<br/>" + "If the nature of the problem is not clear from the message, it may be because it is caused by another issue being compounded.<br/>" + "In this case, you may find that fixing one or two problems you can easily identify, may resolve a number of related issues."; FacesMessage msg = new FacesMessage(FacesMessage.SEVERITY_WARN, "Successful", message); fc.addMessage(null, msg); } } else { long size = file.getSize(); StringBuilder message = new StringBuilder(128); message.append(file.getFileName()).append(" is uploaded. File Size is "); DecimalFormat format = CORE.getDecimalFormat("###,##0.00"); if (size > 1048576) { message.append(format.format(size / 1048576.0)).append(" MB"); } else { message.append(format.format(size / 1024.0)).append(" KB"); } FacesMessage msg = new FacesMessage("Successful", message.toString()); FacesContext.getCurrentInstance().addMessage(null, msg); } } catch (Exception e) { persistence.rollback(); e.printStackTrace(); FacesMessage msg = new FacesMessage("Failure", e.getMessage()); fc.addMessage(null, msg); } // NB No need to disconnect Persistence as it is done in the SkyvePhaseListener after the response is rendered. }
void function(FileUploadEvent event) throws Exception { FacesContext fc = FacesContext.getCurrentInstance(); UploadedFile file = event.getFile(); if (! validFile(file, fc)) { return; } String context = getContext(); if ((context == null) (action == null)) { UtilImpl.LOGGER.warning(STR); FacesMessage msg = new FacesMessage(STR, STR); fc.addMessage(null, msg); return; } ExternalContext ec = fc.getExternalContext(); HttpServletRequest request = (HttpServletRequest) ec.getRequest(); HttpServletResponse response = (HttpServletResponse) ec.getResponse(); AbstractWebContext webContext = StateUtil.getCachedConversation(context, request, response); if (webContext == null) { UtilImpl.LOGGER.warning(STR); FacesMessage msg = new FacesMessage(STR, STR); FacesContext.getCurrentInstance().addMessage(null, msg); return; } Persistence persistence = CORE.getPersistence(); try { AbstractRepository repository = AbstractRepository.get(); User user = persistence.getUser(); CustomerImpl customer = (CustomerImpl) user.getCustomer(); Bean currentBean = webContext.getCurrentBean(); Bean bean = currentBean; String binding = getBinding(); if (binding != null) { bean = (Bean) BindUtil.get(bean, binding); } Module module = customer.getModule(bean.getBizModule()); Document document = module.getDocument(customer, bean.getBizDocument()); if (! user.canExecuteAction(document, action)) { throw new SecurityException(action, user.getName()); } UploadAction<Bean> uploadAction = repository.getUploadAction(customer, document, action, true); UploadException exception = new UploadException(); MimeType mimeType = null; try { MimeType.valueOf(file.getContentType()); } catch (@SuppressWarnings(STR) Exception e) { } try{ @SuppressWarnings(STR) UploadAction.UploadedFile bizFile = new UploadAction.UploadedFile(FilenameUtils.getName(file.getFileName()), file.getInputStream(), mimeType); boolean vetoed = customer.interceptBeforeUploadAction(document, action, bean, bizFile, webContext); if (! 
vetoed) { bean = uploadAction.upload(bean, bizFile, exception, webContext); if (binding == null) { webContext.setCurrentBean(bean); } else { BindUtil.set(currentBean, binding, bean); } customer.interceptAfterUploadAction(document, action, bean, bizFile, webContext); if (exception.hasErrors()) { throw exception; } } } catch (UploadException e) { e.printStackTrace(); persistence.rollback(); exception = e; } catch (IOException e) { throw new DomainException(STR, e); } StateUtil.cacheConversation(webContext); if (exception.hasProblems()) { for (Problem error : exception.getErrors()) { problems.add(error); } for (Problem warning : exception.getWarnings()) { problems.add(warning); } if (exception.hasErrors()) { String message = STR + STR + STR + STR + STR + STR; FacesMessage msg = new FacesMessage(FacesMessage.SEVERITY_ERROR, STR, message); fc.addMessage(null, msg); } else { String message = STR + STR + STR + STR + STR; FacesMessage msg = new FacesMessage(FacesMessage.SEVERITY_WARN, STR, message); fc.addMessage(null, msg); } } else { long size = file.getSize(); StringBuilder message = new StringBuilder(128); message.append(file.getFileName()).append(STR); DecimalFormat format = CORE.getDecimalFormat(STR); if (size > 1048576) { message.append(format.format(size / 1048576.0)).append(STR); } else { message.append(format.format(size / 1024.0)).append(STR); } FacesMessage msg = new FacesMessage(STR, message.toString()); FacesContext.getCurrentInstance().addMessage(null, msg); } } catch (Exception e) { persistence.rollback(); e.printStackTrace(); FacesMessage msg = new FacesMessage(STR, e.getMessage()); fc.addMessage(null, msg); } }
/** * Process the file upload * * @param event */
Process the file upload
handleFileUpload
{ "repo_name": "skyvers/wildcat", "path": "skyve-web/src/main/java/org/skyve/impl/web/faces/beans/FileUpload.java", "license": "lgpl-2.1", "size": 8598 }
[ "java.io.IOException", "java.text.DecimalFormat", "javax.faces.application.FacesMessage", "javax.faces.context.ExternalContext", "javax.faces.context.FacesContext", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse", "org.apache.commons.io.FilenameUtils", "org.primefaces.event.FileUploadEvent", "org.primefaces.model.file.UploadedFile", "org.skyve.CORE", "org.skyve.cache.StateUtil", "org.skyve.content.MimeType", "org.skyve.domain.Bean", "org.skyve.domain.messages.DomainException", "org.skyve.domain.messages.UploadException", "org.skyve.impl.bind.BindUtil", "org.skyve.impl.domain.messages.SecurityException", "org.skyve.impl.metadata.customer.CustomerImpl", "org.skyve.impl.metadata.repository.AbstractRepository", "org.skyve.impl.util.UtilImpl", "org.skyve.impl.web.AbstractWebContext", "org.skyve.metadata.controller.UploadAction", "org.skyve.metadata.model.document.Document", "org.skyve.metadata.module.Module", "org.skyve.metadata.user.User", "org.skyve.persistence.Persistence" ]
import java.io.IOException; import java.text.DecimalFormat; import javax.faces.application.FacesMessage; import javax.faces.context.ExternalContext; import javax.faces.context.FacesContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.io.FilenameUtils; import org.primefaces.event.FileUploadEvent; import org.primefaces.model.file.UploadedFile; import org.skyve.CORE; import org.skyve.cache.StateUtil; import org.skyve.content.MimeType; import org.skyve.domain.Bean; import org.skyve.domain.messages.DomainException; import org.skyve.domain.messages.UploadException; import org.skyve.impl.bind.BindUtil; import org.skyve.impl.domain.messages.SecurityException; import org.skyve.impl.metadata.customer.CustomerImpl; import org.skyve.impl.metadata.repository.AbstractRepository; import org.skyve.impl.util.UtilImpl; import org.skyve.impl.web.AbstractWebContext; import org.skyve.metadata.controller.UploadAction; import org.skyve.metadata.model.document.Document; import org.skyve.metadata.module.Module; import org.skyve.metadata.user.User; import org.skyve.persistence.Persistence;
import java.io.*; import java.text.*; import javax.faces.application.*; import javax.faces.context.*; import javax.servlet.http.*; import org.apache.commons.io.*; import org.primefaces.event.*; import org.primefaces.model.file.*; import org.skyve.*; import org.skyve.cache.*; import org.skyve.content.*; import org.skyve.domain.*; import org.skyve.domain.messages.*; import org.skyve.impl.bind.*; import org.skyve.impl.domain.messages.*; import org.skyve.impl.metadata.customer.*; import org.skyve.impl.metadata.repository.*; import org.skyve.impl.util.*; import org.skyve.impl.web.*; import org.skyve.metadata.controller.*; import org.skyve.metadata.model.document.*; import org.skyve.metadata.module.*; import org.skyve.metadata.user.*; import org.skyve.persistence.*;
[ "java.io", "java.text", "javax.faces", "javax.servlet", "org.apache.commons", "org.primefaces.event", "org.primefaces.model", "org.skyve", "org.skyve.cache", "org.skyve.content", "org.skyve.domain", "org.skyve.impl", "org.skyve.metadata", "org.skyve.persistence" ]
java.io; java.text; javax.faces; javax.servlet; org.apache.commons; org.primefaces.event; org.primefaces.model; org.skyve; org.skyve.cache; org.skyve.content; org.skyve.domain; org.skyve.impl; org.skyve.metadata; org.skyve.persistence;
689,466
public static void validateSimple(String s, String legal) throws URISyntaxException { for (int i = 0; i < s.length(); i++) { char ch = s.charAt(i); if (!((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || (ch >= '0' && ch <= '9') || legal.indexOf(ch) > -1)) { throw new URISyntaxException(s, "Illegal character", i); } } }
static void function(String s, String legal) throws URISyntaxException { for (int i = 0; i < s.length(); i++) { char ch = s.charAt(i); if (!((ch >= 'a' && ch <= 'z') (ch >= 'A' && ch <= 'Z') (ch >= '0' && ch <= '9') legal.indexOf(ch) > -1)) { throw new URISyntaxException(s, STR, i); } } }
/** * Throws if {@code s} contains characters that are not letters, digits or * in {@code legal}. */
Throws if s contains characters that are not letters, digits or in legal
validateSimple
{ "repo_name": "s20121035/rk3288_android5.1_repo", "path": "packages/apps/Exchange/src/com/android/exchange/utility/UriCodec.java", "license": "gpl-3.0", "size": 8416 }
[ "java.net.URISyntaxException" ]
import java.net.URISyntaxException;
import java.net.*;
[ "java.net" ]
java.net;
103,213
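A small usage sketch; the input string and the extra legal characters are placeholders:

import java.net.URISyntaxException;
import com.android.exchange.utility.UriCodec;

class ValidateSimpleSketch {
    public static void main(String[] args) {
        try {
            // Letters and digits are always allowed; '+', '-' and '.' are added via the legal set.
            UriCodec.validateSimple("mail+archive-v1.0", "+-.");
            System.out.println("valid");
        } catch (URISyntaxException e) {
            System.out.println("illegal character at index " + e.getIndex());
        }
    }
}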
public static void removeDate(Model model, org.ontoware.rdf2go.model.node.Resource instanceResource, Thing value) { Base.remove(model, instanceResource, DATE, value); }
static void function(Model model, org.ontoware.rdf2go.model.node.Resource instanceResource, Thing value) { Base.remove(model, instanceResource, DATE, value); }
/** * Removes a value of property Date given as an instance of Thing * * @param model an RDF2Go model * @param resource an RDF2Go resource * @param value the value to be removed [Generated from RDFReactor template * rule #remove3static] */
Removes a value of property Date given as an instance of Thing
removeDate
{ "repo_name": "m0ep/master-thesis", "path": "source/apis/rdf2go/rdf2go-foaf/src/main/java/com/xmlns/foaf/Thing.java", "license": "mit", "size": 274766 }
[ "org.ontoware.rdf2go.model.Model", "org.ontoware.rdfreactor.runtime.Base" ]
import org.ontoware.rdf2go.model.Model; import org.ontoware.rdfreactor.runtime.Base;
import org.ontoware.rdf2go.model.*; import org.ontoware.rdfreactor.runtime.*;
[ "org.ontoware.rdf2go", "org.ontoware.rdfreactor" ]
org.ontoware.rdf2go; org.ontoware.rdfreactor;
2,809,696
public void setSplitterDraggingDrawable(Drawable splitterDraggingDrawable) { mSplitterDraggingDrawable = splitterDraggingDrawable; if (isDragging) { invalidate(); } }
void function(Drawable splitterDraggingDrawable) { mSplitterDraggingDrawable = splitterDraggingDrawable; if (isDragging) { invalidate(); } }
/** * Sets the drawable used for the splitter dragging overlay. * * @param splitterDraggingDrawable the drawable to use while dragging the splitter */
Sets the drawable used for the splitter dragging overlay
setSplitterDraggingDrawable
{ "repo_name": "MobiDevelop/android-split-pane-layout", "path": "split-pane-layout/src/main/java/com/mobidevelop/spl/widget/SplitPaneLayout.java", "license": "apache-2.0", "size": 28236 }
[ "android.graphics.drawable.Drawable" ]
import android.graphics.drawable.Drawable;
import android.graphics.drawable.*;
[ "android.graphics" ]
android.graphics;
1,486,369
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2, int fileNameColumn ) { int idx = 1; if ( rows1.size() != rows2.size() ) { fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() ); } Iterator<RowMetaAndData> it1 = rows1.iterator(); Iterator<RowMetaAndData> it2 = rows2.iterator(); while ( it1.hasNext() && it2.hasNext() ) { RowMetaAndData rm1 = it1.next(); RowMetaAndData rm2 = it2.next(); Object[] r1 = rm1.getData(); Object[] r2 = rm2.getData(); if ( rm1.size() != rm2.size() ) { fail( "row nr " + idx + " is not equal" ); } int[] fields = new int[r1.length]; for ( int ydx = 0; ydx < r1.length; ydx++ ) { fields[ydx] = ydx; } try { r1[fileNameColumn] = r2[fileNameColumn]; if ( rm1.getRowMeta().compare( r1, r2, fields ) != 0 ) { fail( "row nr " + idx + " is not equal" ); } } catch ( KettleValueException e ) { fail( "row nr " + idx + " is not equal" ); } idx++; } }
void function( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2, int fileNameColumn ) { int idx = 1; if ( rows1.size() != rows2.size() ) { fail( STR + rows1.size() + STR + rows2.size() ); } Iterator<RowMetaAndData> it1 = rows1.iterator(); Iterator<RowMetaAndData> it2 = rows2.iterator(); while ( it1.hasNext() && it2.hasNext() ) { RowMetaAndData rm1 = it1.next(); RowMetaAndData rm2 = it2.next(); Object[] r1 = rm1.getData(); Object[] r2 = rm2.getData(); if ( rm1.size() != rm2.size() ) { fail( STR + idx + STR ); } int[] fields = new int[r1.length]; for ( int ydx = 0; ydx < r1.length; ydx++ ) { fields[ydx] = ydx; } try { r1[fileNameColumn] = r2[fileNameColumn]; if ( rm1.getRowMeta().compare( r1, r2, fields ) != 0 ) { fail( STR + idx + STR ); } } catch ( KettleValueException e ) { fail( STR + idx + STR ); } idx++; } }
/** * Check the 2 lists comparing the rows in order. If they are not the same, fail the test. * * @param rows1 * set 1 of rows to compare * @param rows2 * set 2 of rows to compare * @param fileNameColumn * Number of the column containing the filename. This is only checked for being non-null (some systems may * canonicalize names differently than we input). */
Check the 2 lists comparing the rows in order. If they are not the same, fail the test
checkRows
{ "repo_name": "tkafalas/pentaho-kettle", "path": "integration/src/it/java/org/pentaho/di/trans/steps/csvinput/CsvInputBase.java", "license": "apache-2.0", "size": 7468 }
[ "java.util.Iterator", "java.util.List", "org.pentaho.di.core.RowMetaAndData", "org.pentaho.di.core.exception.KettleValueException" ]
import java.util.Iterator; import java.util.List; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleValueException;
import java.util.*; import org.pentaho.di.core.*; import org.pentaho.di.core.exception.*;
[ "java.util", "org.pentaho.di" ]
java.util; org.pentaho.di;
548,995
private boolean checkItem(Item item) { boolean result = true; if (item == null) { System.out.println("id not found"); result = false; } return result; } private class AddItem extends BaseAction { AddItem(String name) { super(name); }
boolean function(Item item) { boolean result = true; if (item == null) { System.out.println(STR); result = false; } return result; } private class AddItem extends BaseAction { AddItem(String name) { super(name); }
/** * Checks item for null. * * @param item Item * @return true, if item != null */
Checks item for null
checkItem
{ "repo_name": "roman-sd/java-a-to-z", "path": "chapter_008/src/main/java/ru/sdroman/jdbc/tracker/MenuTracker.java", "license": "apache-2.0", "size": 7182 }
[ "ru.sdroman.jdbc.tracker.action.BaseAction", "ru.sdroman.jdbc.tracker.models.Item" ]
import ru.sdroman.jdbc.tracker.action.BaseAction; import ru.sdroman.jdbc.tracker.models.Item;
import ru.sdroman.jdbc.tracker.action.*; import ru.sdroman.jdbc.tracker.models.*;
[ "ru.sdroman.jdbc" ]
ru.sdroman.jdbc;
1,119,995
@SuppressWarnings("unchecked") public static <T> T[] reverse(T[] self, boolean mutate) { if (!mutate) { return (T[]) toList(new ReverseListIterator<T>(Arrays.asList(self))).toArray(); } List<T> items = Arrays.asList(self); Collections.reverse(items); System.arraycopy((T[])items.toArray(), 0, self, 0, items.size()); return self; }
@SuppressWarnings(STR) static <T> T[] function(T[] self, boolean mutate) { if (!mutate) { return (T[]) toList(new ReverseListIterator<T>(Arrays.asList(self))).toArray(); } List<T> items = Arrays.asList(self); Collections.reverse(items); System.arraycopy((T[])items.toArray(), 0, self, 0, items.size()); return self; }
/** * Reverse the items in an array. If mutate is true, the original array is modified in place and returned. * Otherwise, a new array containing the reversed items is produced. * * @param self an array * @param mutate true if the array itself should be reversed in place and returned, false if a new array should be created * @return an array containing the reversed items * @since 1.8.1 */
Reverse the items in an array. If mutate is true, the original array is modified in place and returned. Otherwise, a new array containing the reversed items is produced
reverse
{ "repo_name": "xien777/yajsw", "path": "yajsw/wrapper/src/main/java/org/codehaus/groovy/runtime/DefaultGroovyMethods.java", "license": "lgpl-2.1", "size": 704150 }
[ "java.util.Arrays", "java.util.Collections", "java.util.List" ]
import java.util.Arrays; import java.util.Collections; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,415,855
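A usage sketch contrasting the two mutate modes; the copy is held as Object[] because the non-mutating branch returns an array created via toArray():

import java.util.Arrays;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;

class ReverseSketch {
    public static void main(String[] args) {
        Integer[] original = {1, 2, 3};
        // mutate = false: original is left untouched, a reversed copy is returned.
        Object[] copy = DefaultGroovyMethods.reverse(original, false);
        System.out.println(Arrays.toString(original) + " -> " + Arrays.toString(copy));
        // mutate = true: original itself is reversed in place (and also returned).
        DefaultGroovyMethods.reverse(original, true);
        System.out.println(Arrays.toString(original));
    }
}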
public byte[] engineCanonicalizeXPathNodeSet(NodeList xpathNodeSet) throws CanonicalizationException { return this.engineCanonicalizeXPathNodeSet( XMLUtils.convertNodelistToSet(xpathNodeSet) ); }
byte[] function(NodeList xpathNodeSet) throws CanonicalizationException { return this.engineCanonicalizeXPathNodeSet( XMLUtils.convertNodelistToSet(xpathNodeSet) ); }
/** * Method engineCanonicalizeXPathNodeSet * * @param xpathNodeSet * @return the c14n bytes * @throws CanonicalizationException */
Method engineCanonicalizeXPathNodeSet
engineCanonicalizeXPathNodeSet
{ "repo_name": "rokn/Count_Words_2015", "path": "testing/openjdk2/jdk/src/share/classes/com/sun/org/apache/xml/internal/security/c14n/CanonicalizerSpi.java", "license": "mit", "size": 5132 }
[ "com.sun.org.apache.xml.internal.security.utils.XMLUtils", "org.w3c.dom.NodeList" ]
import com.sun.org.apache.xml.internal.security.utils.XMLUtils; import org.w3c.dom.NodeList;
import com.sun.org.apache.xml.internal.security.utils.*; import org.w3c.dom.*;
[ "com.sun.org", "org.w3c.dom" ]
com.sun.org; org.w3c.dom;
1,451,162
public String getStorageAttributeValueByName(String attributeName, StorageEntity storageEntity, boolean attributeRequired, boolean attributeValueRequiredIfExists) throws IllegalStateException { boolean attributeExists = false; String attributeValue = null; for (StorageAttributeEntity attributeEntity : storageEntity.getAttributes()) { if (attributeEntity.getName().equalsIgnoreCase(attributeName)) { attributeExists = true; attributeValue = attributeEntity.getValue(); break; } } // If the attribute must exist and doesn't, throw an exception. if (attributeRequired && !attributeExists) { throw new IllegalStateException(String.format("Attribute \"%s\" for \"%s\" storage must be configured.", attributeName, storageEntity.getName())); } // If the attribute is configured, but has a blank value, throw an exception. if (attributeExists && attributeValueRequiredIfExists && StringUtils.isBlank(attributeValue)) { throw new IllegalStateException( String.format("Attribute \"%s\" for \"%s\" storage must have a value that is not blank.", attributeName, storageEntity.getName())); } return attributeValue; }
String function(String attributeName, StorageEntity storageEntity, boolean attributeRequired, boolean attributeValueRequiredIfExists) throws IllegalStateException { boolean attributeExists = false; String attributeValue = null; for (StorageAttributeEntity attributeEntity : storageEntity.getAttributes()) { if (attributeEntity.getName().equalsIgnoreCase(attributeName)) { attributeExists = true; attributeValue = attributeEntity.getValue(); break; } } if (attributeRequired && !attributeExists) { throw new IllegalStateException(String.format(STR%s\STR%s\STR, attributeName, storageEntity.getName())); } if (attributeExists && attributeValueRequiredIfExists && StringUtils.isBlank(attributeValue)) { throw new IllegalStateException( String.format(STR%s\STR%s\STR, attributeName, storageEntity.getName())); } return attributeValue; }
/** * Gets attribute value by name from the storage entity while specifying whether the attribute is required and whether the attribute value is required. * * @param attributeName the attribute name (case insensitive) * @param storageEntity the storage entity * @param attributeRequired specifies whether the attribute is mandatory (i.e. whether it has a value or not). * @param attributeValueRequiredIfExists specifies whether the attribute value is mandatory (i.e. the attribute must exist and its value must also contain a * value). * * @return the attribute value from the attribute with the attribute name. * @throws IllegalStateException if the attribute is mandatory and this storage contains no attribute with this attribute name or the value is blank. This * will produce a 500 HTTP status code error. If storage attributes are able to be updated by a REST invocation in the future, we might want to consider * making this a 400 instead since the user has the ability to fix the issue on their own. */
Gets attribute value by name from the storage entity while specifying whether the attribute is required and whether the attribute value is required
getStorageAttributeValueByName
{ "repo_name": "FINRAOS/herd", "path": "herd-code/herd-service/src/main/java/org/finra/herd/service/helper/StorageHelper.java", "license": "apache-2.0", "size": 17059 }
[ "org.apache.commons.lang3.StringUtils", "org.finra.herd.model.jpa.StorageAttributeEntity", "org.finra.herd.model.jpa.StorageEntity" ]
import org.apache.commons.lang3.StringUtils; import org.finra.herd.model.jpa.StorageAttributeEntity; import org.finra.herd.model.jpa.StorageEntity;
import org.apache.commons.lang3.*; import org.finra.herd.model.jpa.*;
[ "org.apache.commons", "org.finra.herd" ]
org.apache.commons; org.finra.herd;
1,974,151
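A usage sketch (attribute names are placeholders, not real herd attribute keys) showing the required versus optional retrieval modes:

import org.finra.herd.model.jpa.StorageEntity;
import org.finra.herd.service.helper.StorageHelper;

class StorageAttributeSketch {
    static void readAttributes(StorageHelper storageHelper, StorageEntity storageEntity) {
        // Must exist with a non-blank value, otherwise an IllegalStateException is thrown.
        String bucketName = storageHelper.getStorageAttributeValueByName("bucket.name", storageEntity, true, true);
        // May be absent; null is returned in that case.
        String keyPrefix = storageHelper.getStorageAttributeValueByName("key.prefix", storageEntity, false, false);
        System.out.println(bucketName + " / " + keyPrefix);
    }
}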
public Map<String, Map<String, EmployeeList>> getSortedWorkforce() { return sortedWorkforce; }
Map<String, Map<String, EmployeeList>> function() { return sortedWorkforce; }
/** * A getter method. * * @return the department's workforce */
A getter method
getSortedWorkforce
{ "repo_name": "gammalgris/jmul", "path": "Utilities/Persistence-Tests/legacy-src/test/jmul/datatypes/legacy/department/DepartmentGenderDetailsImpl.java", "license": "gpl-3.0", "size": 7379 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
55,573
private void initButtonsPanel() { JPanel panel = new JPanel(); BoxLayout layout = new BoxLayout(panel, BoxLayout.LINE_AXIS); panel.setLayout(layout); panel.add(Box.createHorizontalGlue()); backButton = new JButton("Retour"); backButton.addActionListener(this); panel.add(backButton); panel.add(Box.createHorizontalGlue()); statButton = new JButton("Afficher"); statButton.addActionListener(this); panel.add(statButton); add(panel); }
void function() { JPanel panel = new JPanel(); BoxLayout layout = new BoxLayout(panel, BoxLayout.LINE_AXIS); panel.setLayout(layout); panel.add(Box.createHorizontalGlue()); backButton = new JButton(STR); backButton.addActionListener(this); panel.add(backButton); panel.add(Box.createHorizontalGlue()); statButton = new JButton(STR); statButton.addActionListener(this); panel.add(statButton); add(panel); }
/** * Initializes the buttons contained in this panel. */
Initializes the buttons contained in this panel
initButtonsPanel
{ "repo_name": "thedoctor84/Projet2015", "path": "src/fr/univavignon/courbes/inter/stats/StatPanel.java", "license": "gpl-2.0", "size": 4286 }
[ "javax.swing.Box", "javax.swing.BoxLayout", "javax.swing.JButton", "javax.swing.JPanel" ]
import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.JButton; import javax.swing.JPanel;
import javax.swing.*;
[ "javax.swing" ]
javax.swing;
1,802,869
private Map<ValueNode, TypeExpr> getValueNodeToUnconstrainedTypeMap() { Map<ValueNode, TypeExpr> returnMap = new HashMap<ValueNode, TypeExpr>(); // Get the value nodes for the tuple and the items in the tuple. NTupleValueNode currentTupleValue = (NTupleValueNode)getValueNode(); int tupleSize = currentTupleValue.getTupleSize(); // Populate the map // Tuple TypeExpr unconstrainedTupleType = getContext().getLeastConstrainedTypeExpr(); if (unconstrainedTupleType.rootTypeVar() != null) { // not constrained by the context to be a tuple unconstrainedTupleType = TypeExpr.makeTupleType(tupleSize); } returnMap.put(currentTupleValue, unconstrainedTupleType); // Tuple elements Map<FieldName, TypeExpr> hasFieldsMap = unconstrainedTupleType.rootRecordType().getHasFieldsMap(); int j = 0; for (final TypeExpr unconstrainedTupleElementType : hasFieldsMap.values()) { ValueNode currentTupleItem = currentTupleValue.getValueAt(j); returnMap.put(currentTupleItem, unconstrainedTupleElementType); ++j; } return returnMap; } /** * {@inheritDoc}
Map<ValueNode, TypeExpr> function() { Map<ValueNode, TypeExpr> returnMap = new HashMap<ValueNode, TypeExpr>(); NTupleValueNode currentTupleValue = (NTupleValueNode)getValueNode(); int tupleSize = currentTupleValue.getTupleSize(); TypeExpr unconstrainedTupleType = getContext().getLeastConstrainedTypeExpr(); if (unconstrainedTupleType.rootTypeVar() != null) { unconstrainedTupleType = TypeExpr.makeTupleType(tupleSize); } returnMap.put(currentTupleValue, unconstrainedTupleType); Map<FieldName, TypeExpr> hasFieldsMap = unconstrainedTupleType.rootRecordType().getHasFieldsMap(); int j = 0; for (final TypeExpr unconstrainedTupleElementType : hasFieldsMap.values()) { ValueNode currentTupleItem = currentTupleValue.getValueAt(j); returnMap.put(currentTupleItem, unconstrainedTupleElementType); ++j; } return returnMap; } /** * {@inheritDoc}
/** * Get a map from every value node managed by this editor to its least constrained type. * @return Map from every value node managed by this editor to its least constrained type. */
Get a map from every value node managed by this editor to its least constrained type
getValueNodeToUnconstrainedTypeMap
{ "repo_name": "levans/Open-Quark", "path": "src/Quark_Gems/src/org/openquark/gems/client/valueentry/TupleValueEditor.java", "license": "bsd-3-clause", "size": 14047 }
[ "java.util.HashMap", "java.util.Map", "org.openquark.cal.compiler.FieldName", "org.openquark.cal.compiler.TypeExpr", "org.openquark.cal.valuenode.NTupleValueNode", "org.openquark.cal.valuenode.ValueNode" ]
import java.util.HashMap; import java.util.Map; import org.openquark.cal.compiler.FieldName; import org.openquark.cal.compiler.TypeExpr; import org.openquark.cal.valuenode.NTupleValueNode; import org.openquark.cal.valuenode.ValueNode;
import java.util.*; import org.openquark.cal.compiler.*; import org.openquark.cal.valuenode.*;
[ "java.util", "org.openquark.cal" ]
java.util; org.openquark.cal;
1,239,113
public ServiceResponse<Void> putIntegerValid(Map<String, Integer> arrayBody) throws ErrorException, IOException, IllegalArgumentException { if (arrayBody == null) { throw new IllegalArgumentException("Parameter arrayBody is required and cannot be null."); } Validator.validate(arrayBody); Call<ResponseBody> call = service.putIntegerValid(arrayBody); return putIntegerValidDelegate(call.execute()); }
ServiceResponse<Void> function(Map<String, Integer> arrayBody) throws ErrorException, IOException, IllegalArgumentException { if (arrayBody == null) { throw new IllegalArgumentException(STR); } Validator.validate(arrayBody); Call<ResponseBody> call = service.putIntegerValid(arrayBody); return putIntegerValidDelegate(call.execute()); }
/** * Set dictionary value empty {"0": 1, "1": -1, "2": 3, "3": 300}. * * @param arrayBody the Map&lt;String, Integer&gt; value * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the {@link ServiceResponse} object if successful. */
Set dictionary value empty {"0": 1, "1": -1, "2": 3, "3": 300}
putIntegerValid
{ "repo_name": "John-Hart/autorest", "path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/bodydictionary/implementation/DictionarysImpl.java", "license": "mit", "size": 172079 }
[ "com.microsoft.rest.ServiceResponse", "com.microsoft.rest.Validator", "java.io.IOException", "java.util.Map" ]
import com.microsoft.rest.ServiceResponse; import com.microsoft.rest.Validator; import java.io.IOException; import java.util.Map;
import com.microsoft.rest.*; import java.io.*; import java.util.*;
[ "com.microsoft.rest", "java.io", "java.util" ]
com.microsoft.rest; java.io; java.util;
2,322,839
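A minimal caller-side sketch of putIntegerValid; the client instance and its construction are not shown in this entry and are assumed, while the map contents mirror the example dictionary from the javadoc. The call site must handle the declared ErrorException and IOException.

```java
import java.util.HashMap;
import java.util.Map;

// 'client' is a hypothetical, previously constructed instance exposing putIntegerValid.
Map<String, Integer> arrayBody = new HashMap<String, Integer>();
arrayBody.put("0", 1);
arrayBody.put("1", -1);
arrayBody.put("2", 3);
arrayBody.put("3", 300);
// Throws ErrorException / IOException; a null map raises IllegalArgumentException.
client.putIntegerValid(arrayBody);
```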
public static Throwable getError(Intent result) { return (Throwable) result.getSerializableExtra(Extra.ERROR); }
static Throwable function(Intent result) { return (Throwable) result.getSerializableExtra(Extra.ERROR); }
/** * Retrieve error that caused crop to fail * * @param result Result Intent * @return Throwable handled in CropImageActivity */
Retrieve error that caused crop to fail
getError
{ "repo_name": "mityung/XERUNG", "path": "Andriod/Xerung/Xerung/src/main/java/com/example/contactplusgroup/crop/Crop.java", "license": "apache-2.0", "size": 7548 }
[ "android.content.Intent" ]
import android.content.Intent;
import android.content.*;
[ "android.content" ]
android.content;
799,317
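A hedged sketch of using Crop.getError inside onActivityResult; only the static getError(Intent) call from this entry is assumed, and the REQUEST_CROP constant is a hypothetical request code.

```java
import android.content.Intent;
import android.util.Log;

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    // Hypothetical request code under which the crop activity was started.
    if (requestCode == REQUEST_CROP && data != null) {
        Throwable error = Crop.getError(data);
        if (error != null) {
            // Crop failed: log or surface the cause carried back in the result Intent.
            Log.e("CropExample", "Crop failed", error);
        }
    }
}
```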
public void setRepositoryRoot(SVNUrl url);
void function(SVNUrl url);
/** * set the repository root url * @param url */
set the repository root url
setRepositoryRoot
{ "repo_name": "apicloudcom/APICloud-Studio", "path": "org.tigris.subversion.subclipse.core/src/org/tigris/subversion/subclipse/core/ISVNRepositoryLocation.java", "license": "gpl-3.0", "size": 4583 }
[ "org.tigris.subversion.svnclientadapter.SVNUrl" ]
import org.tigris.subversion.svnclientadapter.SVNUrl;
import org.tigris.subversion.svnclientadapter.*;
[ "org.tigris.subversion" ]
org.tigris.subversion;
2,911,962
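An illustrative snippet for setRepositoryRoot; the ISVNRepositoryLocation instance ('location') is assumed to be obtained elsewhere, and the SVNUrl(String) constructor throwing MalformedURLException is taken as an assumption about the svnclientadapter API.

```java
import java.net.MalformedURLException;

import org.tigris.subversion.svnclientadapter.SVNUrl;

// 'location' is a hypothetical ISVNRepositoryLocation obtained from the repository manager.
try {
    SVNUrl root = new SVNUrl("https://svn.example.org/repos/project");
    location.setRepositoryRoot(root);
} catch (MalformedURLException e) {
    // The string could not be parsed into a valid SVN URL.
    e.printStackTrace();
}
```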
public Image getImage() { return image; }
Image function() { return image; }
/** * Getter for property image. * * @return Value of property image. */
Getter for property image
getImage
{ "repo_name": "SafetyCulture/DroidText", "path": "app/src/main/java/com/lowagie/text/pdf/PdfPCell.java", "license": "lgpl-3.0", "size": 25767 }
[ "com.lowagie.text.Image" ]
import com.lowagie.text.Image;
import com.lowagie.text.*;
[ "com.lowagie.text" ]
com.lowagie.text;
2,604,855
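A brief sketch pairing the getImage getter with a cell that carries an image; Image.getInstance(String) and the PdfPCell(Image) constructor are assumed from the iText API this class belongs to, and the file name is hypothetical.

```java
import com.lowagie.text.Image;
import com.lowagie.text.pdf.PdfPCell;

// Small helper; Image.getInstance declares checked exceptions, so they are propagated here.
static Image readBackCellImage() throws Exception {
    Image logo = Image.getInstance("logo.png"); // hypothetical path
    PdfPCell cell = new PdfPCell(logo);
    return cell.getImage();                     // the image set on the cell, or null if none
}
```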
@Override protected void rendezvousWithJms(final ProcessContext context, final ProcessSession processSession, final JMSConsumer consumer) throws ProcessException { final String destinationName = context.getProperty(DESTINATION).evaluateAttributeExpressions().getValue(); final String errorQueueName = context.getProperty(ERROR_QUEUE).evaluateAttributeExpressions().getValue(); final boolean durable = isDurableSubscriber(context); final boolean shared = isShared(context); final String subscriptionName = context.getProperty(SUBSCRIPTION_NAME).evaluateAttributeExpressions().getValue(); final String charset = context.getProperty(CHARSET).evaluateAttributeExpressions().getValue();
void function(final ProcessContext context, final ProcessSession processSession, final JMSConsumer consumer) throws ProcessException { final String destinationName = context.getProperty(DESTINATION).evaluateAttributeExpressions().getValue(); final String errorQueueName = context.getProperty(ERROR_QUEUE).evaluateAttributeExpressions().getValue(); final boolean durable = isDurableSubscriber(context); final boolean shared = isShared(context); final String subscriptionName = context.getProperty(SUBSCRIPTION_NAME).evaluateAttributeExpressions().getValue(); final String charset = context.getProperty(CHARSET).evaluateAttributeExpressions().getValue();
/** * Will construct a {@link FlowFile} containing the body of the consumed JMS * message (if {@link JMSResponse} returned by {@link JMSConsumer} is not * null) and JMS properties that came with message which are added to a * {@link FlowFile} as attributes, transferring {@link FlowFile} to * 'success' {@link Relationship}. */
Will construct a <code>FlowFile</code> containing the body of the consumed JMS message (if <code>JMSResponse</code> returned by <code>JMSConsumer</code> is not null) and JMS properties that came with message which are added to a <code>FlowFile</code> as attributes, transferring <code>FlowFile</code> to 'success' <code>Relationship</code>
rendezvousWithJms
{ "repo_name": "jtstorck/nifi", "path": "nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/ConsumeJMS.java", "license": "apache-2.0", "size": 18636 }
[ "org.apache.nifi.processor.ProcessContext", "org.apache.nifi.processor.ProcessSession", "org.apache.nifi.processor.exception.ProcessException" ]
import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.*; import org.apache.nifi.processor.exception.*;
[ "org.apache.nifi" ]
org.apache.nifi;
1,179,863
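The method body in this entry is truncated after the property reads, so the sketch below is only a hedged illustration of how a NiFi processor commonly turns a consumed message into a FlowFile; the messageBody/messageHeaders variables and REL_SUCCESS relationship are hypothetical stand-ins, and processSession.create, write, putAllAttributes and transfer are assumed from the standard ProcessSession API rather than taken from this entry.

```java
import java.nio.charset.Charset;

import org.apache.nifi.flowfile.FlowFile;

// Illustrative fragment: 'processSession' and 'charset' come from the method above,
// 'messageBody' (String) and 'messageHeaders' (Map<String, String>) stand in for
// data obtained from the JMSConsumer response.
FlowFile flowFile = processSession.create();
flowFile = processSession.write(flowFile,
        out -> out.write(messageBody.getBytes(Charset.forName(charset))));
flowFile = processSession.putAllAttributes(flowFile, messageHeaders);
processSession.transfer(flowFile, REL_SUCCESS);
```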