method
stringlengths
13
441k
clean_method
stringlengths
7
313k
doc
stringlengths
17
17.3k
comment
stringlengths
3
1.42k
method_name
stringlengths
1
273
extra
dict
imports
sequence
imports_info
stringlengths
19
34.8k
cluster_imports_info
stringlengths
15
3.66k
libraries
sequence
libraries_info
stringlengths
6
661
id
int64
0
2.92M
public DataNode setSet_voltageScalar(double set_voltage);
DataNode function(double set_voltage);
/** * volage set on supply. * <p> * <b>Type:</b> NX_FLOAT * <b>Units:</b> NX_VOLTAGE * </p> * * @param set_voltage the set_voltage */
volage set on supply. Type: NX_FLOAT Units: NX_VOLTAGE
setSet_voltageScalar
{ "repo_name": "colinpalmer/dawnsci", "path": "org.eclipse.dawnsci.nexus/autogen/org/eclipse/dawnsci/nexus/NXelectrostatic_kicker.java", "license": "epl-1.0", "size": 6013 }
[ "org.eclipse.dawnsci.analysis.api.tree.DataNode" ]
import org.eclipse.dawnsci.analysis.api.tree.DataNode;
import org.eclipse.dawnsci.analysis.api.tree.*;
[ "org.eclipse.dawnsci" ]
org.eclipse.dawnsci;
1,511,207
List<String> getTableNames() throws IOException;
List<String> getTableNames() throws IOException;
/** * Gets the list of Kiji table names. * * @return A list of the names of Kiji tables installed in the Kiji instance. * @throws IOException If there is an error. */
Gets the list of Kiji table names
getTableNames
{ "repo_name": "robotoer/ast-java", "path": "Kiji.java", "license": "apache-2.0", "size": 11430 }
[ "java.io.IOException", "java.util.List" ]
import java.io.IOException; import java.util.List;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
351,756
public char[] getData(int index) { // retrieve the length int l = Utils.twoChars2Int(theList.get(index), theList.get(index+1)); // now retrieve the characters for this data block char data[] = new char[l]; for(int i=0; i<l; i++) { data[i] = theList.get(index+2+i); } return data; } //// FIXED LENGTH DATA METHODS
char[] function(int index) { int l = Utils.twoChars2Int(theList.get(index), theList.get(index+1)); char data[] = new char[l]; for(int i=0; i<l; i++) { data[i] = theList.get(index+2+i); } return data; }
/** * Get variable length data from at the given index. * @param index * @return char array of data */
Get variable length data from at the given index
getData
{ "repo_name": "johann-petrak/gateplugin-StringAnnotation", "path": "src/com/jpetrak/gate/stringannotation/utils/StoreArrayOfCharArrays.java", "license": "lgpl-2.1", "size": 15071 }
[ "com.jpetrak.gate.stringannotation.extendedgazetteer.trie.Utils" ]
import com.jpetrak.gate.stringannotation.extendedgazetteer.trie.Utils;
import com.jpetrak.gate.stringannotation.extendedgazetteer.trie.*;
[ "com.jpetrak.gate" ]
com.jpetrak.gate;
1,275,860
@ReflectiveMethod(name = "d", types = {}) public boolean d(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); }
@ReflectiveMethod(name = "d", types = {}) boolean function(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); }
/** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.BlockFence#d() */
TODO Find correct name
d
{ "repo_name": "Vilsol/NMSWrapper", "path": "src/main/java/me/vilsol/nmswrapper/wraps/unparsed/NMSBlockFence.java", "license": "apache-2.0", "size": 4671 }
[ "me.vilsol.nmswrapper.NMSWrapper", "me.vilsol.nmswrapper.reflections.ReflectiveMethod" ]
import me.vilsol.nmswrapper.NMSWrapper; import me.vilsol.nmswrapper.reflections.ReflectiveMethod;
import me.vilsol.nmswrapper.*; import me.vilsol.nmswrapper.reflections.*;
[ "me.vilsol.nmswrapper" ]
me.vilsol.nmswrapper;
629,017
public SubResource backendHttpSettings() { return this.backendHttpSettings; }
SubResource function() { return this.backendHttpSettings; }
/** * Get the backendHttpSettings property: Backend http settings resource of the application gateway. * * @return the backendHttpSettings value. */
Get the backendHttpSettings property: Backend http settings resource of the application gateway
backendHttpSettings
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/fluent/models/ApplicationGatewayRequestRoutingRuleInner.java", "license": "mit", "size": 9804 }
[ "com.azure.core.management.SubResource" ]
import com.azure.core.management.SubResource;
import com.azure.core.management.*;
[ "com.azure.core" ]
com.azure.core;
665,627
public static ThingStatusInfoChangedEvent createStatusInfoChangedEvent(ThingUID thingUID, ThingStatusInfo thingStatusInfo, ThingStatusInfo oldThingStatusInfo) { Preconditions.checkArgument(thingUID != null, "The argument 'thingUID' must not be null."); Preconditions.checkArgument(thingStatusInfo != null, "The argument 'thingStatusInfo' must not be null."); Preconditions.checkArgument(oldThingStatusInfo != null, "The argument 'oldThingStatusInfo' must not be null."); String topic = buildTopic(THING_STATUS_INFO_CHANGED_EVENT_TOPIC, thingUID); String payload = serializePayload(new ThingStatusInfo[] { thingStatusInfo, oldThingStatusInfo }); return new ThingStatusInfoChangedEvent(topic, payload, thingUID, thingStatusInfo, oldThingStatusInfo); }
static ThingStatusInfoChangedEvent function(ThingUID thingUID, ThingStatusInfo thingStatusInfo, ThingStatusInfo oldThingStatusInfo) { Preconditions.checkArgument(thingUID != null, STR); Preconditions.checkArgument(thingStatusInfo != null, STR); Preconditions.checkArgument(oldThingStatusInfo != null, STR); String topic = buildTopic(THING_STATUS_INFO_CHANGED_EVENT_TOPIC, thingUID); String payload = serializePayload(new ThingStatusInfo[] { thingStatusInfo, oldThingStatusInfo }); return new ThingStatusInfoChangedEvent(topic, payload, thingUID, thingStatusInfo, oldThingStatusInfo); }
/** * Creates a new thing status info changed event based on a thing UID, a thing status info and the old thing status * info object. * * @param thingUID the thing UID * @param thingStatusInfo the thing status info object * @param oldThingStatusInfo the old thing status info object * * @return the created thing status info changed event * * @throws IllegalArgumentException if thingUID or thingStatusInfo is null */
Creates a new thing status info changed event based on a thing UID, a thing status info and the old thing status info object
createStatusInfoChangedEvent
{ "repo_name": "philomatic/smarthome", "path": "bundles/core/org.eclipse.smarthome.core.thing/src/main/java/org/eclipse/smarthome/core/thing/events/ThingEventFactory.java", "license": "epl-1.0", "size": 11854 }
[ "com.google.common.base.Preconditions", "org.eclipse.smarthome.core.thing.ThingStatusInfo", "org.eclipse.smarthome.core.thing.ThingUID" ]
import com.google.common.base.Preconditions; import org.eclipse.smarthome.core.thing.ThingStatusInfo; import org.eclipse.smarthome.core.thing.ThingUID;
import com.google.common.base.*; import org.eclipse.smarthome.core.thing.*;
[ "com.google.common", "org.eclipse.smarthome" ]
com.google.common; org.eclipse.smarthome;
2,461,346
public ServiceResponseWithHeaders<Void, LROSADsDeleteAsyncRelativeRetryInvalidHeaderHeaders> beginDeleteAsyncRelativeRetryInvalidHeader() throws CloudException, IOException { Call<ResponseBody> call = service.beginDeleteAsyncRelativeRetryInvalidHeader(this.client.getAcceptLanguage()); return beginDeleteAsyncRelativeRetryInvalidHeaderDelegate(call.execute()); }
ServiceResponseWithHeaders<Void, LROSADsDeleteAsyncRelativeRetryInvalidHeaderHeaders> function() throws CloudException, IOException { Call<ResponseBody> call = service.beginDeleteAsyncRelativeRetryInvalidHeader(this.client.getAcceptLanguage()); return beginDeleteAsyncRelativeRetryInvalidHeaderDelegate(call.execute()); }
/** * Long running delete request, service returns a 202 to the initial request. The endpoint indicated in the Azure-AsyncOperation header is invalid. * * @throws CloudException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the {@link ServiceResponseWithHeaders} object if successful. */
Long running delete request, service returns a 202 to the initial request. The endpoint indicated in the Azure-AsyncOperation header is invalid
beginDeleteAsyncRelativeRetryInvalidHeader
{ "repo_name": "stankovski/AutoRest", "path": "AutoRest/Generators/Java/Azure.Java.Tests/src/main/java/fixtures/lro/LROSADsOperationsImpl.java", "license": "mit", "size": 226665 }
[ "com.microsoft.azure.CloudException", "com.microsoft.rest.ServiceResponseWithHeaders", "java.io.IOException" ]
import com.microsoft.azure.CloudException; import com.microsoft.rest.ServiceResponseWithHeaders; import java.io.IOException;
import com.microsoft.azure.*; import com.microsoft.rest.*; import java.io.*;
[ "com.microsoft.azure", "com.microsoft.rest", "java.io" ]
com.microsoft.azure; com.microsoft.rest; java.io;
2,703,295
static public void drawBoxWireframe(GL2 gl2,Tuple3d bottom,Tuple3d top) { gl2.glDisable(GL2.GL_TEXTURE_2D); boolean lightWasOn = OpenGLHelper.disableLightingStart(gl2); double x0=bottom.x; double y0=bottom.y; double z0=bottom.z; double x1=top.x; double y1=top.y; double z1=top.z; gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f( 0, 0,-1); gl2.glVertex3d(x0,y1,z0); gl2.glVertex3d(x1,y1,z0); gl2.glVertex3d(x1,y0,z0); gl2.glVertex3d(x0,y0,z0); gl2.glEnd(); // bottom gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f( 0, 0, 1); gl2.glVertex3d(x1,y1,z1); gl2.glVertex3d(x0,y1,z1); gl2.glVertex3d(x0,y0,z1); gl2.glVertex3d(x1,y0,z1); gl2.glEnd(); // top gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f( 0, 1, 0); gl2.glVertex3d(x0,y1,z1); gl2.glVertex3d(x1,y1,z1); gl2.glVertex3d(x1,y1,z0); gl2.glVertex3d(x0,y1,z0); gl2.glEnd(); // side gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f( 0,-1, 0); gl2.glVertex3d(x1,y0,z1); gl2.glVertex3d(x0,y0,z1); gl2.glVertex3d(x0,y0,z0); gl2.glVertex3d(x1,y0,z0); gl2.glEnd(); gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f( 1, 0, 0); gl2.glVertex3d(x1,y1,z0); gl2.glVertex3d(x1,y1,z1); gl2.glVertex3d(x1,y0,z1); gl2.glVertex3d(x1,y0,z0); gl2.glEnd(); gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f(-1, 0, 0); gl2.glVertex3d(x0,y0,z1); gl2.glVertex3d(x0,y1,z1); gl2.glVertex3d(x0,y1,z0); gl2.glVertex3d(x0,y0,z0); gl2.glEnd(); OpenGLHelper.disableLightingEnd(gl2,lightWasOn); }
static void function(GL2 gl2,Tuple3d bottom,Tuple3d top) { gl2.glDisable(GL2.GL_TEXTURE_2D); boolean lightWasOn = OpenGLHelper.disableLightingStart(gl2); double x0=bottom.x; double y0=bottom.y; double z0=bottom.z; double x1=top.x; double y1=top.y; double z1=top.z; gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f( 0, 0,-1); gl2.glVertex3d(x0,y1,z0); gl2.glVertex3d(x1,y1,z0); gl2.glVertex3d(x1,y0,z0); gl2.glVertex3d(x0,y0,z0); gl2.glEnd(); gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f( 0, 0, 1); gl2.glVertex3d(x1,y1,z1); gl2.glVertex3d(x0,y1,z1); gl2.glVertex3d(x0,y0,z1); gl2.glVertex3d(x1,y0,z1); gl2.glEnd(); gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f( 0, 1, 0); gl2.glVertex3d(x0,y1,z1); gl2.glVertex3d(x1,y1,z1); gl2.glVertex3d(x1,y1,z0); gl2.glVertex3d(x0,y1,z0); gl2.glEnd(); gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f( 0,-1, 0); gl2.glVertex3d(x1,y0,z1); gl2.glVertex3d(x0,y0,z1); gl2.glVertex3d(x0,y0,z0); gl2.glVertex3d(x1,y0,z0); gl2.glEnd(); gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f( 1, 0, 0); gl2.glVertex3d(x1,y1,z0); gl2.glVertex3d(x1,y1,z1); gl2.glVertex3d(x1,y0,z1); gl2.glVertex3d(x1,y0,z0); gl2.glEnd(); gl2.glBegin(GL2.GL_LINE_LOOP); gl2.glNormal3f(-1, 0, 0); gl2.glVertex3d(x0,y0,z1); gl2.glVertex3d(x0,y1,z1); gl2.glVertex3d(x0,y1,z0); gl2.glVertex3d(x0,y0,z0); gl2.glEnd(); OpenGLHelper.disableLightingEnd(gl2,lightWasOn); }
/** * draw box based on two corners * @param gl2 * @param bottom minimum bounds * @param top maximum bounds */
draw box based on two corners
drawBoxWireframe
{ "repo_name": "i-make-robots/Evil-Overlord", "path": "src/main/java/com/marginallyclever/convenience/PrimitiveSolids.java", "license": "gpl-2.0", "size": 11759 }
[ "javax.vecmath.Tuple3d" ]
import javax.vecmath.Tuple3d;
import javax.vecmath.*;
[ "javax.vecmath" ]
javax.vecmath;
2,467,083
public synchronized NodeList getNodeList(String path) throws ProcessingException { NodeList result = null; path = this.createPath(path); String[] pathComponents = DOMUtil.buildPathArray(path); if (pathComponents == null) { result = this.xpathProcessor.selectNodeList(this.data, path); } else { result = DOMUtil.getNodeListFromPath(data, pathComponents); } // clone list if (result != null) { result = new NodeListImpl(result); } return result; }
synchronized NodeList function(String path) throws ProcessingException { NodeList result = null; path = this.createPath(path); String[] pathComponents = DOMUtil.buildPathArray(path); if (pathComponents == null) { result = this.xpathProcessor.selectNodeList(this.data, path); } else { result = DOMUtil.getNodeListFromPath(data, pathComponents); } if (result != null) { result = new NodeListImpl(result); } return result; }
/** * Get a copy all the nodes specified by the path. */
Get a copy all the nodes specified by the path
getNodeList
{ "repo_name": "apache/cocoon", "path": "blocks/cocoon-session-fw/cocoon-session-fw-impl/src/main/java/org/apache/cocoon/webapps/session/context/SimpleSessionContext.java", "license": "apache-2.0", "size": 15998 }
[ "org.apache.cocoon.ProcessingException", "org.apache.cocoon.xml.dom.DOMUtil", "org.apache.excalibur.xml.xpath.NodeListImpl", "org.w3c.dom.NodeList" ]
import org.apache.cocoon.ProcessingException; import org.apache.cocoon.xml.dom.DOMUtil; import org.apache.excalibur.xml.xpath.NodeListImpl; import org.w3c.dom.NodeList;
import org.apache.cocoon.*; import org.apache.cocoon.xml.dom.*; import org.apache.excalibur.xml.xpath.*; import org.w3c.dom.*;
[ "org.apache.cocoon", "org.apache.excalibur", "org.w3c.dom" ]
org.apache.cocoon; org.apache.excalibur; org.w3c.dom;
664,782
@SmallTest @Feature({"Preferences"}) @CommandLineFlags.Add(ChromeSwitches.USE_FAKE_DEVICE_FOR_MEDIA_STREAM) public void testMicBlocked() throws Exception { setEnableMic(false); // Test that the microphone permission doesn't get requested. loadUrl(TestHttpServerClient.getUrl("content/test/data/media/getusermedia.html")); runJavaScriptCodeInCurrentTab("getUserMediaAndStop({video: false, audio: true});"); // No infobars are expected. assertTrue(getInfoBars().isEmpty()); }
@Feature({STR}) @CommandLineFlags.Add(ChromeSwitches.USE_FAKE_DEVICE_FOR_MEDIA_STREAM) void function() throws Exception { setEnableMic(false); loadUrl(TestHttpServerClient.getUrl(STR)); runJavaScriptCodeInCurrentTab(STR); assertTrue(getInfoBars().isEmpty()); }
/** * Sets Allow Mic Enabled to be false and make sure it is set correctly. * @throws Exception */
Sets Allow Mic Enabled to be false and make sure it is set correctly
testMicBlocked
{ "repo_name": "lihui7115/ChromiumGStreamerBackend", "path": "chrome/android/javatests/src/org/chromium/chrome/browser/preferences/website/SiteSettingsPreferencesTest.java", "license": "bsd-3-clause", "size": 17765 }
[ "org.chromium.base.test.util.CommandLineFlags", "org.chromium.base.test.util.Feature", "org.chromium.chrome.browser.ChromeSwitches", "org.chromium.chrome.test.util.TestHttpServerClient" ]
import org.chromium.base.test.util.CommandLineFlags; import org.chromium.base.test.util.Feature; import org.chromium.chrome.browser.ChromeSwitches; import org.chromium.chrome.test.util.TestHttpServerClient;
import org.chromium.base.test.util.*; import org.chromium.chrome.browser.*; import org.chromium.chrome.test.util.*;
[ "org.chromium.base", "org.chromium.chrome" ]
org.chromium.base; org.chromium.chrome;
2,581,028
public final Object lookup(Name name) throws NamingException { return lookup(name.toString()); }
final Object function(Name name) throws NamingException { return lookup(name.toString()); }
/** * Retrieves the named object. If name is empty, returns a new instance * of this context (which represents the same naming context as this * context, but its environment may be modified independently and it may * be accessed concurrently). * * @param name the name of the object to look up * @return the object bound to name * @exception NamingException if a naming exception is encountered */
Retrieves the named object. If name is empty, returns a new instance of this context (which represents the same naming context as this context, but its environment may be modified independently and it may be accessed concurrently)
lookup
{ "repo_name": "plumer/codana", "path": "tomcat_files/7.0.0/BaseDirContext.java", "license": "mit", "size": 57376 }
[ "javax.naming.Name", "javax.naming.NamingException" ]
import javax.naming.Name; import javax.naming.NamingException;
import javax.naming.*;
[ "javax.naming" ]
javax.naming;
2,558,153
public void selectTable1Column(Index column) { table1Columns.add(column); }
void function(Index column) { table1Columns.add(column); }
/** * Select table1 column name. */
Select table1 column name
selectTable1Column
{ "repo_name": "skekre98/apex-mlhr", "path": "library/src/main/java/com/datatorrent/lib/streamquery/InnerJoinOperator.java", "license": "apache-2.0", "size": 5628 }
[ "com.datatorrent.lib.streamquery.index.Index" ]
import com.datatorrent.lib.streamquery.index.Index;
import com.datatorrent.lib.streamquery.index.*;
[ "com.datatorrent.lib" ]
com.datatorrent.lib;
776,979
protected SearchableModelProvider<T, ?> getDataProvider() { return dataProvider; }
SearchableModelProvider<T, ?> function() { return dataProvider; }
/** * Returns the model data provider. * @return The {@code SearchableModelProvider}. */
Returns the model data provider
getDataProvider
{ "repo_name": "jtux270/translate", "path": "ovirt/frontend/webadmin/modules/gwt-common/src/main/java/org/ovirt/engine/ui/common/widget/action/AbstractActionPanel.java", "license": "gpl-3.0", "size": 26238 }
[ "org.ovirt.engine.ui.common.uicommon.model.SearchableModelProvider" ]
import org.ovirt.engine.ui.common.uicommon.model.SearchableModelProvider;
import org.ovirt.engine.ui.common.uicommon.model.*;
[ "org.ovirt.engine" ]
org.ovirt.engine;
2,217,607
@Override public void releaseConnection() { Header header = getResponseHeader("content-type"); if (header != null) { String contentTypeHeader = header.getValue(); if (contentTypeHeader != null && contentTypeHeader.toLowerCase(Locale.ROOT).contains("text/event-stream")) { return; } } super.releaseConnection(); } /** * {@inheritDoc}
void function() { Header header = getResponseHeader(STR); if (header != null) { String contentTypeHeader = header.getValue(); if (contentTypeHeader != null && contentTypeHeader.toLowerCase(Locale.ROOT).contains(STR)) { return; } } super.releaseConnection(); } /** * {@inheritDoc}
/** * Avoid releasing connection on event stream that is used in Server-Sent * Events. */
Avoid releasing connection on event stream that is used in Server-Sent Events
releaseConnection
{ "repo_name": "zapbot/zaproxy", "path": "src/org/zaproxy/zap/ZapGetMethod.java", "license": "apache-2.0", "size": 7085 }
[ "java.util.Locale", "org.apache.commons.httpclient.Header" ]
import java.util.Locale; import org.apache.commons.httpclient.Header;
import java.util.*; import org.apache.commons.httpclient.*;
[ "java.util", "org.apache.commons" ]
java.util; org.apache.commons;
2,325,963
private void addOperationProvider(final Element providerDef) throws FileSystemException { final String classname = providerDef.getAttribute("class-name"); // Attach only to available schemas final String[] schemas = getSchemas(providerDef); for (final String schema : schemas) { if (hasProvider(schema)) { final FileOperationProvider operationProvider = (FileOperationProvider) createInstance(classname); addOperationProvider(schema, operationProvider); } } }
void function(final Element providerDef) throws FileSystemException { final String classname = providerDef.getAttribute(STR); final String[] schemas = getSchemas(providerDef); for (final String schema : schemas) { if (hasProvider(schema)) { final FileOperationProvider operationProvider = (FileOperationProvider) createInstance(classname); addOperationProvider(schema, operationProvider); } } }
/** * Adds a operationProvider from a operationProvider definition. */
Adds a operationProvider from a operationProvider definition
addOperationProvider
{ "repo_name": "distribuitech/datos", "path": "datos-vfs/src/main/java/com/datos/vfs/impl/StandardFileSystemManager.java", "license": "apache-2.0", "size": 16528 }
[ "com.datos.vfs.FileSystemException", "com.datos.vfs.operations.FileOperationProvider", "org.w3c.dom.Element" ]
import com.datos.vfs.FileSystemException; import com.datos.vfs.operations.FileOperationProvider; import org.w3c.dom.Element;
import com.datos.vfs.*; import com.datos.vfs.operations.*; import org.w3c.dom.*;
[ "com.datos.vfs", "org.w3c.dom" ]
com.datos.vfs; org.w3c.dom;
2,494,671
@ServiceMethod(returns = ReturnType.SINGLE) private Mono<NotificationHubResourceInner> createOrUpdateAsync( String resourceGroupName, String namespaceName, String notificationHubName, NotificationHubCreateOrUpdateParameters parameters) { return createOrUpdateWithResponseAsync(resourceGroupName, namespaceName, notificationHubName, parameters) .flatMap( (Response<NotificationHubResourceInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); }
@ServiceMethod(returns = ReturnType.SINGLE) Mono<NotificationHubResourceInner> function( String resourceGroupName, String namespaceName, String notificationHubName, NotificationHubCreateOrUpdateParameters parameters) { return createOrUpdateWithResponseAsync(resourceGroupName, namespaceName, notificationHubName, parameters) .flatMap( (Response<NotificationHubResourceInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); }
/** * Creates/Update a NotificationHub in a namespace. * * @param resourceGroupName The name of the resource group. * @param namespaceName The namespace name. * @param notificationHubName The notification hub name. * @param parameters Parameters supplied to the create/update a NotificationHub Resource. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return description of a NotificationHub Resource. */
Creates/Update a NotificationHub in a namespace
createOrUpdateAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/notificationhubs/azure-resourcemanager-notificationhubs/src/main/java/com/azure/resourcemanager/notificationhubs/implementation/NotificationHubsClientImpl.java", "license": "mit", "size": 154387 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.Response", "com.azure.resourcemanager.notificationhubs.fluent.models.NotificationHubResourceInner", "com.azure.resourcemanager.notificationhubs.models.NotificationHubCreateOrUpdateParameters" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.resourcemanager.notificationhubs.fluent.models.NotificationHubResourceInner; import com.azure.resourcemanager.notificationhubs.models.NotificationHubCreateOrUpdateParameters;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.notificationhubs.fluent.models.*; import com.azure.resourcemanager.notificationhubs.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
2,251,472
public String toString(final String indent) { final Map<SourceNode<?, ?, ?, ?>, List<String>> sourceToTopics = new HashMap<>(); for (final Map.Entry<String, SourceNode<?, ?, ?, ?>> sourceNodeEntry : sourceNodesByTopic.entrySet()) { final String topic = sourceNodeEntry.getKey(); final SourceNode<?, ?, ?, ?> source = sourceNodeEntry.getValue(); sourceToTopics.computeIfAbsent(source, s -> new ArrayList<>()); sourceToTopics.get(source).add(topic); } final StringBuilder sb = new StringBuilder(indent + "ProcessorTopology:\n"); // start from sources for (final Map.Entry<SourceNode<?, ?, ?, ?>, List<String>> sourceNodeEntry : sourceToTopics.entrySet()) { final SourceNode<?, ?, ?, ?> source = sourceNodeEntry.getKey(); final List<String> topics = sourceNodeEntry.getValue(); sb.append(source.toString(indent + "\t")) .append(topicsToString(indent + "\t", topics)) .append(childrenToString(indent + "\t", source.children())); } return sb.toString(); }
String function(final String indent) { final Map<SourceNode<?, ?, ?, ?>, List<String>> sourceToTopics = new HashMap<>(); for (final Map.Entry<String, SourceNode<?, ?, ?, ?>> sourceNodeEntry : sourceNodesByTopic.entrySet()) { final String topic = sourceNodeEntry.getKey(); final SourceNode<?, ?, ?, ?> source = sourceNodeEntry.getValue(); sourceToTopics.computeIfAbsent(source, s -> new ArrayList<>()); sourceToTopics.get(source).add(topic); } final StringBuilder sb = new StringBuilder(indent + STR); for (final Map.Entry<SourceNode<?, ?, ?, ?>, List<String>> sourceNodeEntry : sourceToTopics.entrySet()) { final SourceNode<?, ?, ?, ?> source = sourceNodeEntry.getKey(); final List<String> topics = sourceNodeEntry.getValue(); sb.append(source.toString(indent + "\t")) .append(topicsToString(indent + "\t", topics)) .append(childrenToString(indent + "\t", source.children())); } return sb.toString(); }
/** * Produces a string representation containing useful information this topology. * This is useful in debugging scenarios. * @return A string representation of this instance. */
Produces a string representation containing useful information this topology. This is useful in debugging scenarios
toString
{ "repo_name": "Chasego/kafka", "path": "streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorTopology.java", "license": "apache-2.0", "size": 10289 }
[ "java.util.ArrayList", "java.util.HashMap", "java.util.List", "java.util.Map" ]
import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,573,277
public Adapter createUserDefinedAdapter() { return null; }
Adapter function() { return null; }
/** * Creates a new adapter for an object of class '{@link iso20022.UserDefined <em>User Defined</em>}'. * <!-- begin-user-doc --> * This default implementation returns null so that we can easily ignore cases; * it's useful to ignore a case when inheritance will catch all the cases anyway. * <!-- end-user-doc --> * @return the new adapter. * @see iso20022.UserDefined * @generated */
Creates a new adapter for an object of class '<code>iso20022.UserDefined User Defined</code>'. This default implementation returns null so that we can easily ignore cases; it's useful to ignore a case when inheritance will catch all the cases anyway.
createUserDefinedAdapter
{ "repo_name": "ISO20022ArchitectForum/sample-code-public", "path": "DLT/Corda/ISO20022Generated/src/iso20022/util/Iso20022AdapterFactory.java", "license": "apache-2.0", "size": 55827 }
[ "org.eclipse.emf.common.notify.Adapter" ]
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
781,749
public Vector transformNonZeros( final UnivariateScalarFunction function);
Vector function( final UnivariateScalarFunction function);
/** * Applies the given function to each of the non-zero elements of this * vector and returns a new vector with the result. * * @param function * The function from double to double to apply to the non-zero * elements. * @return * A new vector whose elements represent the result of applying the * function to the corresponding element in this vector, except for * zeros, which are exactly the same as in this vector. */
Applies the given function to each of the non-zero elements of this vector and returns a new vector with the result
transformNonZeros
{ "repo_name": "codeaudit/Foundry", "path": "Components/CommonCore/Source/gov/sandia/cognition/math/matrix/Vector.java", "license": "bsd-3-clause", "size": 13700 }
[ "gov.sandia.cognition.math.UnivariateScalarFunction" ]
import gov.sandia.cognition.math.UnivariateScalarFunction;
import gov.sandia.cognition.math.*;
[ "gov.sandia.cognition" ]
gov.sandia.cognition;
564,937
public INode getINode(String src) throws UnresolvedLinkException { return getLastINodeInPath(src).getINode(0); }
INode function(String src) throws UnresolvedLinkException { return getLastINodeInPath(src).getINode(0); }
/** * Get {@link INode} associated with the file / directory. */
Get <code>INode</code> associated with the file / directory
getINode
{ "repo_name": "yncxcw/Yarn-SBlock", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java", "license": "apache-2.0", "size": 120183 }
[ "org.apache.hadoop.fs.UnresolvedLinkException" ]
import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.fs.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
136,554
public void setSocket(Socket socket) { this.socket = socket; }
void function(Socket socket) { this.socket = socket; }
/** * set the socket * @param socket */
set the socket
setSocket
{ "repo_name": "michel57/OnlineTicTacToe", "path": "Server/src/outils/Joueur.java", "license": "gpl-2.0", "size": 1917 }
[ "java.net.Socket" ]
import java.net.Socket;
import java.net.*;
[ "java.net" ]
java.net;
1,469,967
EReference getTrace_LastCommand();
EReference getTrace_LastCommand();
/** * Returns the meta object for the reference '{@link eu.mondo.collaboration.operationtracemodel.Trace#getLastCommand <em>Last Command</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the reference '<em>Last Command</em>'. * @see eu.mondo.collaboration.operationtracemodel.Trace#getLastCommand() * @see #getTrace() * @generated */
Returns the meta object for the reference '<code>eu.mondo.collaboration.operationtracemodel.Trace#getLastCommand Last Command</code>'.
getTrace_LastCommand
{ "repo_name": "FTSRG/mondo-collab-framework", "path": "archive/workspaceTracker/VA/traceModel/src/eu/mondo/collaboration/operationtracemodel/OperationtracemodelPackage.java", "license": "epl-1.0", "size": 59018 }
[ "org.eclipse.emf.ecore.EReference" ]
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
2,712,427
private Collection<String> parse( final File p_grammar, final Set<String> p_docuclean, final Map<String, File> p_imports, final ITemplate p_template ) throws IOException { // lexing and parsing the input grammar file final CASTVisitorAntLR l_visitor = new CASTVisitorAntLR( p_template, p_docuclean ); l_visitor.visit( new ANTLRv4Parser( new CommonTokenStream( new ANTLRv4Lexer( new ANTLRInputStream( new FileInputStream( p_grammar ) ) ) ) ).grammarSpec() ); return l_visitor.getGrammarImports().stream() .map( i -> p_imports.get( i.get() ) ) .filter( Objects::nonNull ) .flatMap( i -> { try { return this.parse( i, p_docuclean, p_imports, p_template ).stream(); } catch ( final IOException l_exception ) { return Stream.of( l_exception.getMessage() ); } } ).collect( Collectors.toList() ); }
Collection<String> function( final File p_grammar, final Set<String> p_docuclean, final Map<String, File> p_imports, final ITemplate p_template ) throws IOException { final CASTVisitorAntLR l_visitor = new CASTVisitorAntLR( p_template, p_docuclean ); l_visitor.visit( new ANTLRv4Parser( new CommonTokenStream( new ANTLRv4Lexer( new ANTLRInputStream( new FileInputStream( p_grammar ) ) ) ) ).grammarSpec() ); return l_visitor.getGrammarImports().stream() .map( i -> p_imports.get( i.get() ) ) .filter( Objects::nonNull ) .flatMap( i -> { try { return this.parse( i, p_docuclean, p_imports, p_template ).stream(); } catch ( final IOException l_exception ) { return Stream.of( l_exception.getMessage() ); } } ).collect( Collectors.toList() ); }
/** * runs parsing process with recursive descent of a grammar file * * @param p_grammar grammar file * @param p_docuclean set with documentation clean regex * @param p_imports map with grammar imported grammar files * @param p_template template which will be passed * @return collection with error messages * * @throws IOException thrown on IO errors */
runs parsing process with recursive descent of a grammar file
parse
{ "repo_name": "flashpixx/RRD-ANTLR4", "path": "src/main/java/de/flashpixx/rrd_antlr4/engine/CEngine.java", "license": "lgpl-3.0", "size": 6319 }
[ "de.flashpixx.rrd_antlr4.antlr.ANTLRv4Lexer", "de.flashpixx.rrd_antlr4.antlr.ANTLRv4Parser", "de.flashpixx.rrd_antlr4.antlr.CASTVisitorAntLR", "de.flashpixx.rrd_antlr4.engine.template.ITemplate", "java.io.File", "java.io.FileInputStream", "java.io.IOException", "java.util.Collection", "java.util.Map", "java.util.Objects", "java.util.Set", "java.util.stream.Collectors", "java.util.stream.Stream", "org.antlr.v4.runtime.ANTLRInputStream", "org.antlr.v4.runtime.CommonTokenStream" ]
import de.flashpixx.rrd_antlr4.antlr.ANTLRv4Lexer; import de.flashpixx.rrd_antlr4.antlr.ANTLRv4Parser; import de.flashpixx.rrd_antlr4.antlr.CASTVisitorAntLR; import de.flashpixx.rrd_antlr4.engine.template.ITemplate; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.Collection; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import org.antlr.v4.runtime.ANTLRInputStream; import org.antlr.v4.runtime.CommonTokenStream;
import de.flashpixx.rrd_antlr4.antlr.*; import de.flashpixx.rrd_antlr4.engine.template.*; import java.io.*; import java.util.*; import java.util.stream.*; import org.antlr.v4.runtime.*;
[ "de.flashpixx.rrd_antlr4", "java.io", "java.util", "org.antlr.v4" ]
de.flashpixx.rrd_antlr4; java.io; java.util; org.antlr.v4;
2,437,077
/**
 * Have the user leave this group chat room and then close it: lets the superclass
 * finish the room (the original comment says it records the end time — confirm),
 * removes this room as a packet listener from the connection, then leaves the
 * room and closes its tab in the chat container.
 */
public void closeChatRoom() {
    // Specify the end time.
    super.closeChatRoom();

    // Remove Listener
    SparkManager.getConnection().removePacketListener(this);

    ChatContainer container = SparkManager.getChatManager().getChatContainer();
    container.leaveChatRoom(this);
    container.closeTab(this);
}
void function() { super.closeChatRoom(); SparkManager.getConnection().removePacketListener(this); ChatContainer container = SparkManager.getChatManager() .getChatContainer(); container.leaveChatRoom(this); container.closeTab(this); }
/** * Have the user leave this chat room and then close it. */
Have the user leave this chat room and then close it
closeChatRoom
{ "repo_name": "joshuairl/toothchat-client", "path": "src/java/org/jivesoftware/spark/ui/rooms/GroupChatRoom.java", "license": "apache-2.0", "size": 42834 }
[ "org.jivesoftware.spark.SparkManager", "org.jivesoftware.spark.ui.ChatContainer" ]
import org.jivesoftware.spark.SparkManager; import org.jivesoftware.spark.ui.ChatContainer;
import org.jivesoftware.spark.*; import org.jivesoftware.spark.ui.*;
[ "org.jivesoftware.spark" ]
org.jivesoftware.spark;
2,622,976
/**
 * Returns a list of Azure SQL server firewall rules, asynchronously, by delegating
 * to the service-response variant and adapting it to a callback-style call.
 *
 * @param resourceGroupName the name of the resource group that contains the resource
 * @param serverName the name of the Azure SQL server
 * @param serviceCallback the async callback invoked on success or failure
 * @return the {@link ServiceCall} object tracking the in-flight request
 */
public ServiceCall<List<ServerFirewallRuleInner>> listFirewallRulesAsync(String resourceGroupName, String serverName, final ServiceCallback<List<ServerFirewallRuleInner>> serviceCallback) { return ServiceCall.fromResponse(listFirewallRulesWithServiceResponseAsync(resourceGroupName, serverName), serviceCallback); }
ServiceCall<List<ServerFirewallRuleInner>> function(String resourceGroupName, String serverName, final ServiceCallback<List<ServerFirewallRuleInner>> serviceCallback) { return ServiceCall.fromResponse(listFirewallRulesWithServiceResponseAsync(resourceGroupName, serverName), serviceCallback); }
/** * Returns a list of Azure SQL server firewall rules. * * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. * @param serverName The name of the Azure SQL server. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link ServiceCall} object */
Returns a list of Azure SQL server firewall rules
listFirewallRulesAsync
{ "repo_name": "pomortaz/azure-sdk-for-java", "path": "azure-mgmt-sql/src/main/java/com/microsoft/azure/management/sql/implementation/ServersInner.java", "license": "mit", "size": 66442 }
[ "com.microsoft.rest.ServiceCall", "com.microsoft.rest.ServiceCallback", "java.util.List" ]
import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback; import java.util.List;
import com.microsoft.rest.*; import java.util.*;
[ "com.microsoft.rest", "java.util" ]
com.microsoft.rest; java.util;
1,798,559
/**
 * Builds a list of all the SQL statements that this job needs in order to work
 * properly: the statements required by every job entry (including two legacy
 * compatibility call forms), plus — when a job log table is configured — the DDL
 * needed to create or adjust that log table.
 *
 * @param repository the repository passed through to each job entry (nullability not
 *        visible here — confirm)
 * @param metaStore the metastore passed through to each job entry
 * @param monitor optional progress monitor; may be null
 * @return the collected SQLStatement objects; a statement's error field is set when
 *         the log-table DDL could not be obtained
 * @throws KettleException if a job entry fails while producing its SQL
 */
public List<SQLStatement> getSQLStatements( Repository repository, IMetaStore metaStore, ProgressMonitorListener monitor ) throws KettleException {
    if ( monitor != null ) {
      monitor.beginTask( BaseMessages.getString( PKG, "JobMeta.Monitor.GettingSQLNeededForThisJob" ), nrJobEntries() + 1 );
    }
    List<SQLStatement> stats = new ArrayList<SQLStatement>();

    // Collect the SQL from every job entry copy (current API plus both legacy forms).
    for ( int i = 0; i < nrJobEntries(); i++ ) {
      JobEntryCopy copy = getJobEntry( i );
      if ( monitor != null ) {
        monitor.subTask( BaseMessages.getString( PKG, "JobMeta.Monitor.GettingSQLForJobEntryCopy" ) + copy + "]" );
      }
      stats.addAll( copy.getEntry().getSQLStatements( repository, metaStore, this ) );
      stats.addAll( compatibleGetEntrySQLStatements( copy.getEntry(), repository ) );
      stats.addAll( compatibleGetEntrySQLStatements( copy.getEntry(), repository, this ) );
      if ( monitor != null ) {
        monitor.worked( 1 );
      }
    }

    // Also check the sql for the logtable...
    if ( monitor != null ) {
      monitor.subTask( BaseMessages.getString( PKG, "JobMeta.Monitor.GettingSQLStatementsForJobLogTables" ) );
    }
    if ( jobLogTable.getDatabaseMeta() != null && !Utils.isEmpty( jobLogTable.getTableName() ) ) {
      Database db = new Database( this, jobLogTable.getDatabaseMeta() );
      try {
        db.connect();
        RowMetaInterface fields = jobLogTable.getLogRecord( LogStatus.START, null, null ).getRowMeta();
        String sql = db.getDDL( jobLogTable.getTableName(), fields );
        if ( sql != null && sql.length() > 0 ) {
          SQLStatement stat = new SQLStatement( BaseMessages.getString( PKG, "JobMeta.SQLFeedback.ThisJob" ), jobLogTable.getDatabaseMeta(), sql );
          stats.add( stat );
        }
      } catch ( KettleDatabaseException dbe ) {
        // DDL could not be determined; report it as an errored statement rather than failing.
        SQLStatement stat = new SQLStatement( BaseMessages.getString( PKG, "JobMeta.SQLFeedback.ThisJob" ), jobLogTable.getDatabaseMeta(), null );
        stat.setError( BaseMessages.getString( PKG, "JobMeta.SQLFeedback.ErrorObtainingJobLogTableInfo" ) + dbe.getMessage() );
        stats.add( stat );
      } finally {
        db.disconnect();
      }
    }
    if ( monitor != null ) {
      monitor.worked( 1 );
    }
    if ( monitor != null ) {
      monitor.done();
    }
    return stats;
}
List<SQLStatement> function( Repository repository, IMetaStore metaStore, ProgressMonitorListener monitor ) throws KettleException { if ( monitor != null ) { monitor .beginTask( BaseMessages.getString( PKG, STR ), nrJobEntries() + 1 ); } List<SQLStatement> stats = new ArrayList<SQLStatement>(); for ( int i = 0; i < nrJobEntries(); i++ ) { JobEntryCopy copy = getJobEntry( i ); if ( monitor != null ) { monitor.subTask( BaseMessages.getString( PKG, STR ) + copy + "]" ); } stats.addAll( copy.getEntry().getSQLStatements( repository, metaStore, this ) ); stats.addAll( compatibleGetEntrySQLStatements( copy.getEntry(), repository ) ); stats.addAll( compatibleGetEntrySQLStatements( copy.getEntry(), repository, this ) ); if ( monitor != null ) { monitor.worked( 1 ); } } if ( monitor != null ) { monitor.subTask( BaseMessages.getString( PKG, STR ) ); } if ( jobLogTable.getDatabaseMeta() != null && !Utils.isEmpty( jobLogTable.getTableName() ) ) { Database db = new Database( this, jobLogTable.getDatabaseMeta() ); try { db.connect(); RowMetaInterface fields = jobLogTable.getLogRecord( LogStatus.START, null, null ).getRowMeta(); String sql = db.getDDL( jobLogTable.getTableName(), fields ); if ( sql != null && sql.length() > 0 ) { SQLStatement stat = new SQLStatement( BaseMessages.getString( PKG, STR ), jobLogTable.getDatabaseMeta(), sql ); stats.add( stat ); } } catch ( KettleDatabaseException dbe ) { SQLStatement stat = new SQLStatement( BaseMessages.getString( PKG, STR ), jobLogTable.getDatabaseMeta(), null ); stat.setError( BaseMessages.getString( PKG, STR ) + dbe.getMessage() ); stats.add( stat ); } finally { db.disconnect(); } } if ( monitor != null ) { monitor.worked( 1 ); } if ( monitor != null ) { monitor.done(); } return stats; }
/** * Builds a list of all the SQL statements that this job needs in order to work properly. * * @return An ArrayList of SQLStatement objects. */
Builds a list of all the SQL statements that this job needs in order to work properly
getSQLStatements
{ "repo_name": "TatsianaKasiankova/pentaho-kettle", "path": "engine/src/main/java/org/pentaho/di/job/JobMeta.java", "license": "apache-2.0", "size": 87172 }
[ "java.util.ArrayList", "java.util.List", "org.pentaho.di.core.ProgressMonitorListener", "org.pentaho.di.core.SQLStatement", "org.pentaho.di.core.database.Database", "org.pentaho.di.core.exception.KettleDatabaseException", "org.pentaho.di.core.exception.KettleException", "org.pentaho.di.core.logging.LogStatus", "org.pentaho.di.core.row.RowMetaInterface", "org.pentaho.di.core.util.Utils", "org.pentaho.di.i18n.BaseMessages", "org.pentaho.di.job.entry.JobEntryCopy", "org.pentaho.di.repository.Repository", "org.pentaho.metastore.api.IMetaStore" ]
import java.util.ArrayList; import java.util.List; import org.pentaho.di.core.ProgressMonitorListener; import org.pentaho.di.core.SQLStatement; import org.pentaho.di.core.database.Database; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.LogStatus; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.util.Utils; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.entry.JobEntryCopy; import org.pentaho.di.repository.Repository; import org.pentaho.metastore.api.IMetaStore;
import java.util.*; import org.pentaho.di.core.*; import org.pentaho.di.core.database.*; import org.pentaho.di.core.exception.*; import org.pentaho.di.core.logging.*; import org.pentaho.di.core.row.*; import org.pentaho.di.core.util.*; import org.pentaho.di.i18n.*; import org.pentaho.di.job.entry.*; import org.pentaho.di.repository.*; import org.pentaho.metastore.api.*;
[ "java.util", "org.pentaho.di", "org.pentaho.metastore" ]
java.util; org.pentaho.di; org.pentaho.metastore;
901,119
/**
 * Sets the value of a specific attribute of a named MBean, identified by its object name.
 *
 * @param name the name of the MBean within which the attribute is to be set
 * @param attribute the identification of the attribute to be set and its new value
 * @throws InstanceNotFoundException if the MBean is not registered in the MBean server
 * @throws AttributeNotFoundException if the attribute is not accessible in the MBean
 * @throws InvalidAttributeValueException if the specified value is not valid for the attribute
 * @throws MBeanException wrapping an exception thrown by the MBean's setter
 * @throws ReflectionException wrapping an exception thrown when trying to invoke the setter
 */
public void setAttribute(ObjectName name, Attribute attribute) throws InstanceNotFoundException, AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException;
void function(ObjectName name, Attribute attribute) throws InstanceNotFoundException, AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException;
/** * Sets the value of a specific attribute of a named MBean. The MBean * is identified by its object name. * * @param name The name of the MBean within which the attribute is * to be set. * @param attribute The identification of the attribute to be set * and the value it is to be set to. * * @exception InstanceNotFoundException The MBean specified is not * registered in the MBean server. * @exception AttributeNotFoundException The attribute specified * is not accessible in the MBean. * @exception InvalidAttributeValueException The value specified * for the attribute is not valid. * @exception MBeanException Wraps an exception thrown by the * MBean's setter. * @exception ReflectionException Wraps a * <CODE>java.lang.Exception</CODE> thrown when trying to invoke * the setter. * @exception RuntimeOperationsException Wraps a * <CODE>java.lang.IllegalArgumentException</CODE>: The object * name in parameter is null or the attribute in parameter is * null. */
Sets the value of a specific attribute of a named MBean. The MBean is identified by its object name
setAttribute
{ "repo_name": "andreagenso/java2scala", "path": "test/J2s/java/openjdk-6-src-b27/jdk/src/share/classes/com/sun/jmx/interceptor/MBeanServerInterceptor.java", "license": "apache-2.0", "size": 31366 }
[ "javax.management.Attribute", "javax.management.AttributeNotFoundException", "javax.management.InstanceNotFoundException", "javax.management.InvalidAttributeValueException", "javax.management.MBeanException", "javax.management.ObjectName", "javax.management.ReflectionException" ]
import javax.management.Attribute; import javax.management.AttributeNotFoundException; import javax.management.InstanceNotFoundException; import javax.management.InvalidAttributeValueException; import javax.management.MBeanException; import javax.management.ObjectName; import javax.management.ReflectionException;
import javax.management.*;
[ "javax.management" ]
javax.management;
1,873,123
public void processUnsubscribe(String key, HttpServletRequest req, HttpServletResponse res) throws IOException { String timeoutHdr = req.getHeader("TIMEOUT"); String callbackHdr = req.getHeader("CALLBACK"); String sidHdr = req.getHeader("SID"); // Perform error checking: // 1. Method must be UNSUBSCRIBE // 2. SID header must be present // 3. NT and CALLBACK headers not present if (!"UNSUBSCRIBE".equalsIgnoreCase(req.getMethod())) { // Return 405 status res.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "Method " + req.getMethod() + " not allowed for this resource."); return; } if (sidHdr == null || sidHdr.length() == 0) { res.sendError(HttpServletResponse.SC_PRECONDITION_FAILED, "SID header field is missing or empty."); } if (timeoutHdr != null || callbackHdr != null) { res.sendError(HttpServletResponse.SC_BAD_REQUEST, "An SID header field and one of NT or CALLBACK header fields are present."); return; } Map<UUID, SubscriptionInfo> m = subscriptions.get(key); if (m == null) { res.sendError(HttpServletResponse.SC_PRECONDITION_FAILED, "SID doesn't correspond to a known subscription."); return; } // parse SID & remove subscription String ss = sidHdr.substring(5).trim(); UUID sid = new UUID(ss); if (m.remove(sid) == null) { res.sendError(HttpServletResponse.SC_PRECONDITION_FAILED, "SID doesn't correspond to a known subscription."); return; } }
void function(String key, HttpServletRequest req, HttpServletResponse res) throws IOException { String timeoutHdr = req.getHeader(STR); String callbackHdr = req.getHeader(STR); String sidHdr = req.getHeader("SID"); if (!STR.equalsIgnoreCase(req.getMethod())) { res.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, STR + req.getMethod() + STR); return; } if (sidHdr == null sidHdr.length() == 0) { res.sendError(HttpServletResponse.SC_PRECONDITION_FAILED, STR); } if (timeoutHdr != null callbackHdr != null) { res.sendError(HttpServletResponse.SC_BAD_REQUEST, STR); return; } Map<UUID, SubscriptionInfo> m = subscriptions.get(key); if (m == null) { res.sendError(HttpServletResponse.SC_PRECONDITION_FAILED, STR); return; } String ss = sidHdr.substring(5).trim(); UUID sid = new UUID(ss); if (m.remove(sid) == null) { res.sendError(HttpServletResponse.SC_PRECONDITION_FAILED, STR); return; } }
/** * Processes a UPnP UNSUBSCRIBE request and removes a subscription. * * @param key * The key identifies the resource to which the subscription applies. * @param req * HTTP request * @param res * Response to the request * @throws IOException * Thrown by HttpServletResponse.sendError if an error occurs writing the response. */
Processes a UPnP UNSUBSCRIBE request and removes a subscription
processUnsubscribe
{ "repo_name": "gsteckman/rpi-rest", "path": "src/main/java/io/github/gsteckman/rpi_rest/SubscriptionManager.java", "license": "apache-2.0", "size": 15459 }
[ "java.io.IOException", "java.util.Map", "javax.servlet.http.HttpServletRequest", "javax.servlet.http.HttpServletResponse" ]
import java.io.IOException; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse;
import java.io.*; import java.util.*; import javax.servlet.http.*;
[ "java.io", "java.util", "javax.servlet" ]
java.io; java.util; javax.servlet;
2,532,154
/**
 * Returns the {@link LayoutLibrary} associated with this editor if it has been
 * initialized; may return null if initialization has not started or finished.
 *
 * @return the LayoutLibrary, or null
 */
// false: flag semantics of getReadyLayoutLib not visible here — presumably "do not
// trigger/wait for initialization"; TODO confirm
public LayoutLibrary getLayoutLibrary() { return getReadyLayoutLib(false ); }
LayoutLibrary function() { return getReadyLayoutLib(false ); }
/** * Returns the {@link LayoutLibrary} associated with this editor, if it has * been initialized already. May return null if it has not been initialized (or has * not finished initializing). * * @return The {@link LayoutLibrary}, or null */
Returns the <code>LayoutLibrary</code> associated with this editor, if it has been initialized already. May return null if it has not been initialized (or has not finished initializing)
getLayoutLibrary
{ "repo_name": "rex-xxx/mt6572_x201", "path": "sdk/eclipse/plugins/com.android.ide.eclipse.adt/src/com/android/ide/eclipse/adt/internal/editors/layout/gle2/GraphicalEditorPart.java", "license": "gpl-2.0", "size": 114055 }
[ "com.android.ide.common.rendering.LayoutLibrary" ]
import com.android.ide.common.rendering.LayoutLibrary;
import com.android.ide.common.rendering.*;
[ "com.android.ide" ]
com.android.ide;
2,273,022
/**
 * Returns the background colour of this element.
 *
 * @return the configured background colour name (default when unset not visible here — confirm)
 */
public ColourNames getBackgroundColor$() { return backgroundColor$; }
public ColourNames getBackgroundColor$() { return backgroundColor$; }
/** * Specifies the background color of an element * * @return */
Specifies the background color of an element
getBackgroundColor$
{ "repo_name": "GedMarc/JWebSwing", "path": "src/main/java/com/jwebmp/core/htmlbuilder/css/backgrounds/BackgroundCSSImpl.java", "license": "gpl-3.0", "size": 8177 }
[ "com.jwebmp.core.htmlbuilder.css.colours.ColourNames" ]
import com.jwebmp.core.htmlbuilder.css.colours.ColourNames;
import com.jwebmp.core.htmlbuilder.css.colours.*;
[ "com.jwebmp.core" ]
com.jwebmp.core;
2,885,191
/**
 * Sets the value property: an array of classic administrators. Stores the given
 * list reference directly (no defensive copy).
 *
 * @param value the list of administrators to set
 * @return this {@code ClassicAdministratorListResult} for call chaining
 */
public ClassicAdministratorListResult withValue(List<ClassicAdministratorInner> value) { this.value = value; return this; }
ClassicAdministratorListResult function(List<ClassicAdministratorInner> value) { this.value = value; return this; }
/** * Set the value property: An array of administrators. * * @param value the value value to set. * @return the ClassicAdministratorListResult object itself. */
Set the value property: An array of administrators
withValue
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanagerhybrid/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/models/ClassicAdministratorListResult.java", "license": "mit", "size": 2393 }
[ "com.azure.resourcemanager.authorization.fluent.models.ClassicAdministratorInner", "java.util.List" ]
import com.azure.resourcemanager.authorization.fluent.models.ClassicAdministratorInner; import java.util.List;
import com.azure.resourcemanager.authorization.fluent.models.*; import java.util.*;
[ "com.azure.resourcemanager", "java.util" ]
com.azure.resourcemanager; java.util;
2,360,194
public void setSpinningBarLength(@FloatRange(from = 0.0) float barLength) { this.mSpinningBarLengthCurrent = mSpinningBarLengthOrig = barLength; }
void function(@FloatRange(from = 0.0) float barLength) { this.mSpinningBarLengthCurrent = mSpinningBarLengthOrig = barLength; }
/** * Length of spinning bar in degree. * * @param barLength length in degree */
Length of spinning bar in degree
setSpinningBarLength
{ "repo_name": "captainbupt/Circle-Progress-View", "path": "CircleProgressView/src/main/java/at/grabner/circleprogress/CircleProgressView.java", "license": "mit", "size": 52578 }
[ "android.support.annotation.FloatRange" ]
import android.support.annotation.FloatRange;
import android.support.annotation.*;
[ "android.support" ]
android.support;
1,427,112
/**
 * Acquires the lock only if it is free at the time of invocation; never blocks.
 *
 * @return {@code null} if the lock was acquired, or one of the current holders
 *         of the lock if it could not be acquired immediately
 */
@Nullable LockClient tryLock();
LockClient tryLock();
/** * Acquires the lock only if it is free at the time of invocation. This * method does not block. * * @return {@code null} if the lock was acquired, or one of the current * holders of the lock if it could not be acquired immediately. */
Acquires the lock only if it is free at the time of invocation. This method does not block
tryLock
{ "repo_name": "EvilMcJerkface/atlasdb", "path": "lock-impl/src/main/java/com/palantir/lock/impl/KnownClientLock.java", "license": "apache-2.0", "size": 3767 }
[ "com.palantir.lock.LockClient" ]
import com.palantir.lock.LockClient;
import com.palantir.lock.*;
[ "com.palantir.lock" ]
com.palantir.lock;
1,090,197
@PutMapping(REQUEST_CODE_ENDPOINT) public ResponseEntity saveRequest(@PathVariable final String projectCode, @PathVariable final String groupCode, @PathVariable final String requestCode, @Valid @RequestBody final AbstractRequest request, final BindingResult result) { if (result.hasErrors()) { throw new IllegalArgumentException(result.getFieldError().getDefaultMessage()); } findProjectByCode(projectCode); findGroupByCode(groupCode); // If mock is found that means that id and unique code were not changed final AbstractRequest existingRequest = findRequestByCode(requestCode); if (!existingRequest.getId().equals(request.getId())) { throw new IllegalArgumentException("MockRequest IDs in the URL and in the payload are not equals"); } // Creation date can't be changed by user request.setCreationDate(existingRequest.getCreationDate()); request.setLastUpdateDate(new Date()); // Set latency request.setLatency(cleanUpObject(request.getLatency())); final Headers requestHeaders = request.getHeaders(); // Change headers to lowercase request.setHeaders(processHeaders(requestHeaders)); return okResponseWithDefaultHeaders( processBodyAndSave(request, isURLEncodedForm(requestHeaders))); }
@PutMapping(REQUEST_CODE_ENDPOINT) ResponseEntity function(@PathVariable final String projectCode, @PathVariable final String groupCode, @PathVariable final String requestCode, @Valid @RequestBody final AbstractRequest request, final BindingResult result) { if (result.hasErrors()) { throw new IllegalArgumentException(result.getFieldError().getDefaultMessage()); } findProjectByCode(projectCode); findGroupByCode(groupCode); final AbstractRequest existingRequest = findRequestByCode(requestCode); if (!existingRequest.getId().equals(request.getId())) { throw new IllegalArgumentException(STR); } request.setCreationDate(existingRequest.getCreationDate()); request.setLastUpdateDate(new Date()); request.setLatency(cleanUpObject(request.getLatency())); final Headers requestHeaders = request.getHeaders(); request.setHeaders(processHeaders(requestHeaders)); return okResponseWithDefaultHeaders( processBodyAndSave(request, isURLEncodedForm(requestHeaders))); }
/** * Updates existing mock-request * * @param projectCode * @param groupCode * @param requestCode * @param request * @return */
Updates existing mock-request
saveRequest
{ "repo_name": "dryazanov/mockenger", "path": "core/src/main/java/org/mockenger/core/web/controller/endpoint/RequestController.java", "license": "gpl-2.0", "size": 7402 }
[ "java.util.Date", "javax.validation.Valid", "org.mockenger.core.util.MockRequestUtils", "org.mockenger.data.model.persistent.mock.request.AbstractRequest", "org.mockenger.data.model.persistent.mock.request.part.Headers", "org.springframework.http.ResponseEntity", "org.springframework.validation.BindingResult", "org.springframework.web.bind.annotation.PathVariable", "org.springframework.web.bind.annotation.PutMapping", "org.springframework.web.bind.annotation.RequestBody" ]
import java.util.Date; import javax.validation.Valid; import org.mockenger.core.util.MockRequestUtils; import org.mockenger.data.model.persistent.mock.request.AbstractRequest; import org.mockenger.data.model.persistent.mock.request.part.Headers; import org.springframework.http.ResponseEntity; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestBody;
import java.util.*; import javax.validation.*; import org.mockenger.core.util.*; import org.mockenger.data.model.persistent.mock.request.*; import org.mockenger.data.model.persistent.mock.request.part.*; import org.springframework.http.*; import org.springframework.validation.*; import org.springframework.web.bind.annotation.*;
[ "java.util", "javax.validation", "org.mockenger.core", "org.mockenger.data", "org.springframework.http", "org.springframework.validation", "org.springframework.web" ]
java.util; javax.validation; org.mockenger.core; org.mockenger.data; org.springframework.http; org.springframework.validation; org.springframework.web;
2,628,994
/**
 * Drill: currently returns an empty (zero-row) result set, and the result set's
 * columns may not match those the JDBC specification prescribes for getExportedKeys.
 */
@Override ResultSet getExportedKeys( String catalog, String schema, String table ) throws SQLException;
ResultSet getExportedKeys( String catalog, String schema, String table ) throws SQLException;
/** * <strong>Drill</strong>: Currently, returns an empty (zero-row) result set. * (Note: Currently, result set might not have the expected columns.) */
Drill: Currently, returns an empty (zero-row) result set. (Note: Currently, result set might not have the expected columns.)
getExportedKeys
{ "repo_name": "parthchandra/incubator-drill", "path": "exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillDatabaseMetaData.java", "license": "apache-2.0", "size": 15649 }
[ "java.sql.ResultSet", "java.sql.SQLException" ]
import java.sql.ResultSet; import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
71,318
/**
 * Builds and returns a {@link ThingStatusInfo} from the builder's current
 * status, status detail and description.
 *
 * @return the constructed status info
 */
public ThingStatusInfo build() { return new ThingStatusInfo(status, statusDetail, description); }
ThingStatusInfo function() { return new ThingStatusInfo(status, statusDetail, description); }
/** * Builds and returns the status info. * * @return status info */
Builds and returns the status info
build
{ "repo_name": "phxql/smarthome", "path": "bundles/core/org.eclipse.smarthome.core.thing/src/main/java/org/eclipse/smarthome/core/thing/binding/builder/ThingStatusInfoBuilder.java", "license": "epl-1.0", "size": 3464 }
[ "org.eclipse.smarthome.core.thing.ThingStatusInfo" ]
import org.eclipse.smarthome.core.thing.ThingStatusInfo;
import org.eclipse.smarthome.core.thing.*;
[ "org.eclipse.smarthome" ]
org.eclipse.smarthome;
1,107,798
/**
 * Gets a list of billing meters for a given location.
 *
 * @param billingLocation Azure location of the billable resource
 * @param osType App Service OS type the meters are used for
 * @param context the context to associate with this operation
 * @return a collection of billing meters as a paginated {@link PagedIterable} response
 * @throws IllegalArgumentException if parameters fail validation
 */
@ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<BillingMeterInner> list(String billingLocation, String osType, Context context);
@ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<BillingMeterInner> list(String billingLocation, String osType, Context context);
/** * Description for Gets a list of meters for a given location. * * @param billingLocation Azure Location of billable resource. * @param osType App Service OS type meters used for. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.appservice.models.DefaultErrorResponseErrorException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return collection of Billing Meters as paginated response with {@link PagedIterable}. */
Description for Gets a list of meters for a given location
list
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-appservice/src/main/java/com/azure/resourcemanager/appservice/fluent/ResourceProvidersClient.java", "license": "mit", "size": 48714 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.PagedIterable", "com.azure.core.util.Context", "com.azure.resourcemanager.appservice.fluent.models.BillingMeterInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedIterable; import com.azure.core.util.Context; import com.azure.resourcemanager.appservice.fluent.models.BillingMeterInner;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.appservice.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
210,409
/**
 * OSGi managed-service callback: when the configuration changes, stops the job
 * scheduler, re-parses the configuration, logs it, and restarts the scheduler
 * only if the new configuration is valid.
 *
 * NOTE(review): if the new configuration is invalid the scheduler stays stopped —
 * presumably intentional; confirm. Also confirm the config toString() does not
 * log sensitive values.
 *
 * @param config the updated configuration dictionary, or null when none is set
 * @throws ConfigurationException declared by the interface (whether parse() throws
 *         it is not visible here — confirm)
 */
@Override public void updated(Dictionary<String, ?> config) throws ConfigurationException {
    if (config != null) {
        context.getJobScheduler().stop();
        context.getConfig().parse(config);
        logger.info(context.getConfig().toString());
        if (context.getConfig().isValid()) {
            context.getJobScheduler().restart();
        }
    }
}
void function(Dictionary<String, ?> config) throws ConfigurationException { if (config != null) { context.getJobScheduler().stop(); context.getConfig().parse(config); logger.info(context.getConfig().toString()); if (context.getConfig().isValid()) { context.getJobScheduler().restart(); } } }
/** * Restart scheduler if config changes. */
Restart scheduler if config changes
updated
{ "repo_name": "hubermi/openhab", "path": "bundles/binding/org.openhab.binding.astro/src/main/java/org/openhab/binding/astro/internal/bus/AstroBinding.java", "license": "epl-1.0", "size": 3223 }
[ "java.util.Dictionary", "org.osgi.service.cm.ConfigurationException" ]
import java.util.Dictionary; import org.osgi.service.cm.ConfigurationException;
import java.util.*; import org.osgi.service.cm.*;
[ "java.util", "org.osgi.service" ]
java.util; org.osgi.service;
793,140
/**
 * List Findings under a given ScanRun.
 *
 * This is the gRPC base-class default: it reports the method as unimplemented
 * to the response observer; service implementations are expected to override it.
 */
public void listFindings( com.google.cloud.websecurityscanner.v1alpha.ListFindingsRequest request, io.grpc.stub.StreamObserver< com.google.cloud.websecurityscanner.v1alpha.ListFindingsResponse> responseObserver) { asyncUnimplementedUnaryCall(getListFindingsMethodHelper(), responseObserver); }
void function( com.google.cloud.websecurityscanner.v1alpha.ListFindingsRequest request, io.grpc.stub.StreamObserver< com.google.cloud.websecurityscanner.v1alpha.ListFindingsResponse> responseObserver) { asyncUnimplementedUnaryCall(getListFindingsMethodHelper(), responseObserver); }
/** * * * <pre> * List Findings under a given ScanRun. * </pre> */
<code> List Findings under a given ScanRun. </code>
listFindings
{ "repo_name": "vam-google/google-cloud-java", "path": "google-api-grpc/grpc-google-cloud-websecurityscanner-v1alpha/src/main/java/com/google/cloud/websecurityscanner/v1alpha/WebSecurityScannerGrpc.java", "license": "apache-2.0", "size": 83312 }
[ "io.grpc.stub.ServerCalls" ]
import io.grpc.stub.ServerCalls;
import io.grpc.stub.*;
[ "io.grpc.stub" ]
io.grpc.stub;
331,932
public TimerTriggerDescriptor timerTrigger() { return this.timerTrigger; }
TimerTriggerDescriptor function() { return this.timerTrigger; }
/** * Get the timerTrigger property: The timer trigger that caused the run. * * @return the timerTrigger value. */
Get the timerTrigger property: The timer trigger that caused the run
timerTrigger
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-containerregistry/src/main/java/com/azure/resourcemanager/containerregistry/fluent/models/RunProperties.java", "license": "mit", "size": 18096 }
[ "com.azure.resourcemanager.containerregistry.models.TimerTriggerDescriptor" ]
import com.azure.resourcemanager.containerregistry.models.TimerTriggerDescriptor;
import com.azure.resourcemanager.containerregistry.models.*;
[ "com.azure.resourcemanager" ]
com.azure.resourcemanager;
66,540
public static int getMasterInfoPort(final ZooKeeperWatcher zkw) throws KeeperException, IOException { byte[] data; try { data = ZKUtil.getData(zkw, zkw.getMasterAddressZNode()); } catch (InterruptedException e) { throw new InterruptedIOException(); } // TODO javadoc claims we return null in this case. :/ if (data == null) { throw new IOException("Can't get master address from ZooKeeper; znode data == null"); } try { return parse(data).getInfoPort(); } catch (DeserializationException e) { KeeperException ke = new KeeperException.DataInconsistencyException(); ke.initCause(e); throw ke; } }
static int function(final ZooKeeperWatcher zkw) throws KeeperException, IOException { byte[] data; try { data = ZKUtil.getData(zkw, zkw.getMasterAddressZNode()); } catch (InterruptedException e) { throw new InterruptedIOException(); } if (data == null) { throw new IOException(STR); } try { return parse(data).getInfoPort(); } catch (DeserializationException e) { KeeperException ke = new KeeperException.DataInconsistencyException(); ke.initCause(e); throw ke; } }
/** * Get master info port. * Use this instead of {@link #getMasterInfoPort()} if you do not have an * instance of this tracker in your context. * @param zkw ZooKeeperWatcher to use * @return master info port in the the master address znode or null if no * znode present. * // TODO can't return null for 'int' return type. non-static verison returns 0 * @throws KeeperException * @throws IOException */
Get master info port. Use this instead of <code>#getMasterInfoPort()</code> if you do not have an instance of this tracker in your context
getMasterInfoPort
{ "repo_name": "baishuo/hbase-1.0.0-cdh5.4.7_baishuo", "path": "hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java", "license": "apache-2.0", "size": 10285 }
[ "java.io.IOException", "java.io.InterruptedIOException", "org.apache.hadoop.hbase.exceptions.DeserializationException", "org.apache.zookeeper.KeeperException" ]
import java.io.IOException; import java.io.InterruptedIOException; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.zookeeper.KeeperException;
import java.io.*; import org.apache.hadoop.hbase.exceptions.*; import org.apache.zookeeper.*;
[ "java.io", "org.apache.hadoop", "org.apache.zookeeper" ]
java.io; org.apache.hadoop; org.apache.zookeeper;
2,564,990
protected synchronized void discardKeyEvents(Component comp) { if (comp == null) { return; } long start = -1; for (Iterator<TypeAheadMarker> iter = typeAheadMarkers.iterator(); iter.hasNext(); ) { TypeAheadMarker marker = iter.next(); Component toTest = marker.untilFocused; boolean match = (toTest == comp); while (!match && toTest != null && !(toTest instanceof Window)) { toTest = toTest.getParent(); match = (toTest == comp); } if (match) { if (start < 0) { start = marker.after; } iter.remove(); } else if (start >= 0) { purgeStampedEvents(start, marker.after); start = -1; } } purgeStampedEvents(start, -1); }
synchronized void function(Component comp) { if (comp == null) { return; } long start = -1; for (Iterator<TypeAheadMarker> iter = typeAheadMarkers.iterator(); iter.hasNext(); ) { TypeAheadMarker marker = iter.next(); Component toTest = marker.untilFocused; boolean match = (toTest == comp); while (!match && toTest != null && !(toTest instanceof Window)) { toTest = toTest.getParent(); match = (toTest == comp); } if (match) { if (start < 0) { start = marker.after; } iter.remove(); } else if (start >= 0) { purgeStampedEvents(start, marker.after); start = -1; } } purgeStampedEvents(start, -1); }
/** * Discards all KeyEvents which were enqueued because of one or more calls * to <code>enqueueKeyEvents</code> with the specified Component, or one of * its descendants. * * @param comp the Component specified in one or more calls to * <code>enqueueKeyEvents</code>, or a parent of such a Component * @see #enqueueKeyEvents * @see #dequeueKeyEvents */
Discards all KeyEvents which were enqueued because of one or more calls to <code>enqueueKeyEvents</code> with the specified Component, or one of its descendants
discardKeyEvents
{ "repo_name": "shun634501730/java_source_cn", "path": "src_en/java/awt/DefaultKeyboardFocusManager.java", "license": "apache-2.0", "size": 57698 }
[ "java.util.Iterator" ]
import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
1,568,482
@Override public Adapter createOperationalConfigurationModelAdapter() { if (operationalConfigurationModelItemProvider == null) { operationalConfigurationModelItemProvider = new OperationalConfigurationModelItemProvider(this); } return operationalConfigurationModelItemProvider; } protected StandardConfigurationNodeItemProvider standardConfigurationNodeItemProvider;
Adapter function() { if (operationalConfigurationModelItemProvider == null) { operationalConfigurationModelItemProvider = new OperationalConfigurationModelItemProvider(this); } return operationalConfigurationModelItemProvider; } protected StandardConfigurationNodeItemProvider standardConfigurationNodeItemProvider;
/** * This creates an adapter for a {@link org.tud.inf.st.mbt.ocm.OperationalConfigurationModel}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This creates an adapter for a <code>org.tud.inf.st.mbt.ocm.OperationalConfigurationModel</code>.
createOperationalConfigurationModelAdapter
{ "repo_name": "paetti1988/qmate", "path": "MATE/org.tud.inf.st.mbt.emf.edit/src-gen/org/tud/inf/st/mbt/ocm/provider/OcmItemProviderAdapterFactory.java", "license": "apache-2.0", "size": 9077 }
[ "org.eclipse.emf.common.notify.Adapter" ]
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,361,524
@Test public void getReviewTest() { ReviewEntity entity = data.get(0); ReviewEntity newEntity = reviewPersistence.find(dataBook.get(0).getId(), entity.getId()); Assert.assertNotNull(newEntity); Assert.assertEquals(entity.getName(), newEntity.getName()); Assert.assertEquals(entity.getSource(), newEntity.getSource()); Assert.assertEquals(entity.getDescription(), newEntity.getDescription()); }
void function() { ReviewEntity entity = data.get(0); ReviewEntity newEntity = reviewPersistence.find(dataBook.get(0).getId(), entity.getId()); Assert.assertNotNull(newEntity); Assert.assertEquals(entity.getName(), newEntity.getName()); Assert.assertEquals(entity.getSource(), newEntity.getSource()); Assert.assertEquals(entity.getDescription(), newEntity.getDescription()); }
/** * Prueba para consultar un Review. */
Prueba para consultar un Review
getReviewTest
{ "repo_name": "Uniandes-isis2603/backstepbystep", "path": "backstepbystep-back/src/test/java/co/edu/uniandes/csw/bookstore/test/persistence/ReviewPersistenceTest.java", "license": "mit", "size": 6749 }
[ "co.edu.uniandes.csw.bookstore.entities.ReviewEntity", "org.junit.Assert" ]
import co.edu.uniandes.csw.bookstore.entities.ReviewEntity; import org.junit.Assert;
import co.edu.uniandes.csw.bookstore.entities.*; import org.junit.*;
[ "co.edu.uniandes", "org.junit" ]
co.edu.uniandes; org.junit;
1,712,851
private void renderStar(){ GL11.glPushMatrix(); GL11.glTranslated(getScreenX(), getScreenY(), getScreenZ()); if (isEnlighten()) renderLight(); GL11.glColor3f(color[0], color[1], color[2]); Sphere s = new Sphere(); s.draw(Math.max(1, (float)getScreenSize()), getPolygons(), getPolygons()); GL11.glPopMatrix(); }
void function(){ GL11.glPushMatrix(); GL11.glTranslated(getScreenX(), getScreenY(), getScreenZ()); if (isEnlighten()) renderLight(); GL11.glColor3f(color[0], color[1], color[2]); Sphere s = new Sphere(); s.draw(Math.max(1, (float)getScreenSize()), getPolygons(), getPolygons()); GL11.glPopMatrix(); }
/** * Render star itself */
Render star itself
renderStar
{ "repo_name": "mbautista/humanity", "path": "src/fr/ircf/humanity/aster/Star.java", "license": "gpl-3.0", "size": 7251 }
[ "org.lwjgl.util.glu.Sphere" ]
import org.lwjgl.util.glu.Sphere;
import org.lwjgl.util.glu.*;
[ "org.lwjgl.util" ]
org.lwjgl.util;
2,244,972
@Test @SuppressWarnings("unchecked") public void canStartJobMonitor() { final String job1Id = UUID.randomUUID().toString(); final String job2Id = UUID.randomUUID().toString(); final String job3Id = UUID.randomUUID().toString(); final String job4Id = UUID.randomUUID().toString(); final JobExecution.Builder builder = new JobExecution.Builder(UUID.randomUUID().toString()) .withProcessId(2818) .withCheckDelay(DELAY) .withMemory(1024) .withTimeout(this.tomorrow); builder.withId(job1Id); final JobExecution job1 = builder.build(); builder.withId(job2Id); final JobExecution job2 = builder.build(); builder.withId(job3Id); final JobExecution job3 = builder.build(); builder.withId(job4Id); final JobExecution job4 = builder.build(); coordinator.init(job1Id); coordinator.schedule(job1Id, null, null, null, null, 1024); coordinator.init(job2Id); coordinator.schedule(job2Id, null, null, null, null, 1024); coordinator.init(job3Id); coordinator.schedule(job3Id, null, null, null, null, 1024); coordinator.init(job4Id); coordinator.schedule(job4Id, null, null, null, null, 1024); coordinator.schedule(job1Id, null, null, null, null, 1024); final JobStartedEvent event1 = new JobStartedEvent(job1, this); final JobStartedEvent event2 = new JobStartedEvent(job2, this); final JobStartedEvent event3 = new JobStartedEvent(job3, this); final JobStartedEvent event4 = new JobStartedEvent(job4, this); final JobStartedEvent event5 = new JobStartedEvent(job1, this); final ScheduledFuture future = Mockito.mock(ScheduledFuture.class); Mockito.when( this.scheduler.scheduleWithFixedDelay(Mockito.any(JobMonitor.class), Mockito.eq(DELAY)) ).thenReturn(future); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); this.coordinator.onJobStarted(event1); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); 
this.coordinator.onJobStarted(event2); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); this.coordinator.onJobStarted(event3); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); this.coordinator.onJobStarted(event4); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); this.coordinator.onJobStarted(event5); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); Mockito .verify(this.scheduler, Mockito.times(5)) .scheduleWithFixedDelay(Mockito.any(JobMonitor.class), Mockito.eq(DELAY)); }
@SuppressWarnings(STR) void function() { final String job1Id = UUID.randomUUID().toString(); final String job2Id = UUID.randomUUID().toString(); final String job3Id = UUID.randomUUID().toString(); final String job4Id = UUID.randomUUID().toString(); final JobExecution.Builder builder = new JobExecution.Builder(UUID.randomUUID().toString()) .withProcessId(2818) .withCheckDelay(DELAY) .withMemory(1024) .withTimeout(this.tomorrow); builder.withId(job1Id); final JobExecution job1 = builder.build(); builder.withId(job2Id); final JobExecution job2 = builder.build(); builder.withId(job3Id); final JobExecution job3 = builder.build(); builder.withId(job4Id); final JobExecution job4 = builder.build(); coordinator.init(job1Id); coordinator.schedule(job1Id, null, null, null, null, 1024); coordinator.init(job2Id); coordinator.schedule(job2Id, null, null, null, null, 1024); coordinator.init(job3Id); coordinator.schedule(job3Id, null, null, null, null, 1024); coordinator.init(job4Id); coordinator.schedule(job4Id, null, null, null, null, 1024); coordinator.schedule(job1Id, null, null, null, null, 1024); final JobStartedEvent event1 = new JobStartedEvent(job1, this); final JobStartedEvent event2 = new JobStartedEvent(job2, this); final JobStartedEvent event3 = new JobStartedEvent(job3, this); final JobStartedEvent event4 = new JobStartedEvent(job4, this); final JobStartedEvent event5 = new JobStartedEvent(job1, this); final ScheduledFuture future = Mockito.mock(ScheduledFuture.class); Mockito.when( this.scheduler.scheduleWithFixedDelay(Mockito.any(JobMonitor.class), Mockito.eq(DELAY)) ).thenReturn(future); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); this.coordinator.onJobStarted(event1); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); this.coordinator.onJobStarted(event2); 
Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); this.coordinator.onJobStarted(event3); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); this.coordinator.onJobStarted(event4); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); this.coordinator.onJobStarted(event5); Assert.assertThat(this.coordinator.getNumActiveJobs(), Matchers.is(4)); Assert.assertThat(this.coordinator.getUsedMemory(), Matchers.is(4096)); Mockito .verify(this.scheduler, Mockito.times(5)) .scheduleWithFixedDelay(Mockito.any(JobMonitor.class), Mockito.eq(DELAY)); }
/** * Make sure when a {@link com.netflix.genie.core.events.JobStartedEvent} is sent a new monitor is spawned. */
Make sure when a <code>com.netflix.genie.core.events.JobStartedEvent</code> is sent a new monitor is spawned
canStartJobMonitor
{ "repo_name": "irontable/genie", "path": "genie-web/src/test/java/com/netflix/genie/web/tasks/job/JobMonitoringCoordinatorUnitTests.java", "license": "apache-2.0", "size": 18631 }
[ "com.netflix.genie.common.dto.JobExecution", "com.netflix.genie.core.events.JobStartedEvent", "java.util.UUID", "java.util.concurrent.ScheduledFuture", "org.hamcrest.Matchers", "org.junit.Assert", "org.mockito.Mockito" ]
import com.netflix.genie.common.dto.JobExecution; import com.netflix.genie.core.events.JobStartedEvent; import java.util.UUID; import java.util.concurrent.ScheduledFuture; import org.hamcrest.Matchers; import org.junit.Assert; import org.mockito.Mockito;
import com.netflix.genie.common.dto.*; import com.netflix.genie.core.events.*; import java.util.*; import java.util.concurrent.*; import org.hamcrest.*; import org.junit.*; import org.mockito.*;
[ "com.netflix.genie", "java.util", "org.hamcrest", "org.junit", "org.mockito" ]
com.netflix.genie; java.util; org.hamcrest; org.junit; org.mockito;
777,439
private void messageIngestListeners(final Properties aProperties, final File aImageFile, final String aID, final JsonObject aJsonObject) { final JsonObject jsonMessage = new JsonObject(); FileUtils.getExt(aImageFile.getAbsolutePath()); // Pass along some metadata about the image being ingested jsonMessage.put(ID_KEY, aID); jsonMessage.put(TILE_SIZE_PROP, getConfig().getTileSize()); jsonMessage.put(FILE_PATH_KEY, aImageFile.getAbsolutePath()); // These are the tasks we trigger, according to user's ingest request if (!aJsonObject.getBoolean(SKIP_PROPERTIES, false)) { sendMessage(jsonMessage, ImagePropertiesVerticle.class.getName(), INGEST_TIMEOUT); } else { LOGGER.debug(MessageCodes.DBG_105, aID); } if (!aJsonObject.getBoolean(SKIP_IMAGES, false)) { if (!aJsonObject.getBoolean(SolrMetadata.SKIP_INDEXING, false)) { jsonMessage.put(SolrMetadata.SKIP_INDEXING, false); } else { LOGGER.debug(MessageCodes.DBG_104, aID); } sendMessage(jsonMessage, TileMasterVerticle.class.getName(), INGEST_TIMEOUT); } else { if (!aJsonObject.getBoolean(SolrMetadata.SKIP_INDEXING, false)) { jsonMessage.put(SolrMetadata.ACTION_TYPE, SolrMetadata.UPDATE_ACTION); sendMessage(jsonMessage, ImageIndexVerticle.class.getName(), INGEST_TIMEOUT); } else { LOGGER.debug(MessageCodes.DBG_104, aID); } LOGGER.debug(MessageCodes.DBG_106, aID); } }
void function(final Properties aProperties, final File aImageFile, final String aID, final JsonObject aJsonObject) { final JsonObject jsonMessage = new JsonObject(); FileUtils.getExt(aImageFile.getAbsolutePath()); jsonMessage.put(ID_KEY, aID); jsonMessage.put(TILE_SIZE_PROP, getConfig().getTileSize()); jsonMessage.put(FILE_PATH_KEY, aImageFile.getAbsolutePath()); if (!aJsonObject.getBoolean(SKIP_PROPERTIES, false)) { sendMessage(jsonMessage, ImagePropertiesVerticle.class.getName(), INGEST_TIMEOUT); } else { LOGGER.debug(MessageCodes.DBG_105, aID); } if (!aJsonObject.getBoolean(SKIP_IMAGES, false)) { if (!aJsonObject.getBoolean(SolrMetadata.SKIP_INDEXING, false)) { jsonMessage.put(SolrMetadata.SKIP_INDEXING, false); } else { LOGGER.debug(MessageCodes.DBG_104, aID); } sendMessage(jsonMessage, TileMasterVerticle.class.getName(), INGEST_TIMEOUT); } else { if (!aJsonObject.getBoolean(SolrMetadata.SKIP_INDEXING, false)) { jsonMessage.put(SolrMetadata.ACTION_TYPE, SolrMetadata.UPDATE_ACTION); sendMessage(jsonMessage, ImageIndexVerticle.class.getName(), INGEST_TIMEOUT); } else { LOGGER.debug(MessageCodes.DBG_104, aID); } LOGGER.debug(MessageCodes.DBG_106, aID); } }
/** * Dispatches messages for activities requested by the user at point of ingest. * * @param aProperties A properties file * @param aImageFile An image file for ingest * @param aID An ID for the image to ingest * @param aJsonObject A configuration file for the ingest activity */
Dispatches messages for activities requested by the user at point of ingest
messageIngestListeners
{ "repo_name": "ksclarke/jiiify", "path": "src/main/java/info/freelibrary/jiiify/verticles/ImageIngestVerticle.java", "license": "bsd-3-clause", "size": 7845 }
[ "info.freelibrary.jiiify.MessageCodes", "info.freelibrary.jiiify.SolrMetadata", "info.freelibrary.util.FileUtils", "io.vertx.core.json.JsonObject", "java.io.File", "java.util.Properties" ]
import info.freelibrary.jiiify.MessageCodes; import info.freelibrary.jiiify.SolrMetadata; import info.freelibrary.util.FileUtils; import io.vertx.core.json.JsonObject; import java.io.File; import java.util.Properties;
import info.freelibrary.jiiify.*; import info.freelibrary.util.*; import io.vertx.core.json.*; import java.io.*; import java.util.*;
[ "info.freelibrary.jiiify", "info.freelibrary.util", "io.vertx.core", "java.io", "java.util" ]
info.freelibrary.jiiify; info.freelibrary.util; io.vertx.core; java.io; java.util;
2,335,024
public static ImageDescriptor getImageDescriptor(String path) { return imageDescriptorFromPlugin(PLUGIN_ID, path); }
static ImageDescriptor function(String path) { return imageDescriptorFromPlugin(PLUGIN_ID, path); }
/** * Returns an image descriptor for the image file at the given * plug-in relative path * * @param path the path * @return the image descriptor */
Returns an image descriptor for the image file at the given plug-in relative path
getImageDescriptor
{ "repo_name": "hosny1993/vogella", "path": "de.vogella.rcp.intro.fixedview/src/de/vogella/intro/rcp/fixedview/Activator.java", "license": "epl-1.0", "size": 1429 }
[ "org.eclipse.jface.resource.ImageDescriptor" ]
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.resource.*;
[ "org.eclipse.jface" ]
org.eclipse.jface;
869,847
public void handleAction(Action action, Object sender, Object target); } public interface Container extends Serializable {
void function(Action action, Object sender, Object target); } public interface Container extends Serializable {
/** * Handles an action for the given target. The handler method may just * discard the action if it's not suitable. * * @param action * the action to be handled. * @param sender * the sender of the action. This is most often the action * container. * @param target * the target of the action. For item containers this is the * item id. */
Handles an action for the given target. The handler method may just discard the action if it's not suitable
handleAction
{ "repo_name": "Legioth/vaadin", "path": "server/src/main/java/com/vaadin/event/Action.java", "license": "apache-2.0", "size": 6083 }
[ "java.io.Serializable" ]
import java.io.Serializable;
import java.io.*;
[ "java.io" ]
java.io;
1,047,495
Map<String, InBandBytestreamSession> getSessions() { return sessions; }
Map<String, InBandBytestreamSession> getSessions() { return sessions; }
/** * Returns the sessions map. * * @return the sessions map */
Returns the sessions map
getSessions
{ "repo_name": "igniterealtime/Smack", "path": "smack-extensions/src/main/java/org/jivesoftware/smackx/bytestreams/ibb/InBandBytestreamManager.java", "license": "apache-2.0", "size": 20753 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
1,448,654
public ServiceCall<WordData> getWord(String customizationId, String wordName) { Validator.notNull(customizationId, "customizationId cannot be null"); Validator.notNull(wordName, "wordName cannot be null"); RequestBuilder requestBuilder = RequestBuilder.get(String.format(PATH_WORD, customizationId, wordName)); return createServiceCall(requestBuilder.build(), ResponseConverterUtils.getObject(WordData.class)); }
ServiceCall<WordData> function(String customizationId, String wordName) { Validator.notNull(customizationId, STR); Validator.notNull(wordName, STR); RequestBuilder requestBuilder = RequestBuilder.get(String.format(PATH_WORD, customizationId, wordName)); return createServiceCall(requestBuilder.build(), ResponseConverterUtils.getObject(WordData.class)); }
/** * Gets information about a word from a custom language model. * * @param customizationId The GUID of the custom language model containing the word being queried. You must make the * request with the service credentials of the model's owner. * @param wordName the word name * @return the words */
Gets information about a word from a custom language model
getWord
{ "repo_name": "estebanmorales1594/PruebaXE", "path": "src/main/java/com/ibm/watson/developer_cloud/speech_to_text/v1/SpeechToText.java", "license": "apache-2.0", "size": 41747 }
[ "com.ibm.watson.developer_cloud.http.RequestBuilder", "com.ibm.watson.developer_cloud.http.ServiceCall", "com.ibm.watson.developer_cloud.speech_to_text.v1.model.WordData", "com.ibm.watson.developer_cloud.util.ResponseConverterUtils", "com.ibm.watson.developer_cloud.util.Validator" ]
import com.ibm.watson.developer_cloud.http.RequestBuilder; import com.ibm.watson.developer_cloud.http.ServiceCall; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.WordData; import com.ibm.watson.developer_cloud.util.ResponseConverterUtils; import com.ibm.watson.developer_cloud.util.Validator;
import com.ibm.watson.developer_cloud.http.*; import com.ibm.watson.developer_cloud.speech_to_text.v1.model.*; import com.ibm.watson.developer_cloud.util.*;
[ "com.ibm.watson" ]
com.ibm.watson;
2,040,536
public boolean setAccountRegistration(SipProfile account, int renew, boolean forceReAdd) throws SameThreadException { int status = -1; if (!created || account == null) { Log.e(THIS_FILE, "PJSIP is not started here, nothing can be done"); return false; } if (account.id == SipProfile.INVALID_ID) { Log.w(THIS_FILE, "Trying to set registration on a deleted account"); return false; } SipProfileState profileState = getProfileState(account); // If local account -- Ensure we are not deleting, because this would be // invalid if (profileState.getWizard().equalsIgnoreCase(WizardUtils.LOCAL_WIZARD_TAG)) { if (renew == 0) { return false; } } // In case of already added, we have to act finely // If it's local we can just consider that we have to re-add account // since it will actually just touch the account with a modify if (profileState != null && profileState.isAddedToStack() && !profileState.getWizard().equalsIgnoreCase(WizardUtils.LOCAL_WIZARD_TAG)) { // The account is already there in accounts list service.getContentResolver().delete( ContentUris.withAppendedId(SipProfile.ACCOUNT_STATUS_URI, account.id), null, null); Log.d(THIS_FILE, "Account already added to stack, remove and re-load or delete"); if (renew == 1) { if (forceReAdd) { status = pjsua.acc_del(profileState.getPjsuaId()); addAccount(account); } else { pjsua.acc_set_online_status(profileState.getPjsuaId(), getOnlineForStatus(service.getPresence())); status = pjsua.acc_set_registration(profileState.getPjsuaId(), renew); } } else { // if(status == pjsuaConstants.PJ_SUCCESS && renew == 0) { Log.d(THIS_FILE, "Delete account !!"); status = pjsua.acc_del(profileState.getPjsuaId()); } } else { if (renew == 1) { addAccount(account); } else { Log.w(THIS_FILE, "Ask to unregister an unexisting account !!" + account.id); } } // PJ_SUCCESS = 0 return status == 0; }
boolean function(SipProfile account, int renew, boolean forceReAdd) throws SameThreadException { int status = -1; if (!created account == null) { Log.e(THIS_FILE, STR); return false; } if (account.id == SipProfile.INVALID_ID) { Log.w(THIS_FILE, STR); return false; } SipProfileState profileState = getProfileState(account); if (profileState.getWizard().equalsIgnoreCase(WizardUtils.LOCAL_WIZARD_TAG)) { if (renew == 0) { return false; } } if (profileState != null && profileState.isAddedToStack() && !profileState.getWizard().equalsIgnoreCase(WizardUtils.LOCAL_WIZARD_TAG)) { service.getContentResolver().delete( ContentUris.withAppendedId(SipProfile.ACCOUNT_STATUS_URI, account.id), null, null); Log.d(THIS_FILE, STR); if (renew == 1) { if (forceReAdd) { status = pjsua.acc_del(profileState.getPjsuaId()); addAccount(account); } else { pjsua.acc_set_online_status(profileState.getPjsuaId(), getOnlineForStatus(service.getPresence())); status = pjsua.acc_set_registration(profileState.getPjsuaId(), renew); } } else { Log.d(THIS_FILE, STR); status = pjsua.acc_del(profileState.getPjsuaId()); } } else { if (renew == 1) { addAccount(account); } else { Log.w(THIS_FILE, STR + account.id); } } return status == 0; }
/** * Change account registration / adding state * * @param account The account to modify registration * @param renew if 0 we ask for deletion of this account; if 1 we ask for * registration of this account (and add if necessary) * @param forceReAdd if true, we will first remove the account and then * re-add it * @return true if the operation get completed without problem * @throws SameThreadException */
Change account registration / adding state
setAccountRegistration
{ "repo_name": "WonderFannn/EntranceSystem", "path": "src/com/csipsimple/pjsip/PjSipService.java", "license": "lgpl-3.0", "size": 97922 }
[ "android.content.ContentUris", "com.csipsimple.api.SipProfile", "com.csipsimple.api.SipProfileState", "com.csipsimple.service.SipService", "com.csipsimple.utils.Log", "com.csipsimple.wizards.WizardUtils" ]
import android.content.ContentUris; import com.csipsimple.api.SipProfile; import com.csipsimple.api.SipProfileState; import com.csipsimple.service.SipService; import com.csipsimple.utils.Log; import com.csipsimple.wizards.WizardUtils;
import android.content.*; import com.csipsimple.api.*; import com.csipsimple.service.*; import com.csipsimple.utils.*; import com.csipsimple.wizards.*;
[ "android.content", "com.csipsimple.api", "com.csipsimple.service", "com.csipsimple.utils", "com.csipsimple.wizards" ]
android.content; com.csipsimple.api; com.csipsimple.service; com.csipsimple.utils; com.csipsimple.wizards;
37,701
public String serialize(Object object) throws ConverterException { deep=0; StringBuilder sb=new StringBuilder(); _serialize(object,sb,new HashSet<Object>()); return sb.toString(); }
String function(Object object) throws ConverterException { deep=0; StringBuilder sb=new StringBuilder(); _serialize(object,sb,new HashSet<Object>()); return sb.toString(); }
/** * serialize a Object to his literal Format * @param object Object to serialize * @return serialized wddx package * @throws ConverterException */
serialize a Object to his literal Format
serialize
{ "repo_name": "lucee/unoffical-Lucee-no-jre", "path": "source/java/core/src/lucee/runtime/converter/ScriptConverter.java", "license": "lgpl-2.1", "size": 16333 }
[ "java.util.HashSet" ]
import java.util.HashSet;
import java.util.*;
[ "java.util" ]
java.util;
1,370,994
void writeTag(I tracker, Parcel dest) { if (tracker != null) { dest.writeInt(MAGIC_PARCEL_INT); dest.writeStrongBinder(tracker.asBinder()); } }
void writeTag(I tracker, Parcel dest) { if (tracker != null) { dest.writeInt(MAGIC_PARCEL_INT); dest.writeStrongBinder(tracker.asBinder()); } }
/** * Write tag so it can be later read by readTag */
Write tag so it can be later read by readTag
writeTag
{ "repo_name": "michalbednarski/IntentsLab", "path": "XposedHooks/src/main/java/com/github/michalbednarski/intentslab/xposedhooks/internal/trackers/ObjectTrackerModule.java", "license": "gpl-3.0", "size": 3665 }
[ "android.os.Parcel" ]
import android.os.Parcel;
import android.os.*;
[ "android.os" ]
android.os;
811,818
MetaData loadGlobalState() throws IOException { return MetaData.FORMAT.loadLatestState(logger, namedXContentRegistry, nodeEnv.nodeDataPaths()); }
MetaData loadGlobalState() throws IOException { return MetaData.FORMAT.loadLatestState(logger, namedXContentRegistry, nodeEnv.nodeDataPaths()); }
/** * Loads the global state, *without* index state, see {@link #loadFullState()} for that. */
Loads the global state, *without* index state, see <code>#loadFullState()</code> for that
loadGlobalState
{ "repo_name": "qwerty4030/elasticsearch", "path": "server/src/main/java/org/elasticsearch/gateway/MetaStateService.java", "license": "apache-2.0", "size": 5962 }
[ "java.io.IOException", "org.elasticsearch.cluster.metadata.MetaData" ]
import java.io.IOException; import org.elasticsearch.cluster.metadata.MetaData;
import java.io.*; import org.elasticsearch.cluster.metadata.*;
[ "java.io", "org.elasticsearch.cluster" ]
java.io; org.elasticsearch.cluster;
1,107,909
private void trimCandidatesNotMeetingMinimumRequirements() { Iterator<Entry<String, FunctionState>> i; for (i = fns.entrySet().iterator(); i.hasNext();) { FunctionState fs = i.next().getValue(); if (!fs.hasExistingFunctionDefinition() || !fs.canInline()) { i.remove(); } } }
void function() { Iterator<Entry<String, FunctionState>> i; for (i = fns.entrySet().iterator(); i.hasNext();) { FunctionState fs = i.next().getValue(); if (!fs.hasExistingFunctionDefinition() !fs.canInline()) { i.remove(); } } }
/** * Remove entries that aren't a valid inline candidates, from the list of * encountered names. */
Remove entries that aren't a valid inline candidates, from the list of encountered names
trimCandidatesNotMeetingMinimumRequirements
{ "repo_name": "superkonduktr/closure-compiler", "path": "src/com/google/javascript/jscomp/InlineFunctions.java", "license": "apache-2.0", "size": 35087 }
[ "java.util.Iterator", "java.util.Map" ]
import java.util.Iterator; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
838,854
@Deprecated public SocketInterceptorConfig getSocketInterceptorConfig() { return networkConfig.getSocketInterceptorConfig(); }
SocketInterceptorConfig function() { return networkConfig.getSocketInterceptorConfig(); }
/** * Use {@link ClientNetworkConfig#getSocketInterceptorConfig} instead */
Use <code>ClientNetworkConfig#getSocketInterceptorConfig</code> instead
getSocketInterceptorConfig
{ "repo_name": "lmjacksoniii/hazelcast", "path": "hazelcast-client/src/main/java/com/hazelcast/client/config/ClientConfig.java", "license": "apache-2.0", "size": 23733 }
[ "com.hazelcast.config.SocketInterceptorConfig" ]
import com.hazelcast.config.SocketInterceptorConfig;
import com.hazelcast.config.*;
[ "com.hazelcast.config" ]
com.hazelcast.config;
1,245,912
Call<ResponseBody> putEmptyAsync(Map<String, String> arrayBody, final ServiceCallback<Void> serviceCallback);
Call<ResponseBody> putEmptyAsync(Map<String, String> arrayBody, final ServiceCallback<Void> serviceCallback);
/** * Set dictionary value empty {}. * * @param arrayBody the Map&lt;String, String&gt; value * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @return the {@link Call} object */
Set dictionary value empty {}
putEmptyAsync
{ "repo_name": "matt-gibbs/AutoRest", "path": "AutoRest/Generators/Java/Java.Tests/src/main/java/fixtures/bodydictionary/Dictionary.java", "license": "mit", "size": 63614 }
[ "com.microsoft.rest.ServiceCallback", "com.squareup.okhttp.ResponseBody", "java.util.Map" ]
import com.microsoft.rest.ServiceCallback; import com.squareup.okhttp.ResponseBody; import java.util.Map;
import com.microsoft.rest.*; import com.squareup.okhttp.*; import java.util.*;
[ "com.microsoft.rest", "com.squareup.okhttp", "java.util" ]
com.microsoft.rest; com.squareup.okhttp; java.util;
2,661,624
private void notifyHostsVmFailed() { if (!getRunVdssList().isEmpty()) { ThreadPoolUtil.execute(() -> { for (Guid vdsId : getRunVdssList()) { if (!vdsId.equals(getCurrentVdsId())) { runVdsCommand(VDSCommandType.FailedToRunVm, new FailedToRunVmVDSCommandParameters(vdsId)); } } }); } }
void function() { if (!getRunVdssList().isEmpty()) { ThreadPoolUtil.execute(() -> { for (Guid vdsId : getRunVdssList()) { if (!vdsId.equals(getCurrentVdsId())) { runVdsCommand(VDSCommandType.FailedToRunVm, new FailedToRunVmVDSCommandParameters(vdsId)); } } }); } }
/** * notify other hosts on a failed attempt to run a Vm in a non blocking matter * to avoid deadlock where other host's VdsManagers lock is taken and is awaiting the current vds lock. */
notify other hosts on a failed attempt to run a Vm in a non blocking matter to avoid deadlock where other host's VdsManagers lock is taken and is awaiting the current vds lock
notifyHostsVmFailed
{ "repo_name": "OpenUniversity/ovirt-engine", "path": "backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/RunVmCommandBase.java", "license": "apache-2.0", "size": 13952 }
[ "org.ovirt.engine.core.common.vdscommands.FailedToRunVmVDSCommandParameters", "org.ovirt.engine.core.common.vdscommands.VDSCommandType", "org.ovirt.engine.core.compat.Guid", "org.ovirt.engine.core.utils.threadpool.ThreadPoolUtil" ]
import org.ovirt.engine.core.common.vdscommands.FailedToRunVmVDSCommandParameters; import org.ovirt.engine.core.common.vdscommands.VDSCommandType; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.utils.threadpool.ThreadPoolUtil;
import org.ovirt.engine.core.common.vdscommands.*; import org.ovirt.engine.core.compat.*; import org.ovirt.engine.core.utils.threadpool.*;
[ "org.ovirt.engine" ]
org.ovirt.engine;
1,470,798
File getSingleFile() throws IllegalStateException;
File getSingleFile() throws IllegalStateException;
/** * Returns the content of this collection, asserting it contains exactly one file. * * @return The file. * @throws IllegalStateException when this collection does not contain exactly one file. */
Returns the content of this collection, asserting it contains exactly one file
getSingleFile
{ "repo_name": "gradle/gradle", "path": "subprojects/core-api/src/main/java/org/gradle/api/file/FileCollection.java", "license": "apache-2.0", "size": 8513 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
1,078,631
public void initGui() { this.buttonList.clear(); Keyboard.enableRepeatEvents(true); this.title = I18n.format("createWorld.customize.custom.presets.title", new Object[0]); this.shareText = I18n.format("createWorld.customize.presets.share", new Object[0]); this.listText = I18n.format("createWorld.customize.presets.list", new Object[0]); this.export = new GuiTextField(2, this.fontRendererObj, 50, 40, this.width - 100, 20); this.list = new GuiScreenCustomizePresets.ListPreset(); this.export.setMaxStringLength(2000); this.export.setText(this.parent.saveValues()); this.select = this.addButton(new GuiButton(0, this.width / 2 - 102, this.height - 27, 100, 20, I18n.format("createWorld.customize.presets.select", new Object[0]))); this.buttonList.add(new GuiButton(1, this.width / 2 + 3, this.height - 27, 100, 20, I18n.format("gui.cancel", new Object[0]))); this.updateButtonValidity(); }
void function() { this.buttonList.clear(); Keyboard.enableRepeatEvents(true); this.title = I18n.format(STR, new Object[0]); this.shareText = I18n.format(STR, new Object[0]); this.listText = I18n.format(STR, new Object[0]); this.export = new GuiTextField(2, this.fontRendererObj, 50, 40, this.width - 100, 20); this.list = new GuiScreenCustomizePresets.ListPreset(); this.export.setMaxStringLength(2000); this.export.setText(this.parent.saveValues()); this.select = this.addButton(new GuiButton(0, this.width / 2 - 102, this.height - 27, 100, 20, I18n.format(STR, new Object[0]))); this.buttonList.add(new GuiButton(1, this.width / 2 + 3, this.height - 27, 100, 20, I18n.format(STR, new Object[0]))); this.updateButtonValidity(); }
/** * Adds the buttons (and other controls) to the screen in question. Called when the GUI is displayed and when the * window resizes, the buttonList is cleared beforehand. */
Adds the buttons (and other controls) to the screen in question. Called when the GUI is displayed and when the window resizes, the buttonList is cleared beforehand
initGui
{ "repo_name": "lucemans/ShapeClient-SRC", "path": "net/minecraft/client/gui/GuiScreenCustomizePresets.java", "license": "mpl-2.0", "size": 16356 }
[ "net.minecraft.client.resources.I18n", "org.lwjgl.input.Keyboard" ]
import net.minecraft.client.resources.I18n; import org.lwjgl.input.Keyboard;
import net.minecraft.client.resources.*; import org.lwjgl.input.*;
[ "net.minecraft.client", "org.lwjgl.input" ]
net.minecraft.client; org.lwjgl.input;
1,082,039
protected NodeFigure createNodeFigure() { NodeFigure figure = createNodePlate(); figure.setLayoutManager(new StackLayout()); IFigure shape = createNodeShape(); figure.add(shape); contentPane = setupContentPane(shape); return figure; }
NodeFigure function() { NodeFigure figure = createNodePlate(); figure.setLayoutManager(new StackLayout()); IFigure shape = createNodeShape(); figure.add(shape); contentPane = setupContentPane(shape); return figure; }
/** * Creates figure for this edit part. * * Body of this method does not depend on settings in generation model * so you may safely remove <i>generated</i> tag and modify it. * * @generated */
Creates figure for this edit part. Body of this method does not depend on settings in generation model so you may safely remove generated tag and modify it
createNodeFigure
{ "repo_name": "mikesligo/visGrid", "path": "ie.tcd.gmf.visGrid.diagram/src/visGrid/diagram/edit/parts/AuctionEditPart.java", "license": "gpl-3.0", "size": 61186 }
[ "org.eclipse.draw2d.IFigure", "org.eclipse.draw2d.StackLayout", "org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure" ]
import org.eclipse.draw2d.IFigure; import org.eclipse.draw2d.StackLayout; import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.draw2d.*; import org.eclipse.gmf.runtime.gef.ui.figures.*;
[ "org.eclipse.draw2d", "org.eclipse.gmf" ]
org.eclipse.draw2d; org.eclipse.gmf;
2,242,837
@Override public void onViewCreated(View view, Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); ensureList(); }
void function(View view, Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); ensureList(); }
/** * Attach to list view once the view hierarchy has been created. */
Attach to list view once the view hierarchy has been created
onViewCreated
{ "repo_name": "Splaktar/frisbee", "path": "app/src/main/java/org/gdg/frisbee/android/common/GdgListFragment.java", "license": "apache-2.0", "size": 11369 }
[ "android.os.Bundle", "android.view.View" ]
import android.os.Bundle; import android.view.View;
import android.os.*; import android.view.*;
[ "android.os", "android.view" ]
android.os; android.view;
333,136
public CacheConfig getCacheConfig() { final CacheConfig copy = new CacheConfig(); copy.setDeclarativeConfig(this.cacheConfig); return copy; }
CacheConfig function() { final CacheConfig copy = new CacheConfig(); copy.setDeclarativeConfig(this.cacheConfig); return copy; }
/** * Gets the CacheConfig object used to configure additional GemFire Cache components and features * (e.g. PDX). * * @return a CacheConfig object with additional GemFire Cache configuration meta-data used on * startup to configure the Cache. */
Gets the CacheConfig object used to configure additional GemFire Cache components and features (e.g. PDX)
getCacheConfig
{ "repo_name": "deepakddixit/incubator-geode", "path": "geode-core/src/main/java/org/apache/geode/distributed/ServerLauncher.java", "license": "apache-2.0", "size": 102097 }
[ "org.apache.geode.internal.cache.CacheConfig" ]
import org.apache.geode.internal.cache.CacheConfig;
import org.apache.geode.internal.cache.*;
[ "org.apache.geode" ]
org.apache.geode;
2,361,726
public static AttributeNodeFilter readAttributeNodeFilter(BinaryRawReader in) { if (!in.readBoolean()) return null; int cnt = in.readInt(); Map<String, Object> attrs = new HashMap<>(cnt); for (int i = 0; i < cnt; i++) attrs.put(in.readString(), in.readObject()); return new AttributeNodeFilter(attrs); }
static AttributeNodeFilter function(BinaryRawReader in) { if (!in.readBoolean()) return null; int cnt = in.readInt(); Map<String, Object> attrs = new HashMap<>(cnt); for (int i = 0; i < cnt; i++) attrs.put(in.readString(), in.readObject()); return new AttributeNodeFilter(attrs); }
/** * Reads the node filter config. * * @param in Stream. * @return AttributeNodeFilter. */
Reads the node filter config
readAttributeNodeFilter
{ "repo_name": "NSAmelchev/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/processors/platform/utils/PlatformConfigurationUtils.java", "license": "apache-2.0", "size": 87032 }
[ "java.util.HashMap", "java.util.Map", "org.apache.ignite.binary.BinaryRawReader", "org.apache.ignite.util.AttributeNodeFilter" ]
import java.util.HashMap; import java.util.Map; import org.apache.ignite.binary.BinaryRawReader; import org.apache.ignite.util.AttributeNodeFilter;
import java.util.*; import org.apache.ignite.binary.*; import org.apache.ignite.util.*;
[ "java.util", "org.apache.ignite" ]
java.util; org.apache.ignite;
1,923,252
void shutdown() { if (!shouldRun) { LOG.warn("DirectoryScanner: shutdown has been called, but periodic scanner not started"); } else { LOG.warn("DirectoryScanner: shutdown has been called"); } shouldRun = false; if (masterThread != null) masterThread.shutdown(); if (reportCompileThreadPool != null) { reportCompileThreadPool.shutdownNow(); } if (masterThread != null) { try { masterThread.awaitTermination(1, TimeUnit.MINUTES); } catch (InterruptedException e) { LOG.error("interrupted while waiting for masterThread to " + "terminate", e); } } if (reportCompileThreadPool != null) { try { reportCompileThreadPool.awaitTermination(1, TimeUnit.MINUTES); } catch (InterruptedException e) { LOG.error("interrupted while waiting for reportCompileThreadPool to " + "terminate", e); } } if (!retainDiffs) clear(); }
void shutdown() { if (!shouldRun) { LOG.warn(STR); } else { LOG.warn(STR); } shouldRun = false; if (masterThread != null) masterThread.shutdown(); if (reportCompileThreadPool != null) { reportCompileThreadPool.shutdownNow(); } if (masterThread != null) { try { masterThread.awaitTermination(1, TimeUnit.MINUTES); } catch (InterruptedException e) { LOG.error(STR + STR, e); } } if (reportCompileThreadPool != null) { try { reportCompileThreadPool.awaitTermination(1, TimeUnit.MINUTES); } catch (InterruptedException e) { LOG.error(STR + STR, e); } } if (!retainDiffs) clear(); }
/** * Stops the directory scanner. This method will wait for 1 minute for the * main thread to exit and an additional 1 minute for the report compilation * threads to exit. If a thread does not exit in that time period, it is * left running, and an error is logged. */
Stops the directory scanner. This method will wait for 1 minute for the main thread to exit and an additional 1 minute for the report compilation threads to exit. If a thread does not exit in that time period, it is left running, and an error is logged
shutdown
{ "repo_name": "jaypatil/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java", "license": "gpl-3.0", "size": 23661 }
[ "java.util.concurrent.TimeUnit" ]
import java.util.concurrent.TimeUnit;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
1,218,694
private void compareNodeInstWithArcMakeExport(NodeInst ni, ArcInst ai, Map<Long,List<PolyConnection>> overlapMap, GatherNetworksVisitor gatherNetworks) { // get the polygon and layer that needs to connect Poly arcPoly = null; Poly [] arcPolys = ai.getProto().getTechnology().getShapeOfArc(ai); int aTot = arcPolys.length; for(int i=0; i<aTot; i++) { arcPoly = arcPolys[i]; Layer arcLayer = arcPoly.getLayer(); Layer.Function arcLayerFun = arcLayer.getFunction(); if (arcLayerFun.isMetal() || arcLayerFun.isDiff() || arcLayerFun.isPoly()) break; arcPoly = null; } if (arcPoly == null) return; // this is the arc ai at the top level int netID = gatherNetworks.getGlobalNetworkID(VarContext.globalContext, ai.getHeadPortInst()); SubPolygon sp2 = new SubPolygon(arcPoly, VarContext.globalContext, netID, ai, null); // look for geometry inside the cell that touches the arc, and make an export so it can connect ArcTouchVisitor atv = new ArcTouchVisitor(ai, arcPoly, ni, false, gatherNetworks); HierarchyEnumerator.enumerateCell(ni.getParent(), VarContext.globalContext, atv); SubPolygon sp = atv.getExportDrillLocation(); if (sp != null) { registerPoly(overlapMap, new PolyConnection(sp, sp2)); //makeExportDrill((NodeInst)sp.theObj, sp.poly.getPort(), sp.context, null, null); return; } // try arcs atv.setDoArcs(true); HierarchyEnumerator.enumerateCell(ni.getParent(), VarContext.globalContext, atv); sp = atv.getExportDrillLocation(); if (sp != null) { registerPoly(overlapMap, new PolyConnection(sp, sp2)); // // get arc transformed to top-level // ArcInst breakArc = (ArcInst)sp.theObj; // if (!breakArc.isLinked()) return; // Point2D head = breakArc.getHeadLocation(); // head = new Point2D.Double(head.getX(), head.getY()); // Point2D tail = breakArc.getTailLocation(); // tail = new Point2D.Double(tail.getX(), tail.getY()); // sp.xfToTop.transform(head, head); // sp.xfToTop.transform(tail, tail); // int angle = DBMath.figureAngle(head, tail); // // // find where it intersects the top-level 
arc // Point2D breakPt = null; // if (angle%1800 == ai.getAngle()%1800) // { // if (DBMath.distToLine(head, tail, ai.getHeadLocation()) < // DBMath.distToLine(head, tail, ai.getTailLocation())) // { // breakPt = DBMath.intersect(head, angle, ai.getHeadLocation(), (ai.getAngle()+900)%3600); // } else // { // breakPt = DBMath.intersect(head, angle, ai.getTailLocation(), (ai.getAngle()+900)%3600); // } // } else // { // breakPt = DBMath.intersect(head, angle, ai.getHeadLocation(), ai.getAngle()); // } // if (breakPt == null) return; // // // transform the intersection point back down into low-level // try // { // sp.xfToTop.inverseTransform(breakPt, breakPt); // } catch (NoninvertibleTransformException e) { return; } // // // break the arc at that point // PrimitiveNode pinType = breakArc.getProto().findPinProto(); // NodeInst pin = NodeInst.newInstance(pinType, breakPt, pinType.getDefaultLambdaBaseWidth(ep), // pinType.getDefaultLambdaBaseHeight(ep), breakArc.getParent()); // if (pin == null) return; // // PortInst pi = pin.getOnlyPortInst(); // PortInst headPort = breakArc.getHeadPortInst(); // PortInst tailPort = breakArc.getTailPortInst(); // Point2D headPt = breakArc.getHeadLocation(); // Point2D tailPt = breakArc.getTailLocation(); // double width = breakArc.getLambdaBaseWidth(); // String arcName = breakArc.getName(); // // // create the new arcs // ArcInst newAi1 = ArcInst.makeInstanceBase(breakArc.getProto(), width, headPort, pi, headPt, breakPt, null); // ArcInst newAi2 = ArcInst.makeInstanceBase(breakArc.getProto(), width, pi, tailPort, breakPt, tailPt, null); // if (newAi1 == null || newAi2 == null) return; // newAi1.setHeadNegated(breakArc.isHeadNegated()); // newAi1.setHeadExtended(breakArc.isHeadExtended()); // newAi1.setHeadArrowed(breakArc.isHeadArrowed()); // newAi2.setTailNegated(breakArc.isTailNegated()); // newAi2.setTailExtended(breakArc.isTailExtended()); // newAi2.setTailArrowed(breakArc.isTailArrowed()); // breakArc.kill(); // if (arcName != 
null) // { // if (headPt.distance(breakPt) > tailPt.distance(breakPt)) // { // newAi1.setName(arcName); // newAi1.copyTextDescriptorFrom(breakArc, ArcInst.ARC_NAME); // } else // { // newAi2.setName(arcName); // newAi2.copyTextDescriptorFrom(breakArc, ArcInst.ARC_NAME); // } // } // // // now drill the break pin to the top // makeExportDrill(pin, pi.getPortProto(), sp.context, null, null); } }
void function(NodeInst ni, ArcInst ai, Map<Long,List<PolyConnection>> overlapMap, GatherNetworksVisitor gatherNetworks) { Poly arcPoly = null; Poly [] arcPolys = ai.getProto().getTechnology().getShapeOfArc(ai); int aTot = arcPolys.length; for(int i=0; i<aTot; i++) { arcPoly = arcPolys[i]; Layer arcLayer = arcPoly.getLayer(); Layer.Function arcLayerFun = arcLayer.getFunction(); if (arcLayerFun.isMetal() arcLayerFun.isDiff() arcLayerFun.isPoly()) break; arcPoly = null; } if (arcPoly == null) return; int netID = gatherNetworks.getGlobalNetworkID(VarContext.globalContext, ai.getHeadPortInst()); SubPolygon sp2 = new SubPolygon(arcPoly, VarContext.globalContext, netID, ai, null); ArcTouchVisitor atv = new ArcTouchVisitor(ai, arcPoly, ni, false, gatherNetworks); HierarchyEnumerator.enumerateCell(ni.getParent(), VarContext.globalContext, atv); SubPolygon sp = atv.getExportDrillLocation(); if (sp != null) { registerPoly(overlapMap, new PolyConnection(sp, sp2)); return; } atv.setDoArcs(true); HierarchyEnumerator.enumerateCell(ni.getParent(), VarContext.globalContext, atv); sp = atv.getExportDrillLocation(); if (sp != null) { registerPoly(overlapMap, new PolyConnection(sp, sp2)); } }
/** * Method to compare a node instance and an arc to see if they touch and should be connected and an export created. * @param ni the NodeInst to compare. * @param ai the ArcInst to compare. */
Method to compare a node instance and an arc to see if they touch and should be connected and an export created
compareNodeInstWithArcMakeExport
{ "repo_name": "imr/Electric8", "path": "com/sun/electric/tool/routing/AutoStitch.java", "license": "gpl-3.0", "size": 116435 }
[ "com.sun.electric.database.geometry.Poly", "com.sun.electric.database.hierarchy.HierarchyEnumerator", "com.sun.electric.database.topology.ArcInst", "com.sun.electric.database.topology.NodeInst", "com.sun.electric.database.variable.VarContext", "com.sun.electric.technology.Layer", "java.util.List", "java.util.Map" ]
import com.sun.electric.database.geometry.Poly; import com.sun.electric.database.hierarchy.HierarchyEnumerator; import com.sun.electric.database.topology.ArcInst; import com.sun.electric.database.topology.NodeInst; import com.sun.electric.database.variable.VarContext; import com.sun.electric.technology.Layer; import java.util.List; import java.util.Map;
import com.sun.electric.database.geometry.*; import com.sun.electric.database.hierarchy.*; import com.sun.electric.database.topology.*; import com.sun.electric.database.variable.*; import com.sun.electric.technology.*; import java.util.*;
[ "com.sun.electric", "java.util" ]
com.sun.electric; java.util;
293,101
public Column getIsConnectedColumn() { ColumnDescription columndesc = new ColumnDescription(ManagerColumn.ISCONNECTED.columnName(), "getIsConnectedColumn", VersionNum.VERSION110); return (Column) super.getColumnHandler(columndesc); }
Column function() { ColumnDescription columndesc = new ColumnDescription(ManagerColumn.ISCONNECTED.columnName(), STR, VersionNum.VERSION110); return (Column) super.getColumnHandler(columndesc); }
/** * Get the Column entity which column name is "is_connected" from the Row * entity of attributes. * @return the Column entity which column name is "is_connected" */
Get the Column entity which column name is "is_connected" from the Row entity of attributes
getIsConnectedColumn
{ "repo_name": "kuangrewawa/OnosFw", "path": "ovsdb/rfc/src/main/java/org/onosproject/ovsdb/rfc/table/Manager.java", "license": "apache-2.0", "size": 10757 }
[ "org.onosproject.ovsdb.rfc.notation.Column", "org.onosproject.ovsdb.rfc.tableservice.ColumnDescription" ]
import org.onosproject.ovsdb.rfc.notation.Column; import org.onosproject.ovsdb.rfc.tableservice.ColumnDescription;
import org.onosproject.ovsdb.rfc.notation.*; import org.onosproject.ovsdb.rfc.tableservice.*;
[ "org.onosproject.ovsdb" ]
org.onosproject.ovsdb;
2,509,373
@Test public void testConstructor() { final List<Element> elements = Collections .singletonList((Element) new BooleanElement("1", false)); final RootDocument element = new RootDocument(elements); assertEquals(elements, element.getElements()); }
void function() { final List<Element> elements = Collections .singletonList((Element) new BooleanElement("1", false)); final RootDocument element = new RootDocument(elements); assertEquals(elements, element.getElements()); }
/** * Test method for {@link RootDocument#RootDocument(java.util.List)} . */
Test method for <code>RootDocument#RootDocument(java.util.List)</code>
testConstructor
{ "repo_name": "allanbank/mongodb-async-driver", "path": "src/test/java/com/allanbank/mongodb/bson/impl/RootDocumentTest.java", "license": "apache-2.0", "size": 19399 }
[ "com.allanbank.mongodb.bson.Element", "com.allanbank.mongodb.bson.element.BooleanElement", "java.util.Collections", "java.util.List", "org.junit.Assert" ]
import com.allanbank.mongodb.bson.Element; import com.allanbank.mongodb.bson.element.BooleanElement; import java.util.Collections; import java.util.List; import org.junit.Assert;
import com.allanbank.mongodb.bson.*; import com.allanbank.mongodb.bson.element.*; import java.util.*; import org.junit.*;
[ "com.allanbank.mongodb", "java.util", "org.junit" ]
com.allanbank.mongodb; java.util; org.junit;
2,074,034
@Test public void testMultipleFilesSmallerThanOneBlock() throws Exception { Configuration conf = new HdfsConfiguration(); final int BLOCK_SIZE = 6 * 1024; conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE); conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true); // Make it relinquish locks. When run serially, the result should // be identical. conf.setInt(DFSConfigKeys.DFS_CONTENT_SUMMARY_LIMIT_KEY, 2); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build(); cluster.waitActive(); FileSystem fs = cluster.getFileSystem(); DFSAdmin admin = new DFSAdmin(conf); final String nnAddr = conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY); final String webhdfsuri = WebHdfsFileSystem.SCHEME + "://" + nnAddr; System.out.println("webhdfsuri=" + webhdfsuri); final FileSystem webhdfs = new Path(webhdfsuri).getFileSystem(conf); try { Path dir = new Path("/test"); boolean exceededQuota = false; ContentSummary c; // 1kb file // 6kb block // 192kb quota final int FILE_SIZE = 1024; final int QUOTA_SIZE = 32 * (int) fs.getDefaultBlockSize(dir); assertEquals(6 * 1024, fs.getDefaultBlockSize(dir)); assertEquals(192 * 1024, QUOTA_SIZE); // Create the dir and set the quota. We need to enable the quota before // writing the files as setting the quota afterwards will over-write // the cached disk space used for quota verification with the actual // amount used as calculated by INode#spaceConsumedInTree. 
assertTrue(fs.mkdirs(dir)); runCommand(admin, false, "-setSpaceQuota", Integer.toString(QUOTA_SIZE), dir.toString()); // We can create at most 59 files because block allocation is // conservative and initially assumes a full block is used, so we // need to leave at least 3 * BLOCK_SIZE free space when allocating // the last block: (58 * 3 * 1024) (3 * 6 * 1024) = 192kb for (int i = 0; i < 59; i++) { Path file = new Path("/test/test"+i); DFSTestUtil.createFile(fs, file, FILE_SIZE, (short) 3, 1L); DFSTestUtil.waitReplication(fs, file, (short) 3); } // Should account for all 59 files (almost QUOTA_SIZE) c = fs.getContentSummary(dir); checkContentSummary(c, webhdfs.getContentSummary(dir)); assertEquals("Invalid space consumed", 59 * FILE_SIZE * 3, c.getSpaceConsumed()); assertEquals("Invalid space consumed", QUOTA_SIZE - (59 * FILE_SIZE * 3), 3 * (fs.getDefaultBlockSize(dir) - FILE_SIZE)); // Now check that trying to create another file violates the quota try { Path file = new Path("/test/test59"); DFSTestUtil.createFile(fs, file, FILE_SIZE, (short) 3, 1L); DFSTestUtil.waitReplication(fs, file, (short) 3); } catch (QuotaExceededException e) { exceededQuota = true; } assertTrue("Quota not exceeded", exceededQuota); assertEquals(2, cluster.getNamesystem().getFSDirectory().getYieldCount()); } finally { cluster.shutdown(); } }
void function() throws Exception { Configuration conf = new HdfsConfiguration(); final int BLOCK_SIZE = 6 * 1024; conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE); conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true); conf.setInt(DFSConfigKeys.DFS_CONTENT_SUMMARY_LIMIT_KEY, 2); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build(); cluster.waitActive(); FileSystem fs = cluster.getFileSystem(); DFSAdmin admin = new DFSAdmin(conf); final String nnAddr = conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY); final String webhdfsuri = WebHdfsFileSystem.SCHEME + STRwebhdfsuri=STR/testSTR-setSpaceQuotaSTR/test/testSTRInvalid space consumedSTRInvalid space consumedSTR/test/test59STRQuota not exceeded", exceededQuota); assertEquals(2, cluster.getNamesystem().getFSDirectory().getYieldCount()); } finally { cluster.shutdown(); } }
/** * Like the previous test but create many files. This covers bugs where * the quota adjustment is incorrect but it takes many files to accrue * a big enough accounting error to violate the quota. */
Like the previous test but create many files. This covers bugs where the quota adjustment is incorrect but it takes many files to accrue a big enough accounting error to violate the quota
testMultipleFilesSmallerThanOneBlock
{ "repo_name": "jsrudani/HadoopHDFSProject", "path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java", "license": "apache-2.0", "size": 36885 }
[ "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.fs.FileSystem", "org.apache.hadoop.hdfs.tools.DFSAdmin", "org.apache.hadoop.hdfs.web.WebHdfsFileSystem", "org.junit.Assert" ]
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hdfs.tools.DFSAdmin; import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.junit.Assert;
import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.tools.*; import org.apache.hadoop.hdfs.web.*; import org.junit.*;
[ "org.apache.hadoop", "org.junit" ]
org.apache.hadoop; org.junit;
2,680,689
@InternalApi public com.google.bigtable.admin.v2.Cluster.State toProto() { return proto; } } @Nonnull private final com.google.bigtable.admin.v2.Cluster stateProto;
com.google.bigtable.admin.v2.Cluster.State function() { return proto; } } @Nonnull private final com.google.bigtable.admin.v2.Cluster stateProto;
/** * Creates the request protobuf. This method is considered an internal implementation detail and * not meant to be used by applications. */
Creates the request protobuf. This method is considered an internal implementation detail and not meant to be used by applications
toProto
{ "repo_name": "vam-google/google-cloud-java", "path": "google-cloud-clients/google-cloud-bigtable/src/main/java/com/google/cloud/bigtable/admin/v2/models/Cluster.java", "license": "apache-2.0", "size": 6136 }
[ "javax.annotation.Nonnull" ]
import javax.annotation.Nonnull;
import javax.annotation.*;
[ "javax.annotation" ]
javax.annotation;
2,570,812
if (StringUtils.isEmpty(str)) { return false; } String test = str.trim(); int sz = test.length(); for (int i = 0; i < sz; i++) { if(( i==0 ) && test.charAt(i)=='-') continue; if (Character.isDigit(test.charAt(i)) == false) { return false; } } return true; }
if (StringUtils.isEmpty(str)) { return false; } String test = str.trim(); int sz = test.length(); for (int i = 0; i < sz; i++) { if(( i==0 ) && test.charAt(i)=='-') continue; if (Character.isDigit(test.charAt(i)) == false) { return false; } } return true; }
/** * Test if the given string is numerical * @param str * @return */
Test if the given string is numerical
isNumeric
{ "repo_name": "condast/AieonF", "path": "Workspace/org.aieonf.commons/src/org/aieonf/commons/number/NumberUtils.java", "license": "apache-2.0", "size": 865 }
[ "org.aieonf.commons.strings.StringUtils" ]
import org.aieonf.commons.strings.StringUtils;
import org.aieonf.commons.strings.*;
[ "org.aieonf.commons" ]
org.aieonf.commons;
942,993
static @Nullable FunctionResultTemplate createResultTemplate( DataTypeFactory typeFactory, @Nullable DataTypeHint hint) { if (hint == null) { return null; } final DataTypeTemplate template; try { template = DataTypeTemplate.fromAnnotation(typeFactory, hint); } catch (Throwable t) { throw extractionError(t, "Error in data type hint annotation."); } if (template.dataType != null) { return FunctionResultTemplate.of(template.dataType); } throw extractionError( "Data type hint does not specify a data type for use as function result."); }
static @Nullable FunctionResultTemplate createResultTemplate( DataTypeFactory typeFactory, @Nullable DataTypeHint hint) { if (hint == null) { return null; } final DataTypeTemplate template; try { template = DataTypeTemplate.fromAnnotation(typeFactory, hint); } catch (Throwable t) { throw extractionError(t, STR); } if (template.dataType != null) { return FunctionResultTemplate.of(template.dataType); } throw extractionError( STR); }
/** * Creates an instance of {@link FunctionResultTemplate} from a {@link DataTypeHint}. */
Creates an instance of <code>FunctionResultTemplate</code> from a <code>DataTypeHint</code>
createResultTemplate
{ "repo_name": "darionyaphet/flink", "path": "flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/extraction/FunctionTemplate.java", "license": "apache-2.0", "size": 5865 }
[ "javax.annotation.Nullable", "org.apache.flink.table.annotation.DataTypeHint", "org.apache.flink.table.catalog.DataTypeFactory", "org.apache.flink.table.types.extraction.ExtractionUtils" ]
import javax.annotation.Nullable; import org.apache.flink.table.annotation.DataTypeHint; import org.apache.flink.table.catalog.DataTypeFactory; import org.apache.flink.table.types.extraction.ExtractionUtils;
import javax.annotation.*; import org.apache.flink.table.annotation.*; import org.apache.flink.table.catalog.*; import org.apache.flink.table.types.extraction.*;
[ "javax.annotation", "org.apache.flink" ]
javax.annotation; org.apache.flink;
2,146,809
public static Response createSeparateResponse(Request request, ResponseCode code) { Response response = new Response(code); response.setDestination(request.getSource()); response.setDestinationPort(request.getSourcePort()); response.setToken(request.getToken()); return response; }
static Response function(Request request, ResponseCode code) { Response response = new Response(code); response.setDestination(request.getSource()); response.setDestinationPort(request.getSourcePort()); response.setToken(request.getToken()); return response; }
/** * Creates a separate response with the specified response code to the * specified request. The destination address of the response is the source * address of the request. The response has the same token as the request * but needs another MID from the CoAP network stack. * * @param request the request * @param code the code * @return the response */
Creates a separate response with the specified response code to the specified request. The destination address of the response is the source address of the request. The response has the same token as the request but needs another MID from the CoAP network stack
createSeparateResponse
{ "repo_name": "tucanae47/CoAp-Android-MsgPack", "path": "app/src/main/java/org/eclipse/californium/core/coap/Response.java", "license": "mit", "size": 4445 }
[ "org.eclipse.californium.core.coap.CoAP" ]
import org.eclipse.californium.core.coap.CoAP;
import org.eclipse.californium.core.coap.*;
[ "org.eclipse.californium" ]
org.eclipse.californium;
1,206,440
public JsonObject getSomejsonobject(); // ------------------------------------------------------------------------- // FROM and INTO // -------------------------------------------------------------------------
JsonObject function();
/** * Getter for <code>somethingComposite.someJsonObject</code>. */
Getter for <code>somethingComposite.someJsonObject</code>
getSomejsonobject
{ "repo_name": "jklingsporn/vertx-jooq-async", "path": "vertx-jooq-async-generate/src/test/java/generated/classic/async/vertx/tables/interfaces/ISomethingcomposite.java", "license": "mit", "size": 2615 }
[ "io.vertx.core.json.JsonObject" ]
import io.vertx.core.json.JsonObject;
import io.vertx.core.json.*;
[ "io.vertx.core" ]
io.vertx.core;
98,889
public static void drawPicture2(Graphics2D g2) { // Draw some boats. Boat large = new Boat(100,200,225,150); Boat smallCC = new Boat(20,50,40,30); Boat tallSkinny = new Boat(20,150,20,40); Boat shortFat = new Boat(20,250,40,20); g2.setColor(Color.RED); g2.draw(large); g2.setColor(Color.GREEN); g2.draw(smallCC); g2.setColor(Color.BLUE); g2.draw(tallSkinny); g2.setColor(Color.MAGENTA); g2.draw(shortFat); Boat b1 = new Boat(100,250,50,75); g2.setColor(Color.CYAN); g2.draw(b1); // Make a black boat that's half the size, // and moved over 150 pixels in x direction Shape b2 = ShapeTransforms.scaledCopyOfLL(b1,0.5,0.5); b2 = ShapeTransforms.translatedCopyOf(b2,150,0); g2.setColor(Color.BLACK); g2.draw(b2); // Here's a boat that's 4x as big (2x the original) // and moved over 150 more pixels to right. b2 = ShapeTransforms.scaledCopyOfLL(b2,4,4); b2 = ShapeTransforms.translatedCopyOf(b2,150,0); // We'll draw this with a thicker stroke Stroke thick = new BasicStroke (4.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL); // for hex colors, see (e.g.) http://en.wikipedia.org/wiki/List_of_colors // #002FA7 is "International Klein Blue" according to Wikipedia // In HTML we use #, but in Java (and C/C++) its 0x Stroke orig=g2.getStroke(); g2.setStroke(thick); g2.setColor(new Color(0x002FA7)); g2.draw(b2); // Draw two boats with Chimney BoatWithChimney bc1 = new BoatWithChimney(50,350,40,75); BoatWithChimney bc2 = new BoatWithChimney(200,350,200,100); g2.draw(bc1); g2.setColor(new Color(0x8F00FF)); // Rotate the second house 45 degrees around its center. Shape bc3 = ShapeTransforms.rotatedCopyOf(bc2, Math.PI/4.0); g2.draw(bc3); // @@@ FINALLY, SIGN AND LABEL YOUR DRAWING g2.setStroke(orig); g2.setColor(Color.BLACK); g2.drawString("A bunch of Boats by Yuxiang Zhu", 20,20); }
static void function(Graphics2D g2) { Boat large = new Boat(100,200,225,150); Boat smallCC = new Boat(20,50,40,30); Boat tallSkinny = new Boat(20,150,20,40); Boat shortFat = new Boat(20,250,40,20); g2.setColor(Color.RED); g2.draw(large); g2.setColor(Color.GREEN); g2.draw(smallCC); g2.setColor(Color.BLUE); g2.draw(tallSkinny); g2.setColor(Color.MAGENTA); g2.draw(shortFat); Boat b1 = new Boat(100,250,50,75); g2.setColor(Color.CYAN); g2.draw(b1); Shape b2 = ShapeTransforms.scaledCopyOfLL(b1,0.5,0.5); b2 = ShapeTransforms.translatedCopyOf(b2,150,0); g2.setColor(Color.BLACK); g2.draw(b2); b2 = ShapeTransforms.scaledCopyOfLL(b2,4,4); b2 = ShapeTransforms.translatedCopyOf(b2,150,0); Stroke thick = new BasicStroke (4.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL); Stroke orig=g2.getStroke(); g2.setStroke(thick); g2.setColor(new Color(0x002FA7)); g2.draw(b2); BoatWithChimney bc1 = new BoatWithChimney(50,350,40,75); BoatWithChimney bc2 = new BoatWithChimney(200,350,200,100); g2.draw(bc1); g2.setColor(new Color(0x8F00FF)); Shape bc3 = ShapeTransforms.rotatedCopyOf(bc2, Math.PI/4.0); g2.draw(bc3); g2.setStroke(orig); g2.setColor(Color.BLACK); g2.drawString(STR, 20,20); }
/** Draw a picture with a few boats */
Draw a picture with a few boats
drawPicture2
{ "repo_name": "UCSB-CS56-W15/W15-lab04", "path": "src/edu/ucsb/cs56/w15/drawings/zhu/advanced/AllMyDrawings.java", "license": "mit", "size": 4895 }
[ "edu.ucsb.cs56.w15.drawings.utilities.ShapeTransforms", "java.awt.BasicStroke", "java.awt.Graphics2D", "java.awt.Stroke" ]
import edu.ucsb.cs56.w15.drawings.utilities.ShapeTransforms; import java.awt.BasicStroke; import java.awt.Graphics2D; import java.awt.Stroke;
import edu.ucsb.cs56.w15.drawings.utilities.*; import java.awt.*;
[ "edu.ucsb.cs56", "java.awt" ]
edu.ucsb.cs56; java.awt;
2,563,141
public List<long[]> calculateOutputShape() { List<long[]> ret = new ArrayList<>(); if (larg().getShape() != null && rarg().getShape() != null) ret.add(Shape.broadcastOutputShape(larg().getShape(), rarg().getShape())); else if(larg().getShape() != null) ret.add(larg().getShape()); return ret; }
List<long[]> function() { List<long[]> ret = new ArrayList<>(); if (larg().getShape() != null && rarg().getShape() != null) ret.add(Shape.broadcastOutputShape(larg().getShape(), rarg().getShape())); else if(larg().getShape() != null) ret.add(larg().getShape()); return ret; }
/** * Calculate the output shape for this op * * @return */
Calculate the output shape for this op
calculateOutputShape
{ "repo_name": "gagatust/nd4j", "path": "nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/ops/BaseBroadcastOp.java", "license": "apache-2.0", "size": 5642 }
[ "java.util.ArrayList", "java.util.List", "org.nd4j.linalg.api.shape.Shape" ]
import java.util.ArrayList; import java.util.List; import org.nd4j.linalg.api.shape.Shape;
import java.util.*; import org.nd4j.linalg.api.shape.*;
[ "java.util", "org.nd4j.linalg" ]
java.util; org.nd4j.linalg;
117,738
ImmutableList<SchemaOrgType> getLanguageList();
ImmutableList<SchemaOrgType> getLanguageList();
/** * Returns the value list of property language. Empty list is returned if the property not set in * current object. */
Returns the value list of property language. Empty list is returned if the property not set in current object
getLanguageList
{ "repo_name": "google/schemaorg-java", "path": "src/main/java/com/google/schemaorg/core/CommunicateAction.java", "license": "apache-2.0", "size": 10775 }
[ "com.google.common.collect.ImmutableList", "com.google.schemaorg.SchemaOrgType" ]
import com.google.common.collect.ImmutableList; import com.google.schemaorg.SchemaOrgType;
import com.google.common.collect.*; import com.google.schemaorg.*;
[ "com.google.common", "com.google.schemaorg" ]
com.google.common; com.google.schemaorg;
1,570,081
public Set<ServerName> getSplittingServersFromWALDir() throws IOException { return getServerNamesFromWALDirPath( p -> p.getName().endsWith(AbstractFSWALProvider.SPLITTING_EXT)); }
Set<ServerName> function() throws IOException { return getServerNamesFromWALDirPath( p -> p.getName().endsWith(AbstractFSWALProvider.SPLITTING_EXT)); }
/** * Get Servernames which are currently splitting; paths have a '-splitting' suffix. * @return ServerName * @throws IOException IOException */
Get Servernames which are currently splitting; paths have a '-splitting' suffix
getSplittingServersFromWALDir
{ "repo_name": "HubSpot/hbase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java", "license": "apache-2.0", "size": 15968 }
[ "java.io.IOException", "java.util.Set", "org.apache.hadoop.hbase.ServerName", "org.apache.hadoop.hbase.wal.AbstractFSWALProvider" ]
import java.io.IOException; import java.util.Set; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import java.io.*; import java.util.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.wal.*;
[ "java.io", "java.util", "org.apache.hadoop" ]
java.io; java.util; org.apache.hadoop;
2,475,949
@SuppressWarnings("unchecked") public static void drawMonospacedFontString(String text, float x, float y, int size, Color color) { UnicodeFont font = null; for (UnicodeFont f : monospacedFonts) { if (f.getFont().getSize() == size) { font = f; break; } } if (font == null) { try { font = new UnicodeFont(new java.awt.Font("Monospaced", 0, 1), size, false, false); font.addGlyphs(new String(Constants.acceptedCharacters)); font.getEffects().add(new ColorEffect()); font.loadGlyphs(); monospacedFonts.add(font); } catch (SlickException e) { e.printStackTrace(); } } font.drawString(x, y, text, color); }
@SuppressWarnings(STR) static void function(String text, float x, float y, int size, Color color) { UnicodeFont font = null; for (UnicodeFont f : monospacedFonts) { if (f.getFont().getSize() == size) { font = f; break; } } if (font == null) { try { font = new UnicodeFont(new java.awt.Font(STR, 0, 1), size, false, false); font.addGlyphs(new String(Constants.acceptedCharacters)); font.getEffects().add(new ColorEffect()); font.loadGlyphs(); monospacedFonts.add(font); } catch (SlickException e) { e.printStackTrace(); } } font.drawString(x, y, text, color); }
/** * This method will draw a monospaced string with the specified size, color, and location. * @param text the string to be drawn * @param x the x coordinate of the drawing location * @param y the u coordinate of the drawing location * @param size the size of the font to be drawn (any size) * @param color the color of the font to be drawn (any color) */
This method will draw a monospaced string with the specified size, color, and location
drawMonospacedFontString
{ "repo_name": "zadjii/demigods", "path": "src/util/Font.java", "license": "mit", "size": 18747 }
[ "org.newdawn.slick.Color", "org.newdawn.slick.SlickException", "org.newdawn.slick.UnicodeFont", "org.newdawn.slick.font.effects.ColorEffect" ]
import org.newdawn.slick.Color; import org.newdawn.slick.SlickException; import org.newdawn.slick.UnicodeFont; import org.newdawn.slick.font.effects.ColorEffect;
import org.newdawn.slick.*; import org.newdawn.slick.font.effects.*;
[ "org.newdawn.slick" ]
org.newdawn.slick;
120,940
private void handleWithReferrals( LdapSession session, SearchRequest req ) throws LdapException { LdapResult result = req.getResultResponse().getLdapResult(); Entry entry = null; boolean isReferral = false; boolean isparentReferral = false; DirectoryService directoryService = session.getCoreSession().getDirectoryService(); ReferralManager referralManager = directoryService.getReferralManager(); Dn reqTargetDn = req.getBase(); // MyVD doesn't care //reqTargetDn.apply( directoryService.getSchemaManager() ); // Check if the entry itself is a referral referralManager.lockRead(); try { isReferral = referralManager.isReferral( reqTargetDn ); if ( !isReferral ) { // Check if the entry has a parent which is a referral isparentReferral = referralManager.hasParentReferral( reqTargetDn ); } } finally { // Unlock the ReferralManager referralManager.unlock(); } if ( !isReferral && !isparentReferral ) { // This is not a referral and it does not have a parent which // is a referral : standard case, just deal with the request if ( IS_DEBUG ) { LOG.debug( "Entry {} is NOT a referral.", reqTargetDn ); } handleIgnoringReferrals( session, req ); return; } else { // ------------------------------------------------------------------- // Lookup Entry // ------------------------------------------------------------------- // try to lookup the entry but ignore exceptions when it does not // exist since entry may not exist but may have an ancestor that is a // referral - would rather attempt a lookup that fails then do check // for existence than have to do another lookup to get entry info try { entry = session.getCoreSession().lookup( reqTargetDn ); if ( IS_DEBUG ) { LOG.debug( "Entry for {} was found: ", reqTargetDn, entry ); } } catch ( LdapException e ) { LOG.debug( "Entry for {} not found.", reqTargetDn ); } catch ( Exception e ) { handleException( session, req, e ); return; } // ------------------------------------------------------------------- // Handle Existing Entry // 
------------------------------------------------------------------- if ( entry != null ) { try { if ( IS_DEBUG ) { LOG.debug( "Entry is a referral: {}", entry ); } handleReferralEntryForSearch( session, req, entry ); return; } catch ( Exception e ) { handleException( session, req, e ); } } // ------------------------------------------------------------------- // Handle Non-existing Entry // ------------------------------------------------------------------- // if the entry is null we still have to check for a referral ancestor // also the referrals need to be adjusted based on the ancestor's ref // values to yield the correct path to the entry in the target DSAs else { // The entry is null : it has a parent referral. Entry referralAncestor = null; try { referralAncestor = getFarthestReferralAncestor( session, reqTargetDn ); } catch ( Exception e ) { handleException( session, req, e ); return; } if ( referralAncestor == null ) { result.setDiagnosticMessage( "Entry not found." ); result.setResultCode( ResultCodeEnum.NO_SUCH_OBJECT ); session.getIoSession().write( req.getResultResponse() ); return; } // if we get here then we have a valid referral ancestor try { Referral referral = getReferralOnAncestorForSearch( session, req, referralAncestor ); result.setResultCode( ResultCodeEnum.REFERRAL ); result.setReferral( referral ); session.getIoSession().write( req.getResultResponse() ); } catch ( Exception e ) { handleException( session, req, e ); } } } }
void function( LdapSession session, SearchRequest req ) throws LdapException { LdapResult result = req.getResultResponse().getLdapResult(); Entry entry = null; boolean isReferral = false; boolean isparentReferral = false; DirectoryService directoryService = session.getCoreSession().getDirectoryService(); ReferralManager referralManager = directoryService.getReferralManager(); Dn reqTargetDn = req.getBase(); referralManager.lockRead(); try { isReferral = referralManager.isReferral( reqTargetDn ); if ( !isReferral ) { isparentReferral = referralManager.hasParentReferral( reqTargetDn ); } } finally { referralManager.unlock(); } if ( !isReferral && !isparentReferral ) { if ( IS_DEBUG ) { LOG.debug( STR, reqTargetDn ); } handleIgnoringReferrals( session, req ); return; } else { try { entry = session.getCoreSession().lookup( reqTargetDn ); if ( IS_DEBUG ) { LOG.debug( STR, reqTargetDn, entry ); } } catch ( LdapException e ) { LOG.debug( STR, reqTargetDn ); } catch ( Exception e ) { handleException( session, req, e ); return; } if ( entry != null ) { try { if ( IS_DEBUG ) { LOG.debug( STR, entry ); } handleReferralEntryForSearch( session, req, entry ); return; } catch ( Exception e ) { handleException( session, req, e ); } } else { Entry referralAncestor = null; try { referralAncestor = getFarthestReferralAncestor( session, reqTargetDn ); } catch ( Exception e ) { handleException( session, req, e ); return; } if ( referralAncestor == null ) { result.setDiagnosticMessage( STR ); result.setResultCode( ResultCodeEnum.NO_SUCH_OBJECT ); session.getIoSession().write( req.getResultResponse() ); return; } try { Referral referral = getReferralOnAncestorForSearch( session, req, referralAncestor ); result.setResultCode( ResultCodeEnum.REFERRAL ); result.setReferral( referral ); session.getIoSession().write( req.getResultResponse() ); } catch ( Exception e ) { handleException( session, req, e ); } } } }
/** * Handles processing with referrals without ManageDsaIT decorator. */
Handles processing with referrals without ManageDsaIT decorator
handleWithReferrals
{ "repo_name": "TremoloSecurity/MyVirtualDirectory", "path": "server/src/main/java/org/apache/directory/server/ldap/handlers/request/SearchRequestHandler.java", "license": "apache-2.0", "size": 63370 }
[ "org.apache.directory.api.ldap.model.entry.Entry", "org.apache.directory.api.ldap.model.exception.LdapException", "org.apache.directory.api.ldap.model.message.LdapResult", "org.apache.directory.api.ldap.model.message.Referral", "org.apache.directory.api.ldap.model.message.ResultCodeEnum", "org.apache.directory.api.ldap.model.message.SearchRequest", "org.apache.directory.api.ldap.model.name.Dn", "org.apache.directory.server.core.api.DirectoryService", "org.apache.directory.server.core.api.ReferralManager", "org.apache.directory.server.ldap.LdapSession" ]
import org.apache.directory.api.ldap.model.entry.Entry; import org.apache.directory.api.ldap.model.exception.LdapException; import org.apache.directory.api.ldap.model.message.LdapResult; import org.apache.directory.api.ldap.model.message.Referral; import org.apache.directory.api.ldap.model.message.ResultCodeEnum; import org.apache.directory.api.ldap.model.message.SearchRequest; import org.apache.directory.api.ldap.model.name.Dn; import org.apache.directory.server.core.api.DirectoryService; import org.apache.directory.server.core.api.ReferralManager; import org.apache.directory.server.ldap.LdapSession;
import org.apache.directory.api.ldap.model.entry.*; import org.apache.directory.api.ldap.model.exception.*; import org.apache.directory.api.ldap.model.message.*; import org.apache.directory.api.ldap.model.name.*; import org.apache.directory.server.core.api.*; import org.apache.directory.server.ldap.*;
[ "org.apache.directory" ]
org.apache.directory;
624,541
private String determineAuthPassPhrase(final Definition def) { final String authPassPhrase = (def.getAuthPassphrase() == null ? m_config.getAuthPassphrase() : def.getAuthPassphrase()); if (authPassPhrase == null) { return SnmpAgentConfig.DEFAULT_AUTH_PASS_PHRASE; } return authPassPhrase; }
String function(final Definition def) { final String authPassPhrase = (def.getAuthPassphrase() == null ? m_config.getAuthPassphrase() : def.getAuthPassphrase()); if (authPassPhrase == null) { return SnmpAgentConfig.DEFAULT_AUTH_PASS_PHRASE; } return authPassPhrase; }
/** * Helper method to find a authentication passphrase to use from the snmp-config. If v3 has * been specified and one can't be found, then a default is used for this * is a required option for v3 operations. * @param def * @return */
Helper method to find a authentication passphrase to use from the snmp-config. If v3 has been specified and one can't be found, then a default is used for this is a required option for v3 operations
determineAuthPassPhrase
{ "repo_name": "peternixon/opennms-mirror", "path": "opennms-config/src/main/java/org/opennms/netmgt/config/SnmpPeerFactory.java", "license": "gpl-2.0", "size": 27915 }
[ "org.opennms.netmgt.config.snmp.Definition", "org.opennms.netmgt.snmp.SnmpAgentConfig" ]
import org.opennms.netmgt.config.snmp.Definition; import org.opennms.netmgt.snmp.SnmpAgentConfig;
import org.opennms.netmgt.config.snmp.*; import org.opennms.netmgt.snmp.*;
[ "org.opennms.netmgt" ]
org.opennms.netmgt;
2,269,221
protected Map<String, String> filterCamelHeadersForResponseHeaders(Map<String, Object> headers, org.apache.camel.Exchange camelExchange) { Map<String, String> answer = new HashMap<>(); for (Map.Entry<String, Object> entry : headers.entrySet()) { if (getHeaderFilterStrategy().applyFilterToCamelHeaders(entry.getKey(), entry.getValue(), camelExchange)) { continue; } // skip content-length as the simple binding with Response will set correct content-length based // on the entity set as the Response if ("content-length".equalsIgnoreCase(entry.getKey())) { continue; } answer.put(entry.getKey(), entry.getValue().toString()); } return answer; }
Map<String, String> function(Map<String, Object> headers, org.apache.camel.Exchange camelExchange) { Map<String, String> answer = new HashMap<>(); for (Map.Entry<String, Object> entry : headers.entrySet()) { if (getHeaderFilterStrategy().applyFilterToCamelHeaders(entry.getKey(), entry.getValue(), camelExchange)) { continue; } if (STR.equalsIgnoreCase(entry.getKey())) { continue; } answer.put(entry.getKey(), entry.getValue().toString()); } return answer; }
/** * Filters the response headers that will be sent back to the client. * <p /> * The {@link DefaultCxfRsBinding} doesn't filter the response headers according to the {@link HeaderFilterStrategy}, * so we handle this task in this binding. */
Filters the response headers that will be sent back to the client. The <code>DefaultCxfRsBinding</code> doesn't filter the response headers according to the <code>HeaderFilterStrategy</code>, so we handle this task in this binding
filterCamelHeadersForResponseHeaders
{ "repo_name": "kevinearls/camel", "path": "components/camel-cxf/src/main/java/org/apache/camel/component/cxf/jaxrs/SimpleCxfRsBinding.java", "license": "apache-2.0", "size": 16789 }
[ "java.util.HashMap", "java.util.Map", "org.apache.cxf.message.Exchange" ]
import java.util.HashMap; import java.util.Map; import org.apache.cxf.message.Exchange;
import java.util.*; import org.apache.cxf.message.*;
[ "java.util", "org.apache.cxf" ]
java.util; org.apache.cxf;
1,602,234
@Override protected void setUp() { DlimTestUtils.cleanTestOutputDirectories(); root = DlimPackage.eINSTANCE.getDlimFactory().createSequence(); }
void function() { DlimTestUtils.cleanTestOutputDirectories(); root = DlimPackage.eINSTANCE.getDlimFactory().createSequence(); }
/** * Sets up the test. Creates model to extract into. Cleans output * directories. */
Sets up the test. Creates model to extract into. Cleans output directories
setUp
{ "repo_name": "joakimkistowski/LIMBO", "path": "tools.descartes.dlim.generator.tests/src/tools/descartes/dlim/extractor/tests/ModelExtractorTest.java", "license": "epl-1.0", "size": 4710 }
[ "tools.descartes.dlim.DlimPackage", "tools.descartes.dlim.tests.DlimTestUtils" ]
import tools.descartes.dlim.DlimPackage; import tools.descartes.dlim.tests.DlimTestUtils;
import tools.descartes.dlim.*; import tools.descartes.dlim.tests.*;
[ "tools.descartes.dlim" ]
tools.descartes.dlim;
423,220
public static IPv4NeighborAddressSubTlv read(ChannelBuffer c) { return IPv4NeighborAddressSubTlv.of(c.readInt()); }
static IPv4NeighborAddressSubTlv function(ChannelBuffer c) { return IPv4NeighborAddressSubTlv.of(c.readInt()); }
/** * Reads the channel buffer and returns object of IPv4-Neighbor-Address-Tlv. * * @param c input channel buffer * @return object of IPv4-Neighbor-Address-Tlv */
Reads the channel buffer and returns object of IPv4-Neighbor-Address-Tlv
read
{ "repo_name": "donNewtonAlpha/onos", "path": "protocols/pcep/pcepio/src/main/java/org/onosproject/pcepio/types/IPv4NeighborAddressSubTlv.java", "license": "apache-2.0", "size": 3971 }
[ "org.jboss.netty.buffer.ChannelBuffer" ]
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.*;
[ "org.jboss.netty" ]
org.jboss.netty;
961,874
public void setActivateOnItemClick(boolean activateOnItemClick) { // When setting CHOICE_MODE_SINGLE, ListView will automatically // give items the 'activated' state when touched. getListView().setChoiceMode(activateOnItemClick ? ListView.CHOICE_MODE_SINGLE : ListView.CHOICE_MODE_NONE); }
void function(boolean activateOnItemClick) { getListView().setChoiceMode(activateOnItemClick ? ListView.CHOICE_MODE_SINGLE : ListView.CHOICE_MODE_NONE); }
/** * Turns on activate-on-click mode. When this mode is on, list items will be * given the 'activated' state when touched. */
Turns on activate-on-click mode. When this mode is on, list items will be given the 'activated' state when touched
setActivateOnItemClick
{ "repo_name": "egore/smsmanager", "path": "SMSManager/src/main/java/de/egore911/smsmanager/MessageListFragment.java", "license": "mit", "size": 6937 }
[ "android.widget.ListView" ]
import android.widget.ListView;
import android.widget.*;
[ "android.widget" ]
android.widget;
1,190,864
JSONObject getPublishedFiles(MimeType mt);
JSONObject getPublishedFiles(MimeType mt);
/** * Accesses pending file data. * * @param mt Class for determining MIME types. * @return pending files data. */
Accesses pending file data
getPublishedFiles
{ "repo_name": "adamfisk/littleshoot-client", "path": "client/services/src/main/java/org/lastbamboo/client/services/PublishedFilesTracker.java", "license": "gpl-2.0", "size": 699 }
[ "org.json.JSONObject", "org.littleshoot.util.MimeType" ]
import org.json.JSONObject; import org.littleshoot.util.MimeType;
import org.json.*; import org.littleshoot.util.*;
[ "org.json", "org.littleshoot.util" ]
org.json; org.littleshoot.util;
1,882,687
public Type inOnly(String uri) { return to(ExchangePattern.InOnly, uri); }
Type function(String uri) { return to(ExchangePattern.InOnly, uri); }
/** * Sends the message to the given endpoint using an * <a href="http://camel.apache.org/event-message.html">Event Message</a> or * <a href="http://camel.apache.org/exchange-pattern.html">InOnly exchange pattern</a> * <p/> * Notice the existing MEP is restored after the message has been sent to the given endpoint. * * @param uri The endpoint uri which is used for sending the exchange * @return the builder */
Sends the message to the given endpoint using an Event Message or InOnly exchange pattern Notice the existing MEP is restored after the message has been sent to the given endpoint
inOnly
{ "repo_name": "grange74/camel", "path": "camel-core/src/main/java/org/apache/camel/model/ProcessorDefinition.java", "license": "apache-2.0", "size": 152533 }
[ "org.apache.camel.ExchangePattern" ]
import org.apache.camel.ExchangePattern;
import org.apache.camel.*;
[ "org.apache.camel" ]
org.apache.camel;
1,797,693
private RowIntf convertRow(RowIntf value, int dimOrdinalMax) { Object[] record = new Object[value.size()]; String memberString; for (int i = 0; i < dimColEvaluatorInfoList.size(); i++) { DimColumnResolvedFilterInfo dimColumnEvaluatorInfo = dimColEvaluatorInfoList.get(i); int index = dimColumnEvaluatorInfo.getDimension().getOrdinal(); // if filter dimension is not present in the current add its default value if (!isDimensionPresentInCurrentBlock[i]) { // fill default value here record[index] = getDimensionDefaultValue(dimColumnEvaluatorInfo); // already set value, so continue to set next dimension continue; } if (!dimColumnEvaluatorInfo.getDimension().getDataType().isComplexType()) { if (!dimColumnEvaluatorInfo.isDimensionExistsInCurrentSilce()) { record[index] = dimColumnEvaluatorInfo.getDimension().getDefaultValue(); } byte[] memberBytes = (byte[]) value.getVal(index); if (null != memberBytes) { if (Arrays.equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, memberBytes)) { memberBytes = null; } else if (memberBytes.length == 0) { memberBytes = null; } record[index] = DataTypeUtil.getDataBasedOnDataTypeForNoDictionaryColumn(memberBytes, dimColumnEvaluatorInfo.getDimension().getDataType()); } } else { // complex record[index] = value.getVal(index); } } for (int i = 0; i < msrColEvalutorInfoList.size(); i++) { MeasureColumnResolvedFilterInfo msrColumnEvalutorInfo = msrColEvalutorInfoList.get(i); int index = msrColumnEvalutorInfo.getMeasure().getOrdinal() + dimOrdinalMax; // add default value for the measure in case filter measure is not present // in the current block measure list if (!isMeasurePresentInCurrentBlock[i]) { byte[] defaultValue = msrColumnEvalutorInfo.getCarbonColumn().getDefaultValue(); record[index] = RestructureUtil.getMeasureDefaultValue( msrColumnEvalutorInfo.getCarbonColumn().getColumnSchema(), defaultValue); // already set value, so continue to set next measure continue; } // measure record[index] = value.getVal(index); } RowIntf row = new 
RowImpl(); row.setValues(record); return row; }
RowIntf function(RowIntf value, int dimOrdinalMax) { Object[] record = new Object[value.size()]; String memberString; for (int i = 0; i < dimColEvaluatorInfoList.size(); i++) { DimColumnResolvedFilterInfo dimColumnEvaluatorInfo = dimColEvaluatorInfoList.get(i); int index = dimColumnEvaluatorInfo.getDimension().getOrdinal(); if (!isDimensionPresentInCurrentBlock[i]) { record[index] = getDimensionDefaultValue(dimColumnEvaluatorInfo); continue; } if (!dimColumnEvaluatorInfo.getDimension().getDataType().isComplexType()) { if (!dimColumnEvaluatorInfo.isDimensionExistsInCurrentSilce()) { record[index] = dimColumnEvaluatorInfo.getDimension().getDefaultValue(); } byte[] memberBytes = (byte[]) value.getVal(index); if (null != memberBytes) { if (Arrays.equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, memberBytes)) { memberBytes = null; } else if (memberBytes.length == 0) { memberBytes = null; } record[index] = DataTypeUtil.getDataBasedOnDataTypeForNoDictionaryColumn(memberBytes, dimColumnEvaluatorInfo.getDimension().getDataType()); } } else { record[index] = value.getVal(index); } } for (int i = 0; i < msrColEvalutorInfoList.size(); i++) { MeasureColumnResolvedFilterInfo msrColumnEvalutorInfo = msrColEvalutorInfoList.get(i); int index = msrColumnEvalutorInfo.getMeasure().getOrdinal() + dimOrdinalMax; if (!isMeasurePresentInCurrentBlock[i]) { byte[] defaultValue = msrColumnEvalutorInfo.getCarbonColumn().getDefaultValue(); record[index] = RestructureUtil.getMeasureDefaultValue( msrColumnEvalutorInfo.getCarbonColumn().getColumnSchema(), defaultValue); continue; } record[index] = value.getVal(index); } RowIntf row = new RowImpl(); row.setValues(record); return row; }
/** * convert encoded row to actual value row for filter to evaluate expression * @param value this row will be converted to actual value * @param dimOrdinalMax for measure column, its index in row = dimOrdinalMax + its ordinal * @return actual value row */
convert encoded row to actual value row for filter to evaluate expression
convertRow
{ "repo_name": "jackylk/incubator-carbondata", "path": "core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java", "license": "apache-2.0", "size": 22649 }
[ "java.util.Arrays", "org.apache.carbondata.core.constants.CarbonCommonConstants", "org.apache.carbondata.core.scan.executor.util.RestructureUtil", "org.apache.carbondata.core.scan.filter.intf.RowImpl", "org.apache.carbondata.core.scan.filter.intf.RowIntf", "org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo", "org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo", "org.apache.carbondata.core.util.DataTypeUtil" ]
import java.util.Arrays; import org.apache.carbondata.core.constants.CarbonCommonConstants; import org.apache.carbondata.core.scan.executor.util.RestructureUtil; import org.apache.carbondata.core.scan.filter.intf.RowImpl; import org.apache.carbondata.core.scan.filter.intf.RowIntf; import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo; import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.MeasureColumnResolvedFilterInfo; import org.apache.carbondata.core.util.DataTypeUtil;
import java.util.*; import org.apache.carbondata.core.constants.*; import org.apache.carbondata.core.scan.executor.util.*; import org.apache.carbondata.core.scan.filter.intf.*; import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.*; import org.apache.carbondata.core.util.*;
[ "java.util", "org.apache.carbondata" ]
java.util; org.apache.carbondata;
1,856,108
public TemplateModelEvent adapter(EventObject ieo) throws ActionException { if (ieo instanceof TemplateModelEvent) { return (TemplateModelEvent) ieo; } else if (ieo instanceof FileSystemEvent) { Map<String, Object> map = new HashMap<String, Object>(); final File file = ((FileSystemEvent) ieo).getSource() .getAbsoluteFile(); map.put(FILE_EVENT_PARENTFILE_KEY, file.getParent()); map.put(FILE_EVENT_NAMEFILE_KEY, FilenameUtils.getBaseName(file.getName())); map.put(FILE_EVENT_EXTENSION_KEY, FilenameUtils.getExtension(file.getName())); return new TemplateModelEvent(map); } else { try { return new TemplateModelEvent(processor.wrapRoot(ieo.getSource())); } catch (NullPointerException npe) { // NullPointerException - if tm is null if (LOGGER.isErrorEnabled()) LOGGER.error("The passed event object is null"); } catch (TemplateModelException tme) { // TemplateModelException - if defined objectWrapper can't wrap // the passed object if (LOGGER.isErrorEnabled()) LOGGER.error("Default wrapper can't wrap the passed object"); } } return null; }
TemplateModelEvent function(EventObject ieo) throws ActionException { if (ieo instanceof TemplateModelEvent) { return (TemplateModelEvent) ieo; } else if (ieo instanceof FileSystemEvent) { Map<String, Object> map = new HashMap<String, Object>(); final File file = ((FileSystemEvent) ieo).getSource() .getAbsoluteFile(); map.put(FILE_EVENT_PARENTFILE_KEY, file.getParent()); map.put(FILE_EVENT_NAMEFILE_KEY, FilenameUtils.getBaseName(file.getName())); map.put(FILE_EVENT_EXTENSION_KEY, FilenameUtils.getExtension(file.getName())); return new TemplateModelEvent(map); } else { try { return new TemplateModelEvent(processor.wrapRoot(ieo.getSource())); } catch (NullPointerException npe) { if (LOGGER.isErrorEnabled()) LOGGER.error(STR); } catch (TemplateModelException tme) { if (LOGGER.isErrorEnabled()) LOGGER.error(STR); } } return null; }
/** * Act as a Gateway interface (EIP):<br> * Try to adapt the effective input EventObject to the expected input a * TemplateDataModel * * @param ieo * The Event Object to test or to transform * @return Adapted data model or null if event cannot be adapted */
Act as a Gateway interface (EIP): Try to adapt the effective input EventObject to the expected input a TemplateDataModel
adapter
{ "repo_name": "xandros6/geobatch", "path": "src/actions/freemarker/src/main/java/it/geosolutions/geobatch/actions/freemarker/FreeMarkerAction.java", "license": "gpl-3.0", "size": 15278 }
[ "freemarker.template.TemplateModelException", "it.geosolutions.filesystemmonitor.monitor.FileSystemEvent", "it.geosolutions.geobatch.flow.event.action.ActionException", "java.io.File", "java.util.EventObject", "java.util.HashMap", "java.util.Map", "org.apache.commons.io.FilenameUtils" ]
import freemarker.template.TemplateModelException; import it.geosolutions.filesystemmonitor.monitor.FileSystemEvent; import it.geosolutions.geobatch.flow.event.action.ActionException; import java.io.File; import java.util.EventObject; import java.util.HashMap; import java.util.Map; import org.apache.commons.io.FilenameUtils;
import freemarker.template.*; import it.geosolutions.filesystemmonitor.monitor.*; import it.geosolutions.geobatch.flow.event.action.*; import java.io.*; import java.util.*; import org.apache.commons.io.*;
[ "freemarker.template", "it.geosolutions.filesystemmonitor", "it.geosolutions.geobatch", "java.io", "java.util", "org.apache.commons" ]
freemarker.template; it.geosolutions.filesystemmonitor; it.geosolutions.geobatch; java.io; java.util; org.apache.commons;
145,931
public static void close(Closeable... closeables) { for (Closeable closeable : closeables) { close(closeable); } } private static final int DEFAULT_BUFFER_SIZE = 1024 * 4; private static final String DEFAULT_DELIMITER = ",";
static void function(Closeable... closeables) { for (Closeable closeable : closeables) { close(closeable); } } private static final int DEFAULT_BUFFER_SIZE = 1024 * 4; private static final String DEFAULT_DELIMITER = ",";
/** * Closes the given resources if they are available. * * @param closeables the objects to close */
Closes the given resources if they are available
close
{ "repo_name": "MeiSheng/aries", "path": "blueprint/blueprint-cm/src/test/java/org/apache/aries/blueprint/compendium/cm/Helper.java", "license": "apache-2.0", "size": 31088 }
[ "java.io.Closeable" ]
import java.io.Closeable;
import java.io.*;
[ "java.io" ]
java.io;
2,567,675
public List<Status> getHomeTimeline() throws TwitterException;
List<Status> function() throws TwitterException;
/** * Devuelve los 20 (o numero definido por maxResults) Status mas recientes posteados por el usuario y sus amigos, incluyendo retweets. * @return Lista de los 20 (o numero definido por maxResults) Status mas recientes del timeline * @throws TwitterException Si no se ha podido obtener el timeline (P.E: el usuario no esta logueado) */
Devuelve los 20 (o numero definido por maxResults) Status mas recientes posteados por el usuario y sus amigos, incluyendo retweets
getHomeTimeline
{ "repo_name": "AlejandroVera/ProjectTwitter", "path": "src/interfacesComunes/Twitter.java", "license": "apache-2.0", "size": 10902 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
781,824
public final JsArray<Touch> getTargetTouches() { return DOMImpl.impl.getTargetTouches(this); }
final JsArray<Touch> function() { return DOMImpl.impl.getTargetTouches(this); }
/** * Get an array of touches which have changed since the last touch event. * * @return array of touches which have changed since the last touch event */
Get an array of touches which have changed since the last touch event
getTargetTouches
{ "repo_name": "syntelos/gwtcc", "path": "src/com/google/gwt/dom/client/NativeEvent.java", "license": "apache-2.0", "size": 7829 }
[ "com.google.gwt.core.client.JsArray" ]
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.*;
[ "com.google.gwt" ]
com.google.gwt;
1,376,931
public void genericEncryptTest(String encryptAlgForBuilder, String builderId, String decryptAlgForRS, String appName, String[] msgs, List<NameValuePair> parms) throws Exception { Log.info(thisClass, _testName, "********************************************************************************************************************"); Log.info(thisClass, _testName, "******** Testing with Jwt builder using encryption algorithm: " + encryptAlgForBuilder + " and RS using encryption algorithm: " + decryptAlgForRS + " ********"); Log.info(thisClass, _testName, "********************************************************************************************************************"); TestSettings updatedTestSettings = rsTools.updateRSProtectedResource(testSettings, appName); String jwtToken = createTokenWithSubject(builderId, parms); if (msgs == null) { if (encryptAlgForBuilder.equals(decryptAlgForRS) || Constants.SIGALG_NONE.equals(encryptAlgForBuilder)) { positiveTest(updatedTestSettings, jwtToken); } else { negativeTest(updatedTestSettings, jwtToken, new String[] { MessageConstants.CWWKS1737E_JWT_VALIDATION_FAILURE }); } } else { negativeTest(updatedTestSettings, jwtToken, msgs); } }
void function(String encryptAlgForBuilder, String builderId, String decryptAlgForRS, String appName, String[] msgs, List<NameValuePair> parms) throws Exception { Log.info(thisClass, _testName, STR); Log.info(thisClass, _testName, STR + encryptAlgForBuilder + STR + decryptAlgForRS + STR); Log.info(thisClass, _testName, STR); TestSettings updatedTestSettings = rsTools.updateRSProtectedResource(testSettings, appName); String jwtToken = createTokenWithSubject(builderId, parms); if (msgs == null) { if (encryptAlgForBuilder.equals(decryptAlgForRS) Constants.SIGALG_NONE.equals(encryptAlgForBuilder)) { positiveTest(updatedTestSettings, jwtToken); } else { negativeTest(updatedTestSettings, jwtToken, new String[] { MessageConstants.CWWKS1737E_JWT_VALIDATION_FAILURE }); } } else { negativeTest(updatedTestSettings, jwtToken, msgs); } }
/** * Generic method to create a JWT and use it to try to access a protected app * * @param encryptAlgForBuilder * - the encryption algorithm used by the builder * @param builderId * - the builderId to use to build the JWT * @param decryptAlgForRS * - the encryption algorithm used by the RS server * @param appName * - the app to invoke * @param msgs * - error messages that should searched for in the server side logs * @param parms * - additional parms to use when building the token * @throws Exception */
Generic method to create a JWT and use it to try to access a protected app
genericEncryptTest
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.ws.security.oidc.server_fat.jaxrs.config/fat/src/com/ibm/ws/security/openidconnect/server/fat/jaxrs/config/noOP/NoOPEncryptionRSServerTests.java", "license": "epl-1.0", "size": 53199 }
[ "com.gargoylesoftware.htmlunit.util.NameValuePair", "com.ibm.websphere.simplicity.log.Log", "com.ibm.ws.security.oauth_oidc.fat.commonTest.Constants", "com.ibm.ws.security.oauth_oidc.fat.commonTest.MessageConstants", "com.ibm.ws.security.oauth_oidc.fat.commonTest.TestSettings", "java.util.List" ]
import com.gargoylesoftware.htmlunit.util.NameValuePair; import com.ibm.websphere.simplicity.log.Log; import com.ibm.ws.security.oauth_oidc.fat.commonTest.Constants; import com.ibm.ws.security.oauth_oidc.fat.commonTest.MessageConstants; import com.ibm.ws.security.oauth_oidc.fat.commonTest.TestSettings; import java.util.List;
import com.gargoylesoftware.htmlunit.util.*; import com.ibm.websphere.simplicity.log.*; import com.ibm.ws.security.oauth_oidc.fat.*; import java.util.*;
[ "com.gargoylesoftware.htmlunit", "com.ibm.websphere", "com.ibm.ws", "java.util" ]
com.gargoylesoftware.htmlunit; com.ibm.websphere; com.ibm.ws; java.util;
1,943,312
private JavaRDD<BoxData> partitionBoxes(final JavaSparkContext sparkContext, final boolean excludeAlreadyRenderedBoxes) throws IOException, URISyntaxException { final JavaRDD<Double> zValuesRdd = sparkContext.parallelize(zValues); final JavaPairRDD<Double, BoxDataPyramidForLayer> zToPyramidPairRdd = zValuesRdd.mapPartitionsToPair( (PairFlatMapFunction<Iterator<Double>, Double, BoxDataPyramidForLayer>) zIterator -> { LogUtilities.setupExecutorLog4j("partition " + TaskContext.getPartitionId()); final List<Tuple2<Double, BoxDataPyramidForLayer>> zToPyramidList = new ArrayList<>(); final RenderDataClient localDataClient = new RenderDataClient(parameters.renderWeb.baseDataUrl, parameters.renderWeb.owner, parameters.renderWeb.project); final String stack = parameters.box.stack; double z; while (zIterator.hasNext()) { z = zIterator.next(); final Bounds layerBounds = localDataClient.getLayerBounds(stack, z); final List<TileBounds> tileBoundsList = localDataClient.getTileBounds(stack, z); final BoxDataPyramidForLayer boxPyramid = new BoxDataPyramidForLayer(z, layerBounds, parameters.box.width, parameters.box.height, tileBoundsList, parameters.box.maxLevel, excludeAlreadyRenderedBoxes, boxGenerator.getBaseBoxPath(), boxGenerator.getBoxPathSuffix()); zToPyramidList.add(new Tuple2<>(z, boxPyramid)); // if DMG iGrid files have been requested and this is a first run, // create iGrid file for level zero boxes if (parameters.box.createIGrid && (! 
excludeAlreadyRenderedBoxes)) { int maxRow = 0; int maxColumn = 0; final List<BoxData> levelZeroBoxes = new ArrayList<>(); for (final BoxData boxData : boxPyramid.getPyramidList()) { if (boxData.getLevel() == 0) { levelZeroBoxes.add(boxData); maxRow = Math.max(boxData.getRow(), maxRow); maxColumn = Math.max(boxData.getColumn(), maxColumn); } } final String baseBoxPath = boxGenerator.getBaseBoxPath(); final IGridPaths iGridPaths = new IGridPaths(maxRow + 1, maxColumn + 1); iGridPaths.addBoxes(baseBoxPath, boxGenerator.getBoxPathSuffix(), levelZeroBoxes); final Path iGridDirectory = Paths.get(baseBoxPath, "iGrid"); iGridPaths.saveToFile(iGridDirectory.toFile(), z, boxGenerator.getEmptyImageFile()); } } return zToPyramidList.iterator(); }); // cache pyramid data in worker memory so that is doesn't have to be rebuilt when we partition everything later zToPyramidPairRdd.cache(); final JavaPairRDD<Double, List<Integer>> zToLevelBoxCountsPairRdd = zToPyramidPairRdd.mapValues( (Function<BoxDataPyramidForLayer, List<Integer>>) pyramid -> { LogUtilities.setupExecutorLog4j("partition " + TaskContext.getPartitionId()); final Logger log = LoggerFactory.getLogger(BoxData.class); final List<Integer> levelBucketCounts = pyramid.getLevelBoxCounts(); log.info("layer {} has {} total boxes, level box counts are {}", pyramid.getZ(), pyramid.getSize(), levelBucketCounts); return levelBucketCounts; } ); final Map<Double, List<Integer>> zToLevelBoxCountsMap = zToLevelBoxCountsPairRdd.collectAsMap(); LOG.info(""); // empty statement adds newline to lengthy unterminated stage progress lines in log LOG.info("partitionBoxes: collected level box counts"); final BoxPartitioner boxPartitioner = new BoxPartitioner(sparkContext.defaultParallelism(), zToLevelBoxCountsMap); LOG.info("partitionBoxes: built {}", boxPartitioner); final JavaPairRDD<BoxData, BoxData> boxKeyPairRdd = zToPyramidPairRdd.mapPartitionsToPair( (PairFlatMapFunction<Iterator<Tuple2<Double, BoxDataPyramidForLayer>>, BoxData, 
BoxData>) pyramidIterator -> { final List<Tuple2<BoxData, BoxData>> list = new ArrayList<>(); BoxDataPyramidForLayer pyramid; while (pyramidIterator.hasNext()) { pyramid = pyramidIterator.next()._2; list.addAll(pyramid.getPyramidList() .stream() .map(boxKey -> new Tuple2<>(boxKey, boxKey)) .collect(Collectors.toList())); } return list.iterator(); } ); final JavaPairRDD<BoxData, BoxData> repartitionedBoxKeyPairRdd = boxKeyPairRdd.partitionBy(boxPartitioner); final JavaRDD<BoxData> repartitionedKeysRdd = repartitionedBoxKeyPairRdd.keys(); // write the partitioned data to disk so that Spark will redistribute it properly // and ignore the partitions we used to derive the data repartitionedKeysRdd.saveAsTextFile(partitionedBoxDataDirectory.getAbsolutePath()); LOG.info(""); // empty statement adds newline to lengthy unterminated stage progress lines in log LOG.info("partitionBoxes: saved partitioned box data in {}", partitionedBoxDataDirectory.getAbsolutePath()); // remove cached pyramid data from worker memory since we no longer need it zToPyramidPairRdd.unpersist(false); // load the partitioned serialized box data from disk ... final JavaRDD<String> redistributedBoxDataStringsRdd = sparkContext.textFile(partitionedBoxDataDirectory.getAbsolutePath()); // and deserialize it into the RDD we want for rendering return redistributedBoxDataStringsRdd.mapPartitions( (FlatMapFunction<Iterator<String>, BoxData>) stringIterator -> { final List<BoxData> boxDataList = new ArrayList<>(); while (stringIterator.hasNext()) { boxDataList.add(BoxData.fromString(stringIterator.next())); } return boxDataList.iterator(); } ); }
JavaRDD<BoxData> function(final JavaSparkContext sparkContext, final boolean excludeAlreadyRenderedBoxes) throws IOException, URISyntaxException { final JavaRDD<Double> zValuesRdd = sparkContext.parallelize(zValues); final JavaPairRDD<Double, BoxDataPyramidForLayer> zToPyramidPairRdd = zValuesRdd.mapPartitionsToPair( (PairFlatMapFunction<Iterator<Double>, Double, BoxDataPyramidForLayer>) zIterator -> { LogUtilities.setupExecutorLog4j(STR + TaskContext.getPartitionId()); final List<Tuple2<Double, BoxDataPyramidForLayer>> zToPyramidList = new ArrayList<>(); final RenderDataClient localDataClient = new RenderDataClient(parameters.renderWeb.baseDataUrl, parameters.renderWeb.owner, parameters.renderWeb.project); final String stack = parameters.box.stack; double z; while (zIterator.hasNext()) { z = zIterator.next(); final Bounds layerBounds = localDataClient.getLayerBounds(stack, z); final List<TileBounds> tileBoundsList = localDataClient.getTileBounds(stack, z); final BoxDataPyramidForLayer boxPyramid = new BoxDataPyramidForLayer(z, layerBounds, parameters.box.width, parameters.box.height, tileBoundsList, parameters.box.maxLevel, excludeAlreadyRenderedBoxes, boxGenerator.getBaseBoxPath(), boxGenerator.getBoxPathSuffix()); zToPyramidList.add(new Tuple2<>(z, boxPyramid)); if (parameters.box.createIGrid && (! 
excludeAlreadyRenderedBoxes)) { int maxRow = 0; int maxColumn = 0; final List<BoxData> levelZeroBoxes = new ArrayList<>(); for (final BoxData boxData : boxPyramid.getPyramidList()) { if (boxData.getLevel() == 0) { levelZeroBoxes.add(boxData); maxRow = Math.max(boxData.getRow(), maxRow); maxColumn = Math.max(boxData.getColumn(), maxColumn); } } final String baseBoxPath = boxGenerator.getBaseBoxPath(); final IGridPaths iGridPaths = new IGridPaths(maxRow + 1, maxColumn + 1); iGridPaths.addBoxes(baseBoxPath, boxGenerator.getBoxPathSuffix(), levelZeroBoxes); final Path iGridDirectory = Paths.get(baseBoxPath, "iGrid"); iGridPaths.saveToFile(iGridDirectory.toFile(), z, boxGenerator.getEmptyImageFile()); } } return zToPyramidList.iterator(); }); zToPyramidPairRdd.cache(); final JavaPairRDD<Double, List<Integer>> zToLevelBoxCountsPairRdd = zToPyramidPairRdd.mapValues( (Function<BoxDataPyramidForLayer, List<Integer>>) pyramid -> { LogUtilities.setupExecutorLog4j(STR + TaskContext.getPartitionId()); final Logger log = LoggerFactory.getLogger(BoxData.class); final List<Integer> levelBucketCounts = pyramid.getLevelBoxCounts(); log.info(STR, pyramid.getZ(), pyramid.getSize(), levelBucketCounts); return levelBucketCounts; } ); final Map<Double, List<Integer>> zToLevelBoxCountsMap = zToLevelBoxCountsPairRdd.collectAsMap(); LOG.info(STRpartitionBoxes: collected level box countsSTRpartitionBoxes: built {}", boxPartitioner); final JavaPairRDD<BoxData, BoxData> boxKeyPairRdd = zToPyramidPairRdd.mapPartitionsToPair( (PairFlatMapFunction<Iterator<Tuple2<Double, BoxDataPyramidForLayer>>, BoxData, BoxData>) pyramidIterator -> { final List<Tuple2<BoxData, BoxData>> list = new ArrayList<>(); BoxDataPyramidForLayer pyramid; while (pyramidIterator.hasNext()) { pyramid = pyramidIterator.next()._2; list.addAll(pyramid.getPyramidList() .stream() .map(boxKey -> new Tuple2<>(boxKey, boxKey)) .collect(Collectors.toList())); } return list.iterator(); } ); final JavaPairRDD<BoxData, BoxData> 
repartitionedBoxKeyPairRdd = boxKeyPairRdd.partitionBy(boxPartitioner); final JavaRDD<BoxData> repartitionedKeysRdd = repartitionedBoxKeyPairRdd.keys(); repartitionedKeysRdd.saveAsTextFile(partitionedBoxDataDirectory.getAbsolutePath()); LOG.info(STRpartitionBoxes: saved partitioned box data in {}", partitionedBoxDataDirectory.getAbsolutePath()); zToPyramidPairRdd.unpersist(false); final JavaRDD<String> redistributedBoxDataStringsRdd = sparkContext.textFile(partitionedBoxDataDirectory.getAbsolutePath()); return redistributedBoxDataStringsRdd.mapPartitions( (FlatMapFunction<Iterator<String>, BoxData>) stringIterator -> { final List<BoxData> boxDataList = new ArrayList<>(); while (stringIterator.hasNext()) { boxDataList.add(BoxData.fromString(stringIterator.next())); } return boxDataList.iterator(); } ); }
/** * On workers (in parallel), build box data pyramids using layer tile bounds. * Partition the data so that the box data for each mipmap level is evenly distributed across the cluster. * * @param sparkContext context for current run. * * @param excludeAlreadyRenderedBoxes indicates whether existing rendered box images (presumably from prior runs) * should be excluded from this run. * * @return optimally partitioned box data set for rendering. */
On workers (in parallel), build box data pyramids using layer tile bounds. Partition the data so that the box data for each mipmap level is evenly distributed across the cluster
partitionBoxes
{ "repo_name": "fcollman/render", "path": "render-ws-spark-client/src/main/java/org/janelia/render/client/spark/betterbox/BoxClient.java", "license": "gpl-2.0", "size": 35641 }
[ "java.io.IOException", "java.net.URISyntaxException", "java.nio.file.Path", "java.nio.file.Paths", "java.util.ArrayList", "java.util.Iterator", "java.util.List", "java.util.Map", "java.util.stream.Collectors", "org.apache.spark.TaskContext", "org.apache.spark.api.java.JavaPairRDD", "org.apache.spark.api.java.JavaRDD", "org.apache.spark.api.java.JavaSparkContext", "org.apache.spark.api.java.function.FlatMapFunction", "org.apache.spark.api.java.function.Function", "org.apache.spark.api.java.function.PairFlatMapFunction", "org.janelia.alignment.betterbox.BoxData", "org.janelia.alignment.betterbox.BoxDataPyramidForLayer", "org.janelia.alignment.spec.Bounds", "org.janelia.alignment.spec.TileBounds", "org.janelia.render.client.IGridPaths", "org.janelia.render.client.RenderDataClient", "org.janelia.render.client.spark.LogUtilities", "org.slf4j.Logger", "org.slf4j.LoggerFactory" ]
import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.apache.spark.TaskContext; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.function.FlatMapFunction; import org.apache.spark.api.java.function.Function; import org.apache.spark.api.java.function.PairFlatMapFunction; import org.janelia.alignment.betterbox.BoxData; import org.janelia.alignment.betterbox.BoxDataPyramidForLayer; import org.janelia.alignment.spec.Bounds; import org.janelia.alignment.spec.TileBounds; import org.janelia.render.client.IGridPaths; import org.janelia.render.client.RenderDataClient; import org.janelia.render.client.spark.LogUtilities; import org.slf4j.Logger; import org.slf4j.LoggerFactory;
import java.io.*; import java.net.*; import java.nio.file.*; import java.util.*; import java.util.stream.*; import org.apache.spark.*; import org.apache.spark.api.java.*; import org.apache.spark.api.java.function.*; import org.janelia.alignment.betterbox.*; import org.janelia.alignment.spec.*; import org.janelia.render.client.*; import org.janelia.render.client.spark.*; import org.slf4j.*;
[ "java.io", "java.net", "java.nio", "java.util", "org.apache.spark", "org.janelia.alignment", "org.janelia.render", "org.slf4j" ]
java.io; java.net; java.nio; java.util; org.apache.spark; org.janelia.alignment; org.janelia.render; org.slf4j;
1,812,179
private int compareTimestamps(Timestamp t1, Timestamp t2) { int v = 0; int r = t1.compareTo(t2); if (r < 0) v = -1; else if (r > 0) v = 1; return v; }
int function(Timestamp t1, Timestamp t2) { int v = 0; int r = t1.compareTo(t2); if (r < 0) v = -1; else if (r > 0) v = 1; return v; }
/** * * Compares two {@link Timestamp}s. * * @param t1 The first object to compare. * @param t2 The second object to compare. * @return See below. */
Compares two <code>Timestamp</code>s
compareTimestamps
{ "repo_name": "tp81/openmicroscopy", "path": "components/insight/SRC/org/openmicroscopy/shoola/agents/util/ViewerSorter.java", "license": "gpl-2.0", "size": 19704 }
[ "java.sql.Timestamp" ]
import java.sql.Timestamp;
import java.sql.*;
[ "java.sql" ]
java.sql;
2,510,390
public Object get( String key ) throws MethodInvocationException { return ( active && loopVariableKey.equals(key) ) ? null : super.get(key); }
Object function( String key ) throws MethodInvocationException { return ( active && loopVariableKey.equals(key) ) ? null : super.get(key); }
/** * Get an object from the context, or null if the key is equal to the loop variable * @see org.apache.velocity.context.InternalContextAdapter#get(java.lang.String) * @exception MethodInvocationException passes on potential exception from reference method call */
Get an object from the context, or null if the key is equal to the loop variable
get
{ "repo_name": "zhoupan/min-velocity", "path": "src/main/java/com/github/pfmiles/org/apache/velocity/runtime/directive/Foreach.java", "license": "apache-2.0", "size": 16867 }
[ "com.github.pfmiles.org.apache.velocity.exception.MethodInvocationException" ]
import com.github.pfmiles.org.apache.velocity.exception.MethodInvocationException;
import com.github.pfmiles.org.apache.velocity.exception.*;
[ "com.github.pfmiles" ]
com.github.pfmiles;
639,047
protected String getValueAsString() { Attr attr = element.getAttributeNodeNS(namespaceURI, localName); if (attr == null) { return defaultValue; } return attr.getValue(); }
String function() { Attr attr = element.getAttributeNodeNS(namespaceURI, localName); if (attr == null) { return defaultValue; } return attr.getValue(); }
/** * Returns the value of the DOM attribute containing the point list. */
Returns the value of the DOM attribute containing the point list
getValueAsString
{ "repo_name": "Uni-Sol/batik", "path": "sources/org/apache/batik/dom/svg/SVGOMAnimatedPoints.java", "license": "apache-2.0", "size": 12331 }
[ "org.w3c.dom.Attr" ]
import org.w3c.dom.Attr;
import org.w3c.dom.*;
[ "org.w3c.dom" ]
org.w3c.dom;
613,657
public void postSetSymbologyInfo(DeviceInfo deviceInfo, int Symbology, boolean Status, ICommandContextCallback callback) { ISktScanDevice device = deviceInfo.getSktScanDevice(); ISktScanObject newScanObj = SktClassFactory.createScanObject(); newScanObj.getProperty().setID(ISktScanProperty.propId.kSktScanPropIdSymbologyDevice); newScanObj.getProperty().setType(ISktScanProperty.types.kSktScanPropTypeSymbology); newScanObj.getProperty().getSymbology().setFlags( ISktScanSymbology.flags.kSktScanSymbologyFlagStatus); newScanObj.getProperty().getSymbology().setID(Symbology); if (Status) { newScanObj.getProperty().getSymbology() .setStatus(ISktScanSymbology.status.kSktScanSymbologyStatusEnable); } else { newScanObj.getProperty().getSymbology() .setStatus(ISktScanSymbology.status.kSktScanSymbologyStatusDisable); } CommandContext command = new CommandContext(false, newScanObj, device, null, callback); command.setSymbologyId( Symbology);// keep the symbology ID because the Set Complete won't return it addCommand(command); }
void function(DeviceInfo deviceInfo, int Symbology, boolean Status, ICommandContextCallback callback) { ISktScanDevice device = deviceInfo.getSktScanDevice(); ISktScanObject newScanObj = SktClassFactory.createScanObject(); newScanObj.getProperty().setID(ISktScanProperty.propId.kSktScanPropIdSymbologyDevice); newScanObj.getProperty().setType(ISktScanProperty.types.kSktScanPropTypeSymbology); newScanObj.getProperty().getSymbology().setFlags( ISktScanSymbology.flags.kSktScanSymbologyFlagStatus); newScanObj.getProperty().getSymbology().setID(Symbology); if (Status) { newScanObj.getProperty().getSymbology() .setStatus(ISktScanSymbology.status.kSktScanSymbologyStatusEnable); } else { newScanObj.getProperty().getSymbology() .setStatus(ISktScanSymbology.status.kSktScanSymbologyStatusDisable); } CommandContext command = new CommandContext(false, newScanObj, device, null, callback); command.setSymbologyId( Symbology); addCommand(command); }
/** * postSetSymbologyInfo * * Constructs a request object for setting the Symbology Info in the scanner */
postSetSymbologyInfo Constructs a request object for setting the Symbology Info in the scanner
postSetSymbologyInfo
{ "repo_name": "SocketMobile/samples-android", "path": "warrantyChecker/src/main/java/com/example/socketmobile/android/scanapi/ScanApiHelper.java", "license": "apache-2.0", "size": 54630 }
[ "com.socketmobile.scanapi.ISktScanDevice", "com.socketmobile.scanapi.ISktScanObject", "com.socketmobile.scanapi.ISktScanProperty", "com.socketmobile.scanapi.ISktScanSymbology", "com.socketmobile.scanapi.SktClassFactory" ]
import com.socketmobile.scanapi.ISktScanDevice; import com.socketmobile.scanapi.ISktScanObject; import com.socketmobile.scanapi.ISktScanProperty; import com.socketmobile.scanapi.ISktScanSymbology; import com.socketmobile.scanapi.SktClassFactory;
import com.socketmobile.scanapi.*;
[ "com.socketmobile.scanapi" ]
com.socketmobile.scanapi;
1,101,568
public Bundle getExtras() { return mUserExtras; }
Bundle function() { return mUserExtras; }
/** * Get the current metadata Bundle used by this notification Builder. * * <p>The returned Bundle is shared with this Builder. * * <p>The current contents of this Bundle are copied into the Notification each time * {@link #build()} is called. * * @see Notification#extras */
Get the current metadata Bundle used by this notification Builder. The returned Bundle is shared with this Builder. The current contents of this Bundle are copied into the Notification each time <code>#build()</code> is called
getExtras
{ "repo_name": "daiqiquan/framework-base", "path": "core/java/android/app/Notification.java", "license": "apache-2.0", "size": 264892 }
[ "android.os.Bundle" ]
import android.os.Bundle;
import android.os.*;
[ "android.os" ]
android.os;
1,576,812