method (string, 13-441k chars) | clean_method (string, 7-313k) | doc (string, 17-17.3k) | comment (string, 3-1.42k) | method_name (string, 1-273) | extra (dict) | imports (sequence) | imports_info (string, 19-34.8k) | cluster_imports_info (string, 15-3.66k) | libraries (sequence) | libraries_info (string, 6-661) | id (int64, 0-2.92M)
---|---|---|---|---|---|---|---|---|---|---|---|
public void shutDown() {
for (ITmmModule module : modules) {
if (module.isEnabled()) {
try {
module.shutDown();
}
catch (Exception e) {
LOGGER.error("problem shutting down " + module.getModuleTitle() + ": " + e.getMessage());
}
}
}
try {
EntityManagerFactory emf = entityManager.getEntityManagerFactory();
entityManager.close();
emf.close();
}
catch (Exception e) {
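// exceptions while closing the persistence layer are swallowed during shutdown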
}
} | void function() { for (ITmmModule module : modules) { if (module.isEnabled()) { try { module.shutDown(); } catch (Exception e) { LOGGER.error(STR + module.getModuleTitle() + STR + e.getMessage()); } } } try { EntityManagerFactory emf = entityManager.getEntityManagerFactory(); entityManager.close(); emf.close(); } catch (Exception e) { } } | /**
* shutdown tmm - forces all registered modules to shut down
*/ | shutdown tmm - forces all registered modules to shut down | shutDown | {
"repo_name": "mlaggner/tinyMediaManager",
"path": "src/org/tinymediamanager/core/TmmModuleManager.java",
"license": "apache-2.0",
"size": 4089
} | [
"javax.persistence.EntityManagerFactory"
] | import javax.persistence.EntityManagerFactory; | import javax.persistence.*; | [
"javax.persistence"
] | javax.persistence; | 847,952 |
public ScriptEngine getSharedScriptEngine() {
return this.sharedScriptEngine;
}
| ScriptEngine function() { return this.sharedScriptEngine; } | /**
* ScriptEngines can be used multithreaded, except that they should not share Bindings
*
* @return the sharedScriptEngine
*/ | ScriptEngines can be used multithreaded, except that they should not share Bindings | getSharedScriptEngine | {
"repo_name": "WesGilster/Photonic3D",
"path": "host/src/main/java/org/area515/resinprinter/server/HostProperties.java",
"license": "gpl-3.0",
"size": 40557
} | [
"javax.script.ScriptEngine"
] | import javax.script.ScriptEngine; | import javax.script.*; | [
"javax.script"
] | javax.script; | 2,871,685 |
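The javadoc above states that a ScriptEngine can be shared across threads as long as the threads do not share Bindings. A minimal sketch of that pattern using only the standard javax.script API (an illustrative stand-alone example, not code from Photonic3D, and it assumes a JavaScript engine is registered with the JVM):

```java
import javax.script.Bindings;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;

public class SharedEngineDemo {
    public static void main(String[] args) throws InterruptedException {
        // One engine instance is shared by every thread.
        ScriptEngine sharedEngine = new ScriptEngineManager().getEngineByName("JavaScript");

        Runnable task = () -> {
            try {
                // Each thread creates and uses its own Bindings instead of the engine's global scope.
                Bindings bindings = sharedEngine.createBindings();
                bindings.put("name", Thread.currentThread().getName());
                System.out.println(sharedEngine.eval("'hello from ' + name", bindings));
            } catch (ScriptException e) {
                e.printStackTrace();
            }
        };

        Thread t1 = new Thread(task);
        Thread t2 = new Thread(task);
        t1.start();
        t2.start();
        t1.join();
        t2.join();
    }
}
```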
@Test
public void flappingTest() throws Exception
{
final CuratorFramework client =
CuratorFrameworkFactory.builder()
.connectString(server.getConnectString())
.retryPolicy(new RetryNTimes(1, 500))
.sessionTimeoutMs(30000)
.build();
final TestLeaderSelectorListener listener = new TestLeaderSelectorListener();
LeaderSelector leaderSelector1 =
new LeaderSelector(client, ChaosMonkeyCnxnFactory.CHAOS_ZNODE, listener);
LeaderSelector leaderSelector2 = null;
client.start();
try
{
client.create().forPath(ChaosMonkeyCnxnFactory.CHAOS_ZNODE);
leaderSelector1.start();
// At this point the ChaosMonkeyZookeeperServer must close the connection
// right after the lock znode is created.
assertTrue(listener.reconnected.await(10, TimeUnit.SECONDS), "Connection has not been lost");
// Check that leader ship has failed
assertEquals(listener.takeLeadership.getCount(), 1);
// Wait FailedDelete
Thread.sleep(ChaosMonkeyCnxnFactory.LOCKOUT_DURATION_MS * 2);
// Check that there is no znode
final int children = client.getChildren().forPath(ChaosMonkeyCnxnFactory.CHAOS_ZNODE).size();
assertEquals(children, 0,
"Still " + children + " znodes under " + ChaosMonkeyCnxnFactory.CHAOS_ZNODE + " lock");
// Check that a new LeaderSelector can be started
leaderSelector2 = new LeaderSelector(client, ChaosMonkeyCnxnFactory.CHAOS_ZNODE,
listener);
leaderSelector2.start();
assertTrue(listener.takeLeadership.await(1, TimeUnit.SECONDS));
}
finally
{
try
{
leaderSelector1.close();
}
catch ( IllegalStateException e )
{
fail(e.getMessage());
}
try
{
if ( leaderSelector2 != null )
{
leaderSelector2.close();
}
}
catch ( IllegalStateException e )
{
fail(e.getMessage());
}
client.close();
}
}
private class TestLeaderSelectorListener implements LeaderSelectorListener
{
final CountDownLatch takeLeadership = new CountDownLatch(1);
final CountDownLatch reconnected = new CountDownLatch(1); | void function() throws Exception { final CuratorFramework client = CuratorFrameworkFactory.builder() .connectString(server.getConnectString()) .retryPolicy(new RetryNTimes(1, 500)) .sessionTimeoutMs(30000) .build(); final TestLeaderSelectorListener listener = new TestLeaderSelectorListener(); LeaderSelector leaderSelector1 = new LeaderSelector(client, ChaosMonkeyCnxnFactory.CHAOS_ZNODE, listener); LeaderSelector leaderSelector2 = null; client.start(); try { client.create().forPath(ChaosMonkeyCnxnFactory.CHAOS_ZNODE); leaderSelector1.start(); assertTrue(listener.reconnected.await(10, TimeUnit.SECONDS), STR); assertEquals(listener.takeLeadership.getCount(), 1); Thread.sleep(ChaosMonkeyCnxnFactory.LOCKOUT_DURATION_MS * 2); final int children = client.getChildren().forPath(ChaosMonkeyCnxnFactory.CHAOS_ZNODE).size(); assertEquals(children, 0, STR + children + STR + ChaosMonkeyCnxnFactory.CHAOS_ZNODE + STR); leaderSelector2 = new LeaderSelector(client, ChaosMonkeyCnxnFactory.CHAOS_ZNODE, listener); leaderSelector2.start(); assertTrue(listener.takeLeadership.await(1, TimeUnit.SECONDS)); } finally { try { leaderSelector1.close(); } catch ( IllegalStateException e ) { fail(e.getMessage()); } try { if ( leaderSelector2 != null ) { leaderSelector2.close(); } } catch ( IllegalStateException e ) { fail(e.getMessage()); } client.close(); } } private class TestLeaderSelectorListener implements LeaderSelectorListener { final CountDownLatch takeLeadership = new CountDownLatch(1); final CountDownLatch reconnected = new CountDownLatch(1); | /**
* Create a LeaderSelector but close the connection right after the "lock" znode
* has been created.
*
* @throws Exception
*/ | Create a LeaderSelector but close the connection right after the "lock" znode has been created | flappingTest | {
"repo_name": "mosoft521/curator",
"path": "curator-recipes/src/test/java/org/apache/curator/framework/recipes/leader/TestLeaderSelectorEdges.java",
"license": "apache-2.0",
"size": 9767
} | [
"java.util.concurrent.CountDownLatch",
"java.util.concurrent.TimeUnit",
"org.apache.curator.framework.CuratorFramework",
"org.apache.curator.framework.CuratorFrameworkFactory",
"org.apache.curator.retry.RetryNTimes",
"org.junit.jupiter.api.Assertions"
] | import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.retry.RetryNTimes; import org.junit.jupiter.api.Assertions; | import java.util.concurrent.*; import org.apache.curator.framework.*; import org.apache.curator.retry.*; import org.junit.jupiter.api.*; | [
"java.util",
"org.apache.curator",
"org.junit.jupiter"
] | java.util; org.apache.curator; org.junit.jupiter; | 821,151 |
private void writeIfNotNull(@Nullable String value) throws IOException {
if (value != null) {
writeLine(value);
}
} | void function(@Nullable String value) throws IOException { if (value != null) { writeLine(value); } } | /**
* Writes {@code value} followed by a newline character if {@code value} is not null.
*/ | Writes value followed by a newline character if value is not null | writeIfNotNull | {
"repo_name": "chamikaramj/incubator-beam",
"path": "sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java",
"license": "apache-2.0",
"size": 41618
} | [
"java.io.IOException",
"javax.annotation.Nullable"
] | import java.io.IOException; import javax.annotation.Nullable; | import java.io.*; import javax.annotation.*; | [
"java.io",
"javax.annotation"
] | java.io; javax.annotation; | 2,224,566 |
public static FunctionInfo of(Signature signature, List<DataType<?>> argumentTypes, DataType<?> returnType) {
return new FunctionInfo(
new FunctionIdent(signature.getName(), argumentTypes),
returnType,
signature.getKind(),
signature.getFeatures()
);
}
public FunctionInfo(FunctionIdent ident, DataType<?> returnType, FunctionType type, Set<Scalar.Feature> features) {
assert features.size() < 32 : "features size must not exceed 32";
this.ident = ident;
this.returnType = returnType;
this.type = type;
this.features = features;
} | static FunctionInfo function(Signature signature, List<DataType<?>> argumentTypes, DataType<?> returnType) { return new FunctionInfo( new FunctionIdent(signature.getName(), argumentTypes), returnType, signature.getKind(), signature.getFeatures() ); } public FunctionInfo(FunctionIdent ident, DataType<?> returnType, FunctionType type, Set<Scalar.Feature> features) { assert features.size() < 32 : STR; this.ident = ident; this.returnType = returnType; this.type = type; this.features = features; } | /**
* Create FunctionInfo based on a declared signature which may contain type variable constraints.
* Thus concrete argument types and return type arguments are needed.
*/ | Create FunctionInfo based on a declared signature which may contain type variable constraints. Thus concrete argument types and return type arguments are needed | of | {
"repo_name": "EvilMcJerkface/crate",
"path": "server/src/main/java/io/crate/metadata/FunctionInfo.java",
"license": "apache-2.0",
"size": 5120
} | [
"io.crate.metadata.functions.Signature",
"io.crate.types.DataType",
"java.util.List",
"java.util.Set"
] | import io.crate.metadata.functions.Signature; import io.crate.types.DataType; import java.util.List; import java.util.Set; | import io.crate.metadata.functions.*; import io.crate.types.*; import java.util.*; | [
"io.crate.metadata",
"io.crate.types",
"java.util"
] | io.crate.metadata; io.crate.types; java.util; | 1,742,037 |
public Language getHmiDisplayLanguageDesired() {
return (Language) getObject(Language.class, KEY_HMI_DISPLAY_LANGUAGE_DESIRED);
}
| Language function() { return (Language) getObject(Language.class, KEY_HMI_DISPLAY_LANGUAGE_DESIRED); } | /**
* Gets an enumeration indicating what language the application intends to
* use for user interaction ( Display)
*
* @return Language - a Language value representing an enumeration
* indicating what language the application intends to use for user
* interaction ( Display)
* @since SmartDeviceLink 2.0
*/ | Gets an enumeration indicating what language the application intends to use for user interaction ( Display) | getHmiDisplayLanguageDesired | {
"repo_name": "smartdevicelink/sdl_android",
"path": "base/src/main/java/com/smartdevicelink/proxy/rpc/RegisterAppInterface.java",
"license": "bsd-3-clause",
"size": 34039
} | [
"com.smartdevicelink.proxy.rpc.enums.Language"
] | import com.smartdevicelink.proxy.rpc.enums.Language; | import com.smartdevicelink.proxy.rpc.enums.*; | [
"com.smartdevicelink.proxy"
] | com.smartdevicelink.proxy; | 2,806,414 |
@Override
public void visitNewArrayFunctionRef(ENewArrayFunctionRef userNewArrayFunctionRefNode, SemanticScope semanticScope) {
String canonicalTypeName = userNewArrayFunctionRefNode.getCanonicalTypeName();
if (semanticScope.getCondition(userNewArrayFunctionRefNode, Write.class)) {
throw userNewArrayFunctionRefNode.createError(new IllegalArgumentException(
"cannot assign a value to new array function reference with target type [ + " + canonicalTypeName + "]"));
}
if (semanticScope.getCondition(userNewArrayFunctionRefNode, Read.class) == false) {
throw userNewArrayFunctionRefNode.createError(new IllegalArgumentException(
"not a statement: new array function reference with target type [" + canonicalTypeName + "] not used"));
}
ScriptScope scriptScope = semanticScope.getScriptScope();
TargetType targetType = semanticScope.getDecoration(userNewArrayFunctionRefNode, TargetType.class);
Class<?> valueType;
Class<?> clazz = scriptScope.getPainlessLookup().canonicalTypeNameToType(canonicalTypeName);
semanticScope.putDecoration(userNewArrayFunctionRefNode, new ReturnType(clazz));
if (clazz == null) {
throw userNewArrayFunctionRefNode.createError(new IllegalArgumentException("Not a type [" + canonicalTypeName + "]."));
}
String name = scriptScope.getNextSyntheticName("newarray");
scriptScope.getFunctionTable().addFunction(name, clazz, Collections.singletonList(int.class), true, true);
semanticScope.putDecoration(userNewArrayFunctionRefNode, new MethodNameDecoration(name));
if (targetType == null) {
String defReferenceEncoding = "Sthis." + name + ",0";
valueType = String.class;
scriptScope.putDecoration(userNewArrayFunctionRefNode, new EncodingDecoration(defReferenceEncoding));
} else {
FunctionRef ref = FunctionRef.create(scriptScope.getPainlessLookup(), scriptScope.getFunctionTable(),
userNewArrayFunctionRefNode.getLocation(), targetType.getTargetType(), "this", name, 0,
scriptScope.getCompilerSettings().asMap());
valueType = targetType.getTargetType();
semanticScope.putDecoration(userNewArrayFunctionRefNode, new ReferenceDecoration(ref));
}
semanticScope.putDecoration(userNewArrayFunctionRefNode, new ValueType(valueType));
} | void function(ENewArrayFunctionRef userNewArrayFunctionRefNode, SemanticScope semanticScope) { String canonicalTypeName = userNewArrayFunctionRefNode.getCanonicalTypeName(); if (semanticScope.getCondition(userNewArrayFunctionRefNode, Write.class)) { throw userNewArrayFunctionRefNode.createError(new IllegalArgumentException( STR + canonicalTypeName + "]")); } if (semanticScope.getCondition(userNewArrayFunctionRefNode, Read.class) == false) { throw userNewArrayFunctionRefNode.createError(new IllegalArgumentException( STR + canonicalTypeName + STR)); } ScriptScope scriptScope = semanticScope.getScriptScope(); TargetType targetType = semanticScope.getDecoration(userNewArrayFunctionRefNode, TargetType.class); Class<?> valueType; Class<?> clazz = scriptScope.getPainlessLookup().canonicalTypeNameToType(canonicalTypeName); semanticScope.putDecoration(userNewArrayFunctionRefNode, new ReturnType(clazz)); if (clazz == null) { throw userNewArrayFunctionRefNode.createError(new IllegalArgumentException(STR + canonicalTypeName + "].")); } String name = scriptScope.getNextSyntheticName(STR); scriptScope.getFunctionTable().addFunction(name, clazz, Collections.singletonList(int.class), true, true); semanticScope.putDecoration(userNewArrayFunctionRefNode, new MethodNameDecoration(name)); if (targetType == null) { String defReferenceEncoding = STR + name + ",0"; valueType = String.class; scriptScope.putDecoration(userNewArrayFunctionRefNode, new EncodingDecoration(defReferenceEncoding)); } else { FunctionRef ref = FunctionRef.create(scriptScope.getPainlessLookup(), scriptScope.getFunctionTable(), userNewArrayFunctionRefNode.getLocation(), targetType.getTargetType(), "this", name, 0, scriptScope.getCompilerSettings().asMap()); valueType = targetType.getTargetType(); semanticScope.putDecoration(userNewArrayFunctionRefNode, new ReferenceDecoration(ref)); } semanticScope.putDecoration(userNewArrayFunctionRefNode, new ValueType(valueType)); } | /**
* Visits a new array function ref expression which covers only a new array function reference
* and generates an internal method to define the new array.
* Checks: type validation
*/ | Visits a new array function ref expression which covers only a new array function reference and generates an internal method to define the new array. Checks: type validation | visitNewArrayFunctionRef | {
"repo_name": "nknize/elasticsearch",
"path": "modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/DefaultSemanticAnalysisPhase.java",
"license": "apache-2.0",
"size": 149793
} | [
"java.util.Collections",
"org.elasticsearch.painless.FunctionRef",
"org.elasticsearch.painless.node.ENewArrayFunctionRef",
"org.elasticsearch.painless.symbol.Decorations",
"org.elasticsearch.painless.symbol.ScriptScope",
"org.elasticsearch.painless.symbol.SemanticScope"
] | import java.util.Collections; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.node.ENewArrayFunctionRef; import org.elasticsearch.painless.symbol.Decorations; import org.elasticsearch.painless.symbol.ScriptScope; import org.elasticsearch.painless.symbol.SemanticScope; | import java.util.*; import org.elasticsearch.painless.*; import org.elasticsearch.painless.node.*; import org.elasticsearch.painless.symbol.*; | [
"java.util",
"org.elasticsearch.painless"
] | java.util; org.elasticsearch.painless; | 470,076 |
protected String checkContentType(RIDRequestObject reqObj, String[] types) {
List allowed = reqObj.getAllowedContentTypes();
String s;
if ( log.isDebugEnabled() ) log.debug(" check against:"+allowed);
for ( int i = 0, len = types.length ; i < len ; i++ ) {
s = types[i];
if ( log.isDebugEnabled() ) log.debug(" check "+s+":"+allowed.contains( s )+" ,"+s.substring( 0, s.indexOf( "/") )+"*:"+allowed.contains("**") ) {
return types[0];
}
return null;
} | String function(RIDRequestObject reqObj, String[] types) { List allowed = reqObj.getAllowedContentTypes(); String s; if ( log.isDebugEnabled() ) log.debug(STR+allowed); for ( int i = 0, len = types.length ; i < len ; i++ ) { s = types[i]; if ( log.isDebugEnabled() ) log.debug(STR+s+":"+allowed.contains( s )+STR+s.substring( 0, s.indexOf( "/") )+"*:"+allowed.contains("**") ) { return types[0]; } return null; } | /**
* Checks if one of the given content types are allowed.
* <p>
*
*
* @param reqObj The RID request object.
* @param types Array of content types that can be used.
*
* @return The content type that is allowed by the request or null.
*/ | Checks if one of the given content types are allowed. | checkContentType | {
"repo_name": "medicayun/medicayundicom",
"path": "dcm4jboss-all/tags/DCM4JBOSS_2_4_7/dcm4jboss-wado/src/java/org/dcm4chex/wado/mbean/RIDSupport.java",
"license": "apache-2.0",
"size": 26825
} | [
"java.util.List",
"org.dcm4chex.wado.common.RIDRequestObject"
] | import java.util.List; import org.dcm4chex.wado.common.RIDRequestObject; | import java.util.*; import org.dcm4chex.wado.common.*; | [
"java.util",
"org.dcm4chex.wado"
] | java.util; org.dcm4chex.wado; | 2,463,123 |
public Serializable context_getEJBLocalObject() throws RemoteException; | Serializable function() throws RemoteException; | /**
* Insert the method's description here.
* Creation date: (08/07/2002)
*/ | Insert the method's description here. Creation date: (08/07/2002) | context_getEJBLocalObject | {
"repo_name": "OpenLiberty/open-liberty",
"path": "dev/com.ibm.ws.ejbcontainer.legacy_fat/test-applications/EJB2XSFRemoteSpecEJB.jar/src/com/ibm/ejb2x/base/spec/sfr/ejb/SFRa.java",
"license": "epl-1.0",
"size": 10102
} | [
"java.io.Serializable",
"java.rmi.RemoteException"
] | import java.io.Serializable; import java.rmi.RemoteException; | import java.io.*; import java.rmi.*; | [
"java.io",
"java.rmi"
] | java.io; java.rmi; | 2,204,146 |
public static final String getQueryString(Map params) {
if(params == null) {
return null;
}
StringBuffer queryString = new StringBuffer();
for(Iterator keys = params.keySet().iterator(); keys.hasNext();) {
String key = (String) keys.next();
String value = (String) params.get(key);
if (queryString.length() == 0) {
queryString.append("?");
} else {
queryString.append("&");
}
queryString.append(key);
queryString.append("=");
queryString.append(value);
}
return queryString.toString();
}
| static final String function(Map params) { if(params == null) { return null; } StringBuffer queryString = new StringBuffer(); for(Iterator keys = params.keySet().iterator(); keys.hasNext();) { String key = (String) keys.next(); String value = (String) params.get(key); if (queryString.length() == 0) { queryString.append("?"); } else { queryString.append("&"); } queryString.append(key); queryString.append("="); queryString.append(value); } return queryString.toString(); } | /**
* Compose a map of key=value params into a query string.
*/ | Compose a map of key=value params into a query string | getQueryString | {
"repo_name": "paulnguyen/cmpe279",
"path": "eclipse/Roller/src/org/apache/roller/util/URLUtilities.java",
"license": "apache-2.0",
"size": 13040
} | [
"java.util.Iterator",
"java.util.Map"
] | import java.util.Iterator; import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 12,261 |
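As a concrete illustration of getQueryString above, a small hypothetical caller (not part of the Roller sources) shows the expected output; note that the method performs no URL-encoding and emits parameters in whatever order the map's iterator yields, so an ordered map gives a predictable result:

```java
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.roller.util.URLUtilities;

public class QueryStringDemo {
    public static void main(String[] args) {
        // LinkedHashMap preserves insertion order, so the output order is predictable.
        Map<String, String> params = new LinkedHashMap<>();
        params.put("entry", "my-first-post");
        params.put("page", "2");

        // Prints "?entry=my-first-post&page=2" -- keys and values are not URL-encoded.
        System.out.println(URLUtilities.getQueryString(params));

        // A null map returns null; an empty map yields an empty string.
        System.out.println(URLUtilities.getQueryString(null));
    }
}
```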
@Test
public void testStatementWritesVariable() throws Exception {
VPMGraph graph = TestUtil.prepareVPMGraph(BASE_PATH_EXTENDED + "StatementWritesVariable/");
VPMAnalyzerResult result = analyzer.analyze(graph);
assertNodeCount(graph, 4);
assertDependency(result, DependencyType.StatementWritesVariable, 2);
} | void function() throws Exception { VPMGraph graph = TestUtil.prepareVPMGraph(BASE_PATH_EXTENDED + STR); VPMAnalyzerResult result = analyzer.analyze(graph); assertNodeCount(graph, 4); assertDependency(result, DependencyType.StatementWritesVariable, 2); } | /**
* Test method writing a new value to a variable.
*
* @throws Exception
* Identifies a failed diffing.
*/ | Test method writing a new value to a variable | testStatementWritesVariable | {
"repo_name": "kopl/SPLevo",
"path": "JaMoPPCartridge/org.splevo.jamopp.vpm.analyzer.programdependency.tests/src/org/splevo/jamopp/vpm/analyzer/programdependency/tests/RobillardExtendedSelectorTest.java",
"license": "epl-1.0",
"size": 22481
} | [
"org.splevo.jamopp.vpm.analyzer.programdependency.references.DependencyType",
"org.splevo.jamopp.vpm.analyzer.programdependency.tests.TestUtil",
"org.splevo.vpm.analyzer.VPMAnalyzerResult",
"org.splevo.vpm.analyzer.graph.VPMGraph"
] | import org.splevo.jamopp.vpm.analyzer.programdependency.references.DependencyType; import org.splevo.jamopp.vpm.analyzer.programdependency.tests.TestUtil; import org.splevo.vpm.analyzer.VPMAnalyzerResult; import org.splevo.vpm.analyzer.graph.VPMGraph; | import org.splevo.jamopp.vpm.analyzer.programdependency.references.*; import org.splevo.jamopp.vpm.analyzer.programdependency.tests.*; import org.splevo.vpm.analyzer.*; import org.splevo.vpm.analyzer.graph.*; | [
"org.splevo.jamopp",
"org.splevo.vpm"
] | org.splevo.jamopp; org.splevo.vpm; | 2,268,156 |
@Test
public void updateSearchIndexTest() throws Exception {
final SearchIndexer searchIndexer = Mockito.mock(SearchIndexer.class);
final JobContextHolderImpl jobContextHolderImpl = new JobContextHolderImpl(null,searchIndexer,null,null);
jobContextHolderImpl.updateSearchIndex();
Mockito.verify(searchIndexer,Mockito.only()).updateSearchIndex();
} | void function() throws Exception { final SearchIndexer searchIndexer = Mockito.mock(SearchIndexer.class); final JobContextHolderImpl jobContextHolderImpl = new JobContextHolderImpl(null,searchIndexer,null,null); jobContextHolderImpl.updateSearchIndex(); Mockito.verify(searchIndexer,Mockito.only()).updateSearchIndex(); } | /**
* Update search index test.
*
* @throws Exception the exception
*/ | Update search index test | updateSearchIndexTest | {
"repo_name": "Hack23/cia",
"path": "service.impl/src/test/java/com/hack23/cia/service/impl/task/JobContextHolderImplTest.java",
"license": "apache-2.0",
"size": 2806
} | [
"com.hack23.cia.service.data.api.SearchIndexer",
"org.mockito.Mockito"
] | import com.hack23.cia.service.data.api.SearchIndexer; import org.mockito.Mockito; | import com.hack23.cia.service.data.api.*; import org.mockito.*; | [
"com.hack23.cia",
"org.mockito"
] | com.hack23.cia; org.mockito; | 2,386,318 |
private final void setDefaultAntiAliasingState() {
// Most accurate technique, but not available on all OSes.
aaHints = RSyntaxUtilities.getDesktopAntiAliasHints();
if (aaHints==null) {
Map<RenderingHints.Key, Object> temp =
new HashMap<RenderingHints.Key, Object>();
// In Java 6+, you can figure out what text AA hint Swing uses for
// JComponents...
JLabel label = new JLabel();
FontMetrics fm = label.getFontMetrics(label.getFont());
Object hint = null;
//FontRenderContext frc = fm.getFontRenderContext();
//hint = fm.getAntiAliasingHint();
try {
Method m = FontMetrics.class.getMethod("getFontRenderContext");
FontRenderContext frc = (FontRenderContext)m.invoke(fm);
m = FontRenderContext.class.getMethod("getAntiAliasingHint");
hint = m.invoke(frc);
} catch (RuntimeException re) {
throw re; // FindBugs
} catch (Exception e) {
// Swallow, either Java 1.5, or running in an applet
}
// If not running Java 6+, default to AA enabled on Windows where
// the software AA is pretty fast, and default (e.g. disabled) on
// non-Windows. Note that OS X always uses AA no matter what
// rendering hints you give it, so this is a moot point there.
//System.out.println("Rendering hint: " + hint);
if (hint==null) {
String os = System.getProperty("os.name").toLowerCase();
if (os.contains("windows")) {
hint = RenderingHints.VALUE_TEXT_ANTIALIAS_ON;
}
else {
hint = RenderingHints.VALUE_TEXT_ANTIALIAS_DEFAULT;
}
}
temp.put(RenderingHints.KEY_TEXT_ANTIALIASING, hint);
aaHints = temp;
}
// We must be connected to a screen resource for our graphics
// to be non-null.
if (isDisplayable()) {
refreshFontMetrics(getGraphics2D(getGraphics()));
}
repaint();
} | final void function() { aaHints = RSyntaxUtilities.getDesktopAntiAliasHints(); if (aaHints==null) { Map<RenderingHints.Key, Object> temp = new HashMap<RenderingHints.Key, Object>(); JLabel label = new JLabel(); FontMetrics fm = label.getFontMetrics(label.getFont()); Object hint = null; try { Method m = FontMetrics.class.getMethod(STR); FontRenderContext frc = (FontRenderContext)m.invoke(fm); m = FontRenderContext.class.getMethod(STR); hint = m.invoke(frc); } catch (RuntimeException re) { throw re; } catch (Exception e) { } if (hint==null) { String os = System.getProperty(STR).toLowerCase(); if (os.contains(STR)) { hint = RenderingHints.VALUE_TEXT_ANTIALIAS_ON; } else { hint = RenderingHints.VALUE_TEXT_ANTIALIAS_DEFAULT; } } temp.put(RenderingHints.KEY_TEXT_ANTIALIASING, hint); aaHints = temp; } if (isDisplayable()) { refreshFontMetrics(getGraphics2D(getGraphics())); } repaint(); } | /**
* Sets anti-aliasing to whatever the user's desktop value is.
*
* @see #getAntiAliasingEnabled()
*/ | Sets anti-aliasing to whatever the user's desktop value is | setDefaultAntiAliasingState | {
"repo_name": "buffis/ESPlorer",
"path": "ESPlorer/src/org/fife/ui/rsyntaxtextarea/RSyntaxTextArea.java",
"license": "gpl-2.0",
"size": 92142
} | [
"java.awt.FontMetrics",
"java.awt.RenderingHints",
"java.awt.font.FontRenderContext",
"java.lang.reflect.Method",
"java.util.HashMap",
"java.util.Map",
"javax.swing.JLabel"
] | import java.awt.FontMetrics; import java.awt.RenderingHints; import java.awt.font.FontRenderContext; import java.lang.reflect.Method; import java.util.HashMap; import java.util.Map; import javax.swing.JLabel; | import java.awt.*; import java.awt.font.*; import java.lang.reflect.*; import java.util.*; import javax.swing.*; | [
"java.awt",
"java.lang",
"java.util",
"javax.swing"
] | java.awt; java.lang; java.util; javax.swing; | 269,566 |
public static OneResponse delVnet(Client client, int id, int zoneId, int vnetId)
{
return client.call(DELVNET, id, zoneId, vnetId);
} | static OneResponse function(Client client, int id, int zoneId, int vnetId) { return client.call(DELVNET, id, zoneId, vnetId); } | /**
* Deletes a vnet from this VDC
*
* @param client XML-RPC Client.
* @param id The vdc id of the target vdc
* @param zoneId The vnet's zone
* @param vnetId The vnet to delete
* @return If an error occurs the error message contains the reason.
*/ | Deletes a vnet from this VDC | delVnet | {
"repo_name": "baby-gnu/one",
"path": "src/oca/java/src/org/opennebula/client/vdc/Vdc.java",
"license": "apache-2.0",
"size": 14690
} | [
"org.opennebula.client.Client",
"org.opennebula.client.OneResponse"
] | import org.opennebula.client.Client; import org.opennebula.client.OneResponse; | import org.opennebula.client.*; | [
"org.opennebula.client"
] | org.opennebula.client; | 2,532,451 |
@Test
public void testIsAroundOK() {
try {
final Calendar calendar1 = Calendar.getInstance();
final Calendar calendar2 = Calendar.getInstance();
calendar1.set(2016, 05, 29, 5, 5, 6);
calendar2.set(2016, 05, 29, 5, 5, 5);
LocalDateTime localDateTime1 = DateUtils.getLocalDateTime(calendar1);
final LocalDateTime localDateTime2 = DateUtils.getLocalDateTime(calendar2);
Assertor.<ChronoLocalDateTime<?>> ofTemporal().isAround(localDateTime2, Duration.ofSeconds(5)).that(localDateTime1)
.orElseThrow();
Assertor.<ChronoLocalDateTime<?>> ofTemporal().not().isAround(localDateTime2, Duration.of(5, ChronoUnit.MILLIS))
.that(localDateTime1).orElseThrow();
calendar1.set(2016, 05, 29, 5, 5, 1);
localDateTime1 = DateUtils.getLocalDateTime(calendar1);
Assertor.<ChronoLocalDateTime<?>> ofTemporal().isAround(localDateTime2, Duration.ofSeconds(5)).that(localDateTime1)
.orElseThrow();
assertFalse(Assertor.<ChronoLocalDateTime<?>> ofTemporal().isAround(localDateTime2, null).that(localDateTime1).isOK());
} catch (IllegalArgumentException e) {
fail("The test isn't correct");
}
}
| void function() { try { final Calendar calendar1 = Calendar.getInstance(); final Calendar calendar2 = Calendar.getInstance(); calendar1.set(2016, 05, 29, 5, 5, 6); calendar2.set(2016, 05, 29, 5, 5, 5); LocalDateTime localDateTime1 = DateUtils.getLocalDateTime(calendar1); final LocalDateTime localDateTime2 = DateUtils.getLocalDateTime(calendar2); Assertor.<ChronoLocalDateTime<?>> ofTemporal().isAround(localDateTime2, Duration.ofSeconds(5)).that(localDateTime1) .orElseThrow(); Assertor.<ChronoLocalDateTime<?>> ofTemporal().not().isAround(localDateTime2, Duration.of(5, ChronoUnit.MILLIS)) .that(localDateTime1).orElseThrow(); calendar1.set(2016, 05, 29, 5, 5, 1); localDateTime1 = DateUtils.getLocalDateTime(calendar1); Assertor.<ChronoLocalDateTime<?>> ofTemporal().isAround(localDateTime2, Duration.ofSeconds(5)).that(localDateTime1) .orElseThrow(); assertFalse(Assertor.<ChronoLocalDateTime<?>> ofTemporal().isAround(localDateTime2, null).that(localDateTime1).isOK()); } catch (IllegalArgumentException e) { fail(STR); } } | /**
* Test method for {@link AssertCalendar#isAround}.
*/ | Test method for <code>AssertCalendar#isAround</code> | testIsAroundOK | {
"repo_name": "Gilandel/utils-assertor",
"path": "src/test/java/fr/landel/utils/assertor/predicate/PredicateAssertorTemporalTest.java",
"license": "apache-2.0",
"size": 31319
} | [
"fr.landel.utils.assertor.Assertor",
"fr.landel.utils.commons.DateUtils",
"java.time.Duration",
"java.time.LocalDateTime",
"java.time.chrono.ChronoLocalDateTime",
"java.time.temporal.ChronoUnit",
"java.util.Calendar",
"org.junit.Assert"
] | import fr.landel.utils.assertor.Assertor; import fr.landel.utils.commons.DateUtils; import java.time.Duration; import java.time.LocalDateTime; import java.time.chrono.ChronoLocalDateTime; import java.time.temporal.ChronoUnit; import java.util.Calendar; import org.junit.Assert; | import fr.landel.utils.assertor.*; import fr.landel.utils.commons.*; import java.time.*; import java.time.chrono.*; import java.time.temporal.*; import java.util.*; import org.junit.*; | [
"fr.landel.utils",
"java.time",
"java.util",
"org.junit"
] | fr.landel.utils; java.time; java.util; org.junit; | 2,832,222 |
@Override
public void uid(Field uid) {
this.uid = uid;
} | void function(Field uid) { this.uid = uid; } | /**
* Really, just the uid mapper should set this.
*/ | Really, just the uid mapper should set this | uid | {
"repo_name": "jpountz/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java",
"license": "apache-2.0",
"size": 22520
} | [
"org.apache.lucene.document.Field"
] | import org.apache.lucene.document.Field; | import org.apache.lucene.document.*; | [
"org.apache.lucene"
] | org.apache.lucene; | 448,274 |
public List getAll() throws Exception
{
log.trace("In OrganManagerImpl.getAll");
return super.getAll(Organ.class);
} | List function() throws Exception { log.trace(STR); return super.getAll(Organ.class); } | /**
* Get all Organ objects
*
*
* @return the matching Organ objects, or null if not found.
*
* @exception Exception
* when anything goes wrong.
*/ | Get all Organ objects | getAll | {
"repo_name": "NCIP/camod",
"path": "software/camod/src/gov/nih/nci/camod/service/impl/OrganManagerImpl.java",
"license": "bsd-3-clause",
"size": 5248
} | [
"gov.nih.nci.camod.domain.Organ",
"java.util.List"
] | import gov.nih.nci.camod.domain.Organ; import java.util.List; | import gov.nih.nci.camod.domain.*; import java.util.*; | [
"gov.nih.nci",
"java.util"
] | gov.nih.nci; java.util; | 1,662,022 |
void setDefaultBacklogDebugger(InterceptStrategy backlogDebugger); | void setDefaultBacklogDebugger(InterceptStrategy backlogDebugger); | /**
* Sets a custom backlog debugger to be used as the default backlog debugger.
* <p/>
* <b>Note:</b> This must be set before any routes are created,
* changing the default backlog debugger for existing routes is not supported.
*
* @param backlogDebugger the custom debugger to use as default backlog debugger
*/ | Sets a custom backlog debugger to be used as the default backlog debugger. Note: This must be set before any routes are created, changing the default backlog debugger for existing routes is not supported | setDefaultBacklogDebugger | {
"repo_name": "ssharma/camel",
"path": "camel-core/src/main/java/org/apache/camel/CamelContext.java",
"license": "apache-2.0",
"size": 72840
} | [
"org.apache.camel.spi.InterceptStrategy"
] | import org.apache.camel.spi.InterceptStrategy; | import org.apache.camel.spi.*; | [
"org.apache.camel"
] | org.apache.camel; | 1,641,449 |
public Entity getEntity(World world) {
return handle.getEntityModifier(world).read(0);
} | Entity function(World world) { return handle.getEntityModifier(world).read(0); } | /**
* Retrieve the entity.
* @param world - the current world of the entity.
* @return The entity.
*/ | Retrieve the entity | getEntity | {
"repo_name": "codebucketdev/HoloAPI",
"path": "src/com/comphenix/packetwrapper/WrapperPlayServerUpdateAttributes.java",
"license": "gpl-3.0",
"size": 2069
} | [
"org.bukkit.World",
"org.bukkit.entity.Entity"
] | import org.bukkit.World; import org.bukkit.entity.Entity; | import org.bukkit.*; import org.bukkit.entity.*; | [
"org.bukkit",
"org.bukkit.entity"
] | org.bukkit; org.bukkit.entity; | 2,197,157 |
public boolean result(final Object iRecord) {
final ORecordAbstract<?> record = (ORecordAbstract<?>) iRecord;
if (record.getIdentity().isValid()) {
// RESET VERSION TO DISABLE MVCC AVOIDING THE CONCURRENT EXCEPTION IF LOCAL CACHE IS NOT UPDATED
record.getRecordVersion().disable();
record.delete();
recordCount++;
return true;
}
return false;
} | boolean function(final Object iRecord) { final ORecordAbstract<?> record = (ORecordAbstract<?>) iRecord; if (record.getIdentity().isValid()) { record.getRecordVersion().disable(); record.delete(); recordCount++; return true; } return false; } | /**
* Delete the current record.
*/ | Delete the current record | result | {
"repo_name": "delebash/orientdb-parent",
"path": "core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDelete.java",
"license": "apache-2.0",
"size": 7950
} | [
"com.orientechnologies.orient.core.record.ORecordAbstract"
] | import com.orientechnologies.orient.core.record.ORecordAbstract; | import com.orientechnologies.orient.core.record.*; | [
"com.orientechnologies.orient"
] | com.orientechnologies.orient; | 946,860 |
@Test
public void testRpcValues() {
// Test Values
ButtonName testButtonName = ((SubscribeButton) msg).getButtonName();
// Valid Tests
assertEquals("Data didn't match input data.", TestValues.GENERAL_BUTTONNAME, testButtonName);
// Invalid/Null Tests
SubscribeButton msg = new SubscribeButton();
assertNotNull(TestValues.NOT_NULL, msg);
testNullBase(msg);
assertNull(TestValues.NULL, msg.getButtonName());
} | void function() { ButtonName testButtonName = ((SubscribeButton) msg).getButtonName(); assertEquals(STR, TestValues.GENERAL_BUTTONNAME, testButtonName); SubscribeButton msg = new SubscribeButton(); assertNotNull(TestValues.NOT_NULL, msg); testNullBase(msg); assertNull(TestValues.NULL, msg.getButtonName()); } | /**
* Tests the expected values of the RPC message.
*/ | Tests the expected values of the RPC message | testRpcValues | {
"repo_name": "smartdevicelink/sdl_android",
"path": "android/sdl_android/src/androidTest/java/com/smartdevicelink/test/rpc/requests/SubscribeButtonTests.java",
"license": "bsd-3-clause",
"size": 3763
} | [
"com.smartdevicelink.proxy.rpc.SubscribeButton",
"com.smartdevicelink.proxy.rpc.enums.ButtonName",
"com.smartdevicelink.test.TestValues",
"junit.framework.TestCase"
] | import com.smartdevicelink.proxy.rpc.SubscribeButton; import com.smartdevicelink.proxy.rpc.enums.ButtonName; import com.smartdevicelink.test.TestValues; import junit.framework.TestCase; | import com.smartdevicelink.proxy.rpc.*; import com.smartdevicelink.proxy.rpc.enums.*; import com.smartdevicelink.test.*; import junit.framework.*; | [
"com.smartdevicelink.proxy",
"com.smartdevicelink.test",
"junit.framework"
] | com.smartdevicelink.proxy; com.smartdevicelink.test; junit.framework; | 2,422,005 |
private static SearchResults doUnfilteredSearch(SearchTarget target, SearchRepository userRepo)
{
LOG.info("unfiltered search");
Map<String, WebSearchable> wsMap = new HashMap<String, WebSearchable>();
Map<WebSearchable, Float> hitMap = new HashMap<WebSearchable, Float>();
Map<WebSearchable, String> descs = new HashMap<WebSearchable, String>();
Map<WebSearchable, Set<String>> tags = new HashMap<WebSearchable, Set<String>>();
Set<WebSearchable> items;
if (target.isUserOnly()) {
items = userRepo.getSearchItems();
} else {
items = new HashSet<WebSearchable>();
for (SearchRepository sr : SearchRepository.getGlobalSearchRepositories()) {
items.addAll(sr.getSearchItems());
}
if (target.isAll()) {
items.addAll(userRepo.getSearchItems());
}
}
TagManager tm = new TagManagerFactory(userRepo.getProfile().getProfileManager())
.getTagManager();
for (WebSearchable ws: items) {
if (target.getType().equals(ws.getTagType())) {
wsMap.put(ws.getName(), ws);
descs.put(ws, ws.getDescription());
}
tags.put(ws, tm.getObjectTagNames(ws, userRepo.getProfile()));
}
return new SearchResults(hitMap, wsMap, descs, tags);
} | static SearchResults function(SearchTarget target, SearchRepository userRepo) { LOG.info(STR); Map<String, WebSearchable> wsMap = new HashMap<String, WebSearchable>(); Map<WebSearchable, Float> hitMap = new HashMap<WebSearchable, Float>(); Map<WebSearchable, String> descs = new HashMap<WebSearchable, String>(); Map<WebSearchable, Set<String>> tags = new HashMap<WebSearchable, Set<String>>(); Set<WebSearchable> items; if (target.isUserOnly()) { items = userRepo.getSearchItems(); } else { items = new HashSet<WebSearchable>(); for (SearchRepository sr : SearchRepository.getGlobalSearchRepositories()) { items.addAll(sr.getSearchItems()); } if (target.isAll()) { items.addAll(userRepo.getSearchItems()); } } TagManager tm = new TagManagerFactory(userRepo.getProfile().getProfileManager()) .getTagManager(); for (WebSearchable ws: items) { if (target.getType().equals(ws.getTagType())) { wsMap.put(ws.getName(), ws); descs.put(ws, ws.getDescription()); } tags.put(ws, tm.getObjectTagNames(ws, userRepo.getProfile())); } return new SearchResults(hitMap, wsMap, descs, tags); } | /**
* Return all the available objects in the requested scope in a set of search results.
* @param target a parameter object containing information about the scope of the search.
* @param userRepo The current user's search repository.
* @return A set of search results.
*/ | Return all the available objects in the requested scope in a set of search results | doUnfilteredSearch | {
"repo_name": "zebrafishmine/intermine",
"path": "intermine/api/main/src/org/intermine/api/search/SearchResults.java",
"license": "lgpl-2.1",
"size": 15662
} | [
"java.util.HashMap",
"java.util.HashSet",
"java.util.Map",
"java.util.Set",
"org.intermine.api.profile.TagManager",
"org.intermine.api.profile.TagManagerFactory"
] | import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.intermine.api.profile.TagManager; import org.intermine.api.profile.TagManagerFactory; | import java.util.*; import org.intermine.api.profile.*; | [
"java.util",
"org.intermine.api"
] | java.util; org.intermine.api; | 1,467,000 |
public PDFileSpecification getFile() throws IOException {
return PDFileSpecification.createFS(fdf.getDictionaryObject(COSName.F));
} | PDFileSpecification function() throws IOException { return PDFileSpecification.createFS(fdf.getDictionaryObject(COSName.F)); } | /**
* The source file or target file: the PDF document file that this FDF file was exported from or is intended to be
* imported into.
*
* @return The F entry of the FDF dictionary.
* @throws IOException If there is an error creating the file spec.
*/ | The source file or target file: the PDF document file that this FDF file was exported from or is intended to be imported into | getFile | {
"repo_name": "gavanx/pdflearn",
"path": "pdfbox/src/main/java/org/apache/pdfbox/pdmodel/fdf/FDFDictionary.java",
"license": "apache-2.0",
"size": 15646
} | [
"java.io.IOException",
"org.apache.pdfbox.cos.COSName",
"org.apache.pdfbox.pdmodel.common.filespecification.PDFileSpecification"
] | import java.io.IOException; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.pdmodel.common.filespecification.PDFileSpecification; | import java.io.*; import org.apache.pdfbox.cos.*; import org.apache.pdfbox.pdmodel.common.filespecification.*; | [
"java.io",
"org.apache.pdfbox"
] | java.io; org.apache.pdfbox; | 2,814,025 |
public static GemFireCacheImpl getForPdx(String reason) {
GemFireCacheImpl result = pdxInstance;
if (result == null) {
throw new CacheClosedException(reason);
}
return result;
} | static GemFireCacheImpl function(String reason) { GemFireCacheImpl result = pdxInstance; if (result == null) { throw new CacheClosedException(reason); } return result; } | /**
* Pdx is allowed to obtain the cache even while it is being closed
*/ | Pdx is allowed to obtain the cache even while it is being closed | getForPdx | {
"repo_name": "charliemblack/geode",
"path": "geode-core/src/main/java/org/apache/geode/internal/cache/GemFireCacheImpl.java",
"license": "apache-2.0",
"size": 186222
} | [
"org.apache.geode.cache.CacheClosedException"
] | import org.apache.geode.cache.CacheClosedException; | import org.apache.geode.cache.*; | [
"org.apache.geode"
] | org.apache.geode; | 1,073,245 |
public BracePair[] getPairs() {
return new BracePair[] {
new BracePair(CourierTypes.OPEN_BRACE, CourierTypes.CLOSE_BRACE, true),
new BracePair(CourierTypes.OPEN_BRACKET, CourierTypes.CLOSE_BRACKET, false),
new BracePair(CourierTypes.OPEN_PAREN, CourierTypes.CLOSE_PAREN, false)
//new BracePair(SchemadocTypes.DOC_COMMENT_START, SchemadocTypes.DOC_COMMENT_END, false)
};
} | return new BracePair[] { new BracePair(CourierTypes.OPEN_BRACE, CourierTypes.CLOSE_BRACE, true), new BracePair(CourierTypes.OPEN_BRACKET, CourierTypes.CLOSE_BRACKET, false), new BracePair(CourierTypes.OPEN_PAREN, CourierTypes.CLOSE_PAREN, false) }; } | /**
* Returns the array of definitions for brace pairs that need to be matched when
* editing code in the language.
*
* @return the array of brace pair definitions.
*/ | Returns the array of definitions for brace pairs that need to be matched when editing code in the language | getPairs | {
"repo_name": "coursera/courier",
"path": "idea-plugin/src/org/coursera/courier/CourierBraceMatcher.java",
"license": "apache-2.0",
"size": 2195
} | [
"com.intellij.lang.BracePair",
"org.coursera.courier.psi.CourierTypes"
] | import com.intellij.lang.BracePair; import org.coursera.courier.psi.CourierTypes; | import com.intellij.lang.*; import org.coursera.courier.psi.*; | [
"com.intellij.lang",
"org.coursera.courier"
] | com.intellij.lang; org.coursera.courier; | 1,511,777 |
public Timestamp getCreated();
public static final String COLUMNNAME_CreatedBy = "CreatedBy"; | Timestamp function(); public static final String COLUMNNAME_CreatedBy = STR; | /** Get Created.
* Date this record was created
*/ | Get Created. Date this record was created | getCreated | {
"repo_name": "pplatek/adempiere",
"path": "base/src/org/compiere/model/I_PA_Report.java",
"license": "gpl-2.0",
"size": 9202
} | [
"java.sql.Timestamp"
] | import java.sql.Timestamp; | import java.sql.*; | [
"java.sql"
] | java.sql; | 2,272,507 |
public Object decode(Object pObject) throws DecoderException {
if (pObject instanceof byte[]) {
return decode((byte[]) pObject);
} else if (pObject instanceof String) {
return decode((String) pObject);
} else {
throw new DecoderException("Parameter supplied to Base64 decode is not a byte[] or a String");
}
}
| Object function(Object pObject) throws DecoderException { if (pObject instanceof byte[]) { return decode((byte[]) pObject); } else if (pObject instanceof String) { return decode((String) pObject); } else { throw new DecoderException(STR); } } | /**
* Decodes an Object using the base64 algorithm. This method is provided in order to satisfy the requirements of the
* Decoder interface, and will throw a DecoderException if the supplied object is not of type byte[] or String.
*
* @param pObject
* Object to decode
* @return An object (of type byte[]) containing the binary data which corresponds to the byte[] or String supplied.
* @throws DecoderException
* if the parameter supplied is not of type byte[]
*/ | Decodes an Object using the base64 algorithm. This method is provided in order to satisfy the requirements of the Decoder interface, and will throw a DecoderException if the supplied object is not of type byte[] or String | decode | {
"repo_name": "jdgiotta/braintree_java",
"path": "src/main/java/com/braintreegateway/org/apache/commons/codec/binary/Base64.java",
"license": "mit",
"size": 41863
} | [
"com.braintreegateway.org.apache.commons.codec.DecoderException"
] | import com.braintreegateway.org.apache.commons.codec.DecoderException; | import com.braintreegateway.org.apache.commons.codec.*; | [
"com.braintreegateway.org"
] | com.braintreegateway.org; | 1,013,258 |
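The decode(Object) entry point above simply dispatches on the argument's runtime type. A short sketch of how such a Decoder-style method is typically exercised, written against the standard Apache Commons Codec packaging (org.apache.commons.codec) rather than the repackaged Braintree copy shown here:

```java
import java.nio.charset.StandardCharsets;

import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base64;

public class Base64ObjectDecodeDemo {
    public static void main(String[] args) throws DecoderException {
        Base64 codec = new Base64();

        // Both a String and a byte[] are accepted by decode(Object).
        byte[] fromString = (byte[]) codec.decode((Object) "aGVsbG8=");
        byte[] fromBytes  = (byte[]) codec.decode((Object) "aGVsbG8=".getBytes(StandardCharsets.US_ASCII));

        System.out.println(new String(fromString, StandardCharsets.US_ASCII)); // hello
        System.out.println(new String(fromBytes, StandardCharsets.US_ASCII));  // hello

        // Any other argument type hits the DecoderException branch shown above.
        try {
            codec.decode(Integer.valueOf(42));
        } catch (DecoderException expected) {
            System.out.println("rejected: " + expected.getMessage());
        }
    }
}
```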
private String defaultLookAndFeel() {
return (System.getProperty("os.name", "").toLowerCase().indexOf("linux") != -1) ?
UIManager.getCrossPlatformLookAndFeelClassName()
:
UIManager.getSystemLookAndFeelClassName();
}
| String function() { return (System.getProperty(STR, STRlinux") != -1) ? UIManager.getCrossPlatformLookAndFeelClassName() : UIManager.getSystemLookAndFeelClassName(); } | /**
* Returns the default LAF. Under Linux, GTK+ is the default but because it
* has too many problems, the Metal LAF is used instead.
*/ | Returns the default LAF. Under Linux, GTK+ is the default but because it has too many problems, the Metal LAF is used instead | defaultLookAndFeel | {
"repo_name": "bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs",
"path": "utils/eclipselink.utils.workbench/framework/source/org/eclipse/persistence/tools/workbench/framework/internal/FrameworkApplication.java",
"license": "epl-1.0",
"size": 35396
} | [
"javax.swing.UIManager"
] | import javax.swing.UIManager; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 2,547,697 |
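Since the class name returned by defaultLookAndFeel above is only useful once it is installed, here is a minimal hypothetical stand-in (not the Workbench code) that applies the same Linux-vs-system choice through UIManager:

```java
import javax.swing.SwingUtilities;
import javax.swing.UIManager;

public class LookAndFeelDemo {
    // Same idea as defaultLookAndFeel(): cross-platform (Metal) on Linux, system LAF elsewhere.
    static String pickLookAndFeel() {
        String os = System.getProperty("os.name", "").toLowerCase();
        return os.contains("linux")
                ? UIManager.getCrossPlatformLookAndFeelClassName()
                : UIManager.getSystemLookAndFeelClassName();
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            try {
                UIManager.setLookAndFeel(pickLookAndFeel());
            } catch (Exception e) {
                // If the LAF cannot be installed, Swing keeps its current default.
                e.printStackTrace();
            }
            // ... build and show the UI here ...
        });
    }
}
```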
public final BatchRequest batch() {
return batch(null);
} | final BatchRequest function() { return batch(null); } | /**
* Create an {@link BatchRequest} object from this Google API client instance.
*
* <p>Sample usage:
*
* <pre>{@code
* client.batch()
* .queue(...)
* .queue(...)
* .execute();
* }</pre>
*
* @return newly created Batch request
*/ | Create an <code>BatchRequest</code> object from this Google API client instance. Sample usage: <code>client.batch() .queue(...) .queue(...) .execute(); </code> | batch | {
"repo_name": "googleapis/google-api-java-client",
"path": "google-api-client/src/main/java/com/google/api/client/googleapis/services/AbstractGoogleClient.java",
"license": "apache-2.0",
"size": 17451
} | [
"com.google.api.client.googleapis.batch.BatchRequest"
] | import com.google.api.client.googleapis.batch.BatchRequest; | import com.google.api.client.googleapis.batch.*; | [
"com.google.api"
] | com.google.api; | 2,311,353 |
public Intent offerSuperUser(Activity activity, int requestCode) {
RootTools.log("Launching Market for SuperUser");
Intent i = new Intent(Intent.ACTION_VIEW,
Uri.parse("market://details?id=com.noshufou.android.su"));
activity.startActivityForResult(i, requestCode);
return i;
} | Intent function(Activity activity, int requestCode) { RootTools.log(STR); Intent i = new Intent(Intent.ACTION_VIEW, Uri.parse("market: activity.startActivityForResult(i, requestCode); return i; } | /**
* This will launch the Android market looking for SuperUser, but will return the intent fired
* and starts the activity with startActivityForResult
*
* @param activity pass in your Activity
* @param requestCode pass in the request code
* @return intent fired
*/ | This will launch the Android market looking for SuperUser, but will return the intent fired and starts the activity with startActivityForResult | offerSuperUser | {
"repo_name": "Morlunk/Mountie",
"path": "app/src/main/java/com/stericson/RootTools/internal/RootToolsInternalMethods.java",
"license": "gpl-3.0",
"size": 54666
} | [
"android.app.Activity",
"android.content.Intent",
"android.net.Uri",
"com.stericson.RootTools"
] | import android.app.Activity; import android.content.Intent; import android.net.Uri; import com.stericson.RootTools; | import android.app.*; import android.content.*; import android.net.*; import com.stericson.*; | [
"android.app",
"android.content",
"android.net",
"com.stericson"
] | android.app; android.content; android.net; com.stericson; | 2,009,177 |
public Observable<ServiceResponse<Void>> beginRestartWithServiceResponseAsync(String resourceGroupName, String serverName) {
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (serverName == null) {
throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
} | Observable<ServiceResponse<Void>> function(String resourceGroupName, String serverName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (serverName == null) { throw new IllegalArgumentException(STR); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException(STR); } | /**
* Restarts a server.
*
* @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
* @param serverName The name of the server.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceResponse} object if successful.
*/ | Restarts a server | beginRestartWithServiceResponseAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/postgresql/mgmt-v2017_12_01/src/main/java/com/microsoft/azure/management/postgresql/v2017_12_01/implementation/ServersInner.java",
"license": "mit",
"size": 61570
} | [
"com.microsoft.rest.ServiceResponse"
] | import com.microsoft.rest.ServiceResponse; | import com.microsoft.rest.*; | [
"com.microsoft.rest"
] | com.microsoft.rest; | 2,886,040 |
public Map<String,ProteinData> getProteins(String ref, RpcContext... jsonRpcContext) throws IOException, JsonClientException {
List<Object> args = new ArrayList<Object>();
args.add(ref);
TypeReference<List<Map<String,ProteinData>>> retType = new TypeReference<List<Map<String,ProteinData>>>() {};
List<Map<String,ProteinData>> res = caller.jsonrpcCall("genome_annotaiton_api.get_proteins", args, retType, true, true, jsonRpcContext);
return res.get(0);
}
/**
* <p>Original spec-file function name: get_feature_locations</p>
* <pre>
* *
* * Retrieve Feature locations in this GenomeAnnotation.
* *
* </pre>
* @param arg1 instance of original type "ObjectReference"
* @param arg2 instance of list of String
* @return instance of mapping from String to list of type {@link us.kbase.genomeannotaitonapi.Region Region} | Map<String,ProteinData> function(String ref, RpcContext... jsonRpcContext) throws IOException, JsonClientException { List<Object> args = new ArrayList<Object>(); args.add(ref); TypeReference<List<Map<String,ProteinData>>> retType = new TypeReference<List<Map<String,ProteinData>>>() {}; List<Map<String,ProteinData>> res = caller.jsonrpcCall(STR, args, retType, true, true, jsonRpcContext); return res.get(0); } /** * <p>Original spec-file function name: get_feature_locations</p> * <pre> * * * * Retrieve Feature locations in this GenomeAnnotation. * * * </pre> * @param arg1 instance of original type STR * @param arg2 instance of list of String * @return instance of mapping from String to list of type {@link us.kbase.genomeannotaitonapi.Region Region} | /**
* <p>Original spec-file function name: get_proteins</p>
* <pre>
* *
* * Retrieve Protein data available in this GenomeAnnotation.
* *
* </pre>
* @param ref instance of original type "ObjectReference"
* @return instance of mapping from String to type {@link us.kbase.genomeannotaitonapi.ProteinData ProteinData} (original type "Protein_data")
* @throws IOException if an IO exception occurs
* @throws JsonClientException if a JSON RPC exception occurs
*/ | Original spec-file function name: get_proteins <code> Retrieve Protein data available in this GenomeAnnotation. </code> | getProteins | {
"repo_name": "scanon/data_api2",
"path": "lib/src/us/kbase/genomeannotaitonapi/GenomeAnnotaitonApiClient.java",
"license": "mit",
"size": 24864
} | [
"com.fasterxml.jackson.core.type.TypeReference",
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"java.util.Map",
"us.kbase.common.service.JsonClientException",
"us.kbase.common.service.RpcContext"
] | import com.fasterxml.jackson.core.type.TypeReference; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import us.kbase.common.service.JsonClientException; import us.kbase.common.service.RpcContext; | import com.fasterxml.jackson.core.type.*; import java.io.*; import java.util.*; import us.kbase.common.service.*; | [
"com.fasterxml.jackson",
"java.io",
"java.util",
"us.kbase.common"
] | com.fasterxml.jackson; java.io; java.util; us.kbase.common; | 733,194 |
ServiceResponse<Void> updatePetWithForm(String petId) throws ServiceException, IOException, IllegalArgumentException; | ServiceResponse<Void> updatePetWithForm(String petId) throws ServiceException, IOException, IllegalArgumentException; | /**
* Updates a pet in the store with form data.
*
* @param petId ID of pet that needs to be updated
* @throws ServiceException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @throws IllegalArgumentException exception thrown from invalid parameters
* @return the {@link ServiceResponse} object if successful.
*/ | Updates a pet in the store with form data | updatePetWithForm | {
"repo_name": "xingwu1/autorest",
"path": "Samples/petstore/Java/SwaggerPetstore.java",
"license": "mit",
"size": 35698
} | [
"com.microsoft.rest.ServiceException",
"com.microsoft.rest.ServiceResponse",
"java.io.IOException"
] | import com.microsoft.rest.ServiceException; import com.microsoft.rest.ServiceResponse; import java.io.IOException; | import com.microsoft.rest.*; import java.io.*; | [
"com.microsoft.rest",
"java.io"
] | com.microsoft.rest; java.io; | 1,510,970 |
public static JButton button(String name, Action action) {
JButton ret = button(name);
ret.setAction(action);
return ret;
} | static JButton function(String name, Action action) { JButton ret = button(name); ret.setAction(action); return ret; } | /**
* A convenience method to initialize a button with an action
*
* @param name
* @param action
* @return
*/ | A convenience method to initialize a button with an action | button | {
"repo_name": "SteppingStone/sstone-dat",
"path": "src/main/java/org/edc/sstone/dat/util/SAFUtil.java",
"license": "gpl-3.0",
"size": 5005
} | [
"javax.swing.Action",
"javax.swing.JButton"
] | import javax.swing.Action; import javax.swing.JButton; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 1,502,575 |
private void upload (GBFile file) throws ClientException, IOException {
// If the file is a folder
if (file.isDirectory()) {
// Create the folder in the storage
client.createDirectory(file);
// And create enw works, to upload each file
for (GBFile child : file.getChildren()) {
manager.addWork(new Work(child, Work.WorkKind.UPLOAD));
}
return;
}
// Upload the file
client.uploadFile(file);
} | void function (GBFile file) throws ClientException, IOException { if (file.isDirectory()) { client.createDirectory(file); for (GBFile child : file.getChildren()) { manager.addWork(new Work(child, Work.WorkKind.UPLOAD)); } return; } client.uploadFile(file); } | /**
* Upload the file if it is a file; schedule multiple uploads if it is a folder
* @param file File to upload
* @throws ClientException
* @throws IOException
*/ | Upload the file if it is a file; schedule multiple uploads if it is a folder | upload | {
"repo_name": "simonedegiacomi/goboxclient",
"path": "src/main/java/it/simonedegiacomi/sync/Employee.java",
"license": "gpl-2.0",
"size": 4909
} | [
"it.simonedegiacomi.goboxapi.GBFile",
"it.simonedegiacomi.goboxapi.client.ClientException",
"java.io.IOException"
] | import it.simonedegiacomi.goboxapi.GBFile; import it.simonedegiacomi.goboxapi.client.ClientException; import java.io.IOException; | import it.simonedegiacomi.goboxapi.*; import it.simonedegiacomi.goboxapi.client.*; import java.io.*; | [
"it.simonedegiacomi.goboxapi",
"java.io"
] | it.simonedegiacomi.goboxapi; java.io; | 2,414,143 |
protected String getXMLPropertySet(String message) {
if (saveable) {
try {
saveToFile();
} catch (IOException e) {
}
}
String xml;
if (message == null) {
xml = getXMLPropertySetInit();
} else {
xml = getXMLPropertySetInit(message);
}
List<INDIElement> elem = getElementsAsList();
for (int i = 0 ; i < elem.size() ; i++) {
xml += elem.get(i).getXMLOneElement();
}
xml += getXMLPropertySetEnd();
return xml;
} | String function(String message) { if (saveable) { try { saveToFile(); } catch (IOException e) { } } String xml; if (message == null) { xml = getXMLPropertySetInit(); } else { xml = getXMLPropertySetInit(message); } List<INDIElement> elem = getElementsAsList(); for (int i = 0 ; i < elem.size() ; i++) { xml += elem.get(i).getXMLOneElement(); } xml += getXMLPropertySetEnd(); return xml; } | /**
* Gets the XML code to set the values of the property with a
* <code>message</code>. Should not usually be called by the Drivers.
*
* @param message A message to be sent to the client when setting the values
* of the property.
* @return The XML code to set the values of the property.
*/ | Gets the XML code to set the values of the property with a <code>message</code>. Should not usually be called by the Drivers | getXMLPropertySet | {
"repo_name": "farom57/AstroidDriver",
"path": "src/laazotea/indi/driver/INDIProperty.java",
"license": "gpl-2.0",
"size": 17304
} | [
"java.io.IOException",
"java.util.List"
] | import java.io.IOException; import java.util.List; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 275,408 |
public static ims.core.clinical.domain.objects.PatientAllergy extractPatientAllergy(ims.domain.ILightweightDomainFactory domainFactory, ims.core.vo.PatientAllergyForSummaryOverviewVo valueObject)
{
return extractPatientAllergy(domainFactory, valueObject, new HashMap());
}
| static ims.core.clinical.domain.objects.PatientAllergy function(ims.domain.ILightweightDomainFactory domainFactory, ims.core.vo.PatientAllergyForSummaryOverviewVo valueObject) { return extractPatientAllergy(domainFactory, valueObject, new HashMap()); } | /**
* Create the domain object from the value object.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param valueObject - extract the domain object fields from this.
*/ | Create the domain object from the value object | extractPatientAllergy | {
"repo_name": "open-health-hub/openmaxims-linux",
"path": "openmaxims_workspace/ValueObjects/src/ims/core/vo/domain/PatientAllergyForSummaryOverviewVoAssembler.java",
"license": "agpl-3.0",
"size": 17259
} | [
"java.util.HashMap"
] | import java.util.HashMap; | import java.util.*; | [
"java.util"
] | java.util; | 2,676,230 |
@Override
protected void postSetUpdate(StreamWindowEvent<Tuple> event,
Object partition, LinkedList<I> tuples) throws Exception {
switch (event.getType()) {
case INSERTION:
aggregate(partition, tuples);
break;
default:
break;
}
} | void function(StreamWindowEvent<Tuple> event, Object partition, LinkedList<I> tuples) throws Exception { switch (event.getType()) { case INSERTION: aggregate(partition, tuples); break; default: break; } } | /**
* For a count based window, the eviction precedes the
* insertion, but should be seen as a single action,
* so the eviction does not result in calling the function.
* It will be immediately followed by the INSERTION
* which will result in the call back.
*/ | For a count based window, the eviction precedes the insertion, but should be seen as a single action, so the eviction does not result in calling the function. It will be immediately followed by the INSERTION which will result in the call back | postSetUpdate | {
"repo_name": "ibmkendrick/streamsx.topology",
"path": "java/src/com/ibm/streamsx/topology/internal/functional/window/ContinuousAggregatorCountEvict.java",
"license": "apache-2.0",
"size": 1508
} | [
"com.ibm.streams.operator.Tuple",
"com.ibm.streams.operator.window.StreamWindowEvent",
"java.util.LinkedList"
] | import com.ibm.streams.operator.Tuple; import com.ibm.streams.operator.window.StreamWindowEvent; import java.util.LinkedList; | import com.ibm.streams.operator.*; import com.ibm.streams.operator.window.*; import java.util.*; | [
"com.ibm.streams",
"java.util"
] | com.ibm.streams; java.util; | 2,433,987 |
private void sendSensorStopListeningIntent() {
Intent i = new Intent(Sensor.Intents.SENSOR_UNREGISTER_LISTENER_INTENT);
i.putExtra(Sensor.Intents.EXTRA_SENSOR_ID, mSensorId);
sendToHostApp(i);
} | void function() { Intent i = new Intent(Sensor.Intents.SENSOR_UNREGISTER_LISTENER_INTENT); i.putExtra(Sensor.Intents.EXTRA_SENSOR_ID, mSensorId); sendToHostApp(i); } | /**
* Send stop listening intent to host application
*
* @see Sensor.Intents#SENSOR_UNREGISTER_LISTENER_INTENT
*/ | Send stop listening intent to host application | sendSensorStopListeningIntent | {
"repo_name": "einvalentin/buildwatch",
"path": "3rdParty/SmartExtensionUtils/src/com/sonyericsson/extras/liveware/extension/util/sensor/AccessorySensor.java",
"license": "apache-2.0",
"size": 12739
} | [
"android.content.Intent",
"com.sonyericsson.extras.liveware.aef.sensor.Sensor"
] | import android.content.Intent; import com.sonyericsson.extras.liveware.aef.sensor.Sensor; | import android.content.*; import com.sonyericsson.extras.liveware.aef.sensor.*; | [
"android.content",
"com.sonyericsson.extras"
] | android.content; com.sonyericsson.extras; | 1,427,993 |
public T caseAssociation(Association object) {
return null;
} | T function(Association object) { return null; } | /**
* Returns the result of interpreting the object as an instance of '<em>Association</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Association</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/ | Returns the result of interpreting the object as an instance of 'Association'. This implementation returns null; returning a non-null result will terminate the switch. | caseAssociation | {
"repo_name": "romartin/kie-wb-common",
"path": "kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-emf/src/main/java/org/eclipse/bpmn2/util/Bpmn2Switch.java",
"license": "apache-2.0",
"size": 144865
} | [
"org.eclipse.bpmn2.Association"
] | import org.eclipse.bpmn2.Association; | import org.eclipse.bpmn2.*; | [
"org.eclipse.bpmn2"
] | org.eclipse.bpmn2; | 1,340,527 |
public boolean isNativeImplementation() {
return false;
}
private static final AtomicReference<CommandLine> sCommandLine =
new AtomicReference<CommandLine>(); | boolean function() { return false; } private static final AtomicReference<CommandLine> sCommandLine = new AtomicReference<CommandLine>(); | /**
* Determine if the command line is bound to the native (JNI) implementation.
* @return true if the underlying implementation is delegating to the native command line.
*/ | Determine if the command line is bound to the native (JNI) implementation | isNativeImplementation | {
"repo_name": "michaelforfxhelp/fxhelprepo",
"path": "third_party/chromium/base/android/java/src/org/chromium/base/CommandLine.java",
"license": "mpl-2.0",
"size": 14814
} | [
"java.util.concurrent.atomic.AtomicReference"
] | import java.util.concurrent.atomic.AtomicReference; | import java.util.concurrent.atomic.*; | [
"java.util"
] | java.util; | 1,265,963 |
public LayoutElementParcelable generateLayoutElement(@NonNull Context c, boolean showThumbs) {
switch (mode) {
case FILE:
case ROOT:
File file = getFile();
LayoutElementParcelable layoutElement;
if (isDirectory()) {
layoutElement =
new LayoutElementParcelable(
c,
path,
RootHelper.parseFilePermission(file),
"",
folderSize() + "",
0,
true,
file.lastModified() + "",
false,
showThumbs,
mode);
} else {
layoutElement =
new LayoutElementParcelable(
c,
file.getPath(),
RootHelper.parseFilePermission(file),
file.getPath(),
file.length() + "",
file.length(),
false,
file.lastModified() + "",
false,
showThumbs,
mode);
}
return layoutElement;
default:
return null;
}
} | LayoutElementParcelable function(@NonNull Context c, boolean showThumbs) { switch (mode) { case FILE: case ROOT: File file = getFile(); LayoutElementParcelable layoutElement; if (isDirectory()) { layoutElement = new LayoutElementParcelable( c, path, RootHelper.parseFilePermission(file), STRSTRSTRSTR", false, showThumbs, mode); } return layoutElement; default: return null; } } | /**
* Generates a {@link LayoutElementParcelable} adapter-compatible element. Currently supports only
* the local filesystem
*/ | Generates a <code>LayoutElementParcelable</code> adapter-compatible element. Currently supports only the local filesystem | generateLayoutElement | {
"repo_name": "TeamAmaze/AmazeFileManager",
"path": "app/src/main/java/com/amaze/filemanager/filesystem/HybridFile.java",
"license": "gpl-3.0",
"size": 48290
} | [
"android.content.Context",
"androidx.annotation.NonNull",
"com.amaze.filemanager.adapters.data.LayoutElementParcelable",
"java.io.File"
] | import android.content.Context; import androidx.annotation.NonNull; import com.amaze.filemanager.adapters.data.LayoutElementParcelable; import java.io.File; | import android.content.*; import androidx.annotation.*; import com.amaze.filemanager.adapters.data.*; import java.io.*; | [
"android.content",
"androidx.annotation",
"com.amaze.filemanager",
"java.io"
] | android.content; androidx.annotation; com.amaze.filemanager; java.io; | 2,041,104 |
System.out.println("\nRunning sample: " + Sample.class);
// Parse the command line arguments.
//
String homeDir = "./tmp";
for (int i = 0; i < args.length; i += 1) {
if (args[i].equals("-h") && i < args.length - 1) {
i += 1;
homeDir = args[i];
} else {
System.err.println("Usage:\n java " + Sample.class.getName() +
"\n [-h <home-directory>]");
System.exit(2);
}
}
// Run the sample.
//
Sample sample = null;
try {
sample = new Sample(homeDir);
sample.run();
} catch (Exception e) {
// If an exception reaches this point, the last transaction did not
// complete. If the exception is RunRecoveryException, follow
// the Berkeley DB recovery procedures before running again.
e.printStackTrace();
} finally {
if (sample != null) {
try {
// Always attempt to close the database cleanly.
sample.close();
} catch (Exception e) {
System.err.println("Exception during database close:");
e.printStackTrace();
}
}
}
}
private Sample(String homeDir)
throws DatabaseException, FileNotFoundException {
db = new SampleDatabase(homeDir);
views = new SampleViews(db);
} | System.out.println(STR + Sample.class); for (int i = 0; i < args.length; i += 1) { if (args[i].equals("-h") && i < args.length - 1) { i += 1; homeDir = args[i]; } else { System.err.println(STR + Sample.class.getName() + STR); System.exit(2); } } try { sample = new Sample(homeDir); sample.run(); } catch (Exception e) { e.printStackTrace(); } finally { if (sample != null) { try { sample.close(); } catch (Exception e) { System.err.println(STR); e.printStackTrace(); } } } } private Sample(String homeDir) throws DatabaseException, FileNotFoundException { db = new SampleDatabase(homeDir); views = new SampleViews(db); } | /**
* Run the sample program.
*/ | Run the sample program | main | {
"repo_name": "malin1993ml/h-store",
"path": "third_party/cpp/berkeleydb/examples/java/src/collections/ship/entity/Sample.java",
"license": "gpl-3.0",
"size": 7903
} | [
"com.sleepycat.db.DatabaseException",
"java.io.FileNotFoundException"
] | import com.sleepycat.db.DatabaseException; import java.io.FileNotFoundException; | import com.sleepycat.db.*; import java.io.*; | [
"com.sleepycat.db",
"java.io"
] | com.sleepycat.db; java.io; | 154,121 |
private Object[] compressIndexArrays(
long[] indexCIDS,
IndexRowGenerator[] irgs)
{
long[] workSpace = new long[indexCIDS.length];
int j = 0, k = indexCIDS.length - 1;
for (int i = 0; i < indexCIDS.length; i++)
{
int m;
for (m = 0; m < j; m++) // look up our unique set
{
if (indexCIDS[i] == workSpace[m]) // it's a duplicate
{
workSpace[k--] = i; // save dup index's index
break;
}
}
if (m == j)
workSpace[j++] = indexCIDS[i]; // save unique conglom id
}
if (j < indexCIDS.length) // duplicate exists
{
long[] newIndexCIDS = new long[j];
IndexRowGenerator[] newIrgs = new IndexRowGenerator[j];
int[] duplicateIndexes = new int[indexCIDS.length - j];
k = 0;
// do everything in one loop
for (int m = 0, n = indexCIDS.length - 1; m < indexCIDS.length; m++)
{
// we already gathered our indexCIDS and duplicateIndexes
if (m < j)
newIndexCIDS[m] = workSpace[m];
else
duplicateIndexes[indexCIDS.length - m - 1] = (int) workSpace[m];
// stack up our irgs, indexSCOCIs, indexDCOCIs
if ((n >= j) && (m == (int) workSpace[n]))
n--;
else
{
newIrgs[k] = irgs[m];
k++;
}
}
// construct return value
Object[] returnValue = new Object[3]; // [indexSCOCIs == null ? 3 : 5];
returnValue[0] = duplicateIndexes;
returnValue[1] = newIndexCIDS;
returnValue[2] = newIrgs;
return returnValue;
}
else // no duplicates
return null;
} | Object[] function( long[] indexCIDS, IndexRowGenerator[] irgs) { long[] workSpace = new long[indexCIDS.length]; int j = 0, k = indexCIDS.length - 1; for (int i = 0; i < indexCIDS.length; i++) { int m; for (m = 0; m < j; m++) { if (indexCIDS[i] == workSpace[m]) { workSpace[k--] = i; break; } } if (m == j) workSpace[j++] = indexCIDS[i]; } if (j < indexCIDS.length) { long[] newIndexCIDS = new long[j]; IndexRowGenerator[] newIrgs = new IndexRowGenerator[j]; int[] duplicateIndexes = new int[indexCIDS.length - j]; k = 0; for (int m = 0, n = indexCIDS.length - 1; m < indexCIDS.length; m++) { if (m < j) newIndexCIDS[m] = workSpace[m]; else duplicateIndexes[indexCIDS.length - m - 1] = (int) workSpace[m]; if ((n >= j) && (m == (int) workSpace[n])) n--; else { newIrgs[k] = irgs[m]; k++; } } Object[] returnValue = new Object[3]; returnValue[0] = duplicateIndexes; returnValue[1] = newIndexCIDS; returnValue[2] = newIrgs; return returnValue; } else return null; } | /**
* Get rid of duplicates from a set of index conglomerate numbers and
* index descriptors.
*
* @param indexCIDS array of index conglomerate numbers
* @param irgs array of index row generators
*
* @return value: If no duplicates, returns NULL; otherwise,
* a size-3 array of objects, first element is an
* array of duplicates' indexes in the input arrays;
* second element is the compact indexCIDs; third
* element is the compact irgs.
*/ | Get rid of duplicates from a set of index conglomerate numbers and index descriptors | compressIndexArrays | {
"repo_name": "trejkaz/derby",
"path": "java/engine/org/apache/derby/impl/sql/execute/AlterTableConstantAction.java",
"license": "apache-2.0",
"size": 131348
} | [
"org.apache.derby.iapi.sql.dictionary.IndexRowGenerator"
] | import org.apache.derby.iapi.sql.dictionary.IndexRowGenerator; | import org.apache.derby.iapi.sql.dictionary.*; | [
"org.apache.derby"
] | org.apache.derby; | 332,079 |
@Test( expected = NotRoutableException.class )
public final void testInvokeCountFoo3()
{
fooRouter.proxy()
.count( "foo3", TEST );
fail( "testInvokeCountFoo3()" );
} | @Test( expected = NotRoutableException.class ) final void function() { fooRouter.proxy() .count( "foo3", TEST ); fail( STR ); } | /**
* Test invoking count forward to Foo3 (non-existent path)
*/ | Test invoking count forward to Foo3 (non-existent path) | testInvokeCountFoo3 | {
"repo_name": "caveman-frak/java-core",
"path": "core-model/src/test/java/uk/co/bluegecko/core/service/base/router/RouterTest.java",
"license": "apache-2.0",
"size": 1559
} | [
"org.junit.Assert",
"org.junit.Test"
] | import org.junit.Assert; import org.junit.Test; | import org.junit.*; | [
"org.junit"
] | org.junit; | 981,410 |
boolean removeMutation(ItemStack result); | boolean removeMutation(ItemStack result); | /**
* Removes all mutations that give this stack as a result
* @param result
* @return True if successful
*/ | Removes all mutations that give this stack as a result | removeMutation | {
"repo_name": "HenryLoenwind/AgriCraft",
"path": "src/main/java/com/InfinityRaider/AgriCraft/api/v1/APIv1.java",
"license": "mit",
"size": 17453
} | [
"net.minecraft.item.ItemStack"
] | import net.minecraft.item.ItemStack; | import net.minecraft.item.*; | [
"net.minecraft.item"
] | net.minecraft.item; | 2,110,144 |
@Override
public boolean processNoteContent(NoteRenderContext context, NoteData item)
throws NoteRenderingPreProcessorException {
item.setContent(processContent(item.getContent()));
item.setShortContent(processContent(item.getShortContent()));
return true;
} | boolean function(NoteRenderContext context, NoteData item) throws NoteRenderingPreProcessorException { item.setContent(processContent(item.getContent())); item.setShortContent(processContent(item.getShortContent())); return true; } | /**
* Processes a note for a specific render context. This method will only be called if the
* processor supports the mode given by the render context.
*
* @param context
* holds details about the render context to allow specific processing in different
* situations
* @param item
* the item to be processed
* @return true if the item was modified, false otherwise
* @throws com.communote.server.api.core.note.processor.NoteRenderingPreProcessorException
* in case something unexpected lead to the failure of the processor
*/ | Processes a note for a specific render context. This method will only be called if the processor supports the mode given by the render context | processNoteContent | {
"repo_name": "Communote/communote-server",
"path": "communote/plugins/communote-smileys-plugin/src/main/java/com/communote/plugins/smileys/SmileysNoteRenderingPreProcessor.java",
"license": "apache-2.0",
"size": 6270
} | [
"com.communote.server.api.core.note.NoteData",
"com.communote.server.api.core.note.NoteRenderContext",
"com.communote.server.api.core.note.processor.NoteRenderingPreProcessorException"
] | import com.communote.server.api.core.note.NoteData; import com.communote.server.api.core.note.NoteRenderContext; import com.communote.server.api.core.note.processor.NoteRenderingPreProcessorException; | import com.communote.server.api.core.note.*; import com.communote.server.api.core.note.processor.*; | [
"com.communote.server"
] | com.communote.server; | 808,070 |
public void setFields(Strings fields) {
this.fields = fields;
} | void function(Strings fields) { this.fields = fields; } | /**
* The Fields
*/ | The Fields | setFields | {
"repo_name": "ullgren/camel",
"path": "components/camel-gora/src/main/java/org/apache/camel/component/gora/GoraConfiguration.java",
"license": "apache-2.0",
"size": 6871
} | [
"com.google.common.base.Strings"
] | import com.google.common.base.Strings; | import com.google.common.base.*; | [
"com.google.common"
] | com.google.common; | 2,802,784 |
public void resetStyleTables( View view ) {
try {
SpatialDatabasesManager dbManager = SpatialDatabasesManager.getInstance();
List<AbstractSpatialDatabaseHandler> spatialDatabaseHandlers = dbManager.getSpatialDatabaseHandlers();
for( AbstractSpatialDatabaseHandler iSpatialDatabaseHandler : spatialDatabaseHandlers ) {
if (iSpatialDatabaseHandler instanceof SpatialiteDatabaseHandler) {
SpatialiteDatabaseHandler sdHandler = (SpatialiteDatabaseHandler) iSpatialDatabaseHandler;
sdHandler.resetStyleTable();
}
}
Utilities.messageDialog(this, "Style reset performed.", null);
} catch (Exception e) {
GPLog.error(this, null, e);
Utilities.messageDialog(this, "An error occurred: " + e.getLocalizedMessage(), null);
}
} | void function( View view ) { try { SpatialDatabasesManager dbManager = SpatialDatabasesManager.getInstance(); List<AbstractSpatialDatabaseHandler> spatialDatabaseHandlers = dbManager.getSpatialDatabaseHandlers(); for( AbstractSpatialDatabaseHandler iSpatialDatabaseHandler : spatialDatabaseHandlers ) { if (iSpatialDatabaseHandler instanceof SpatialiteDatabaseHandler) { SpatialiteDatabaseHandler sdHandler = (SpatialiteDatabaseHandler) iSpatialDatabaseHandler; sdHandler.resetStyleTable(); } } Utilities.messageDialog(this, STR, null); } catch (Exception e) { GPLog.error(this, null, e); Utilities.messageDialog(this, STR + e.getLocalizedMessage(), null); } } | /**
* Reset style tables.
*
* @param view parent.
*/ | Reset style tables | resetStyleTables | {
"repo_name": "kristina-hager/geopaparazzi",
"path": "geopaparazzi.app/src/eu/hydrologis/geopaparazzi/util/SecretActivity.java",
"license": "gpl-3.0",
"size": 8226
} | [
"android.view.View",
"eu.geopaparazzi.library.database.GPLog",
"eu.geopaparazzi.library.util.Utilities",
"eu.geopaparazzi.spatialite.database.spatial.SpatialDatabasesManager",
"eu.geopaparazzi.spatialite.database.spatial.core.databasehandlers.AbstractSpatialDatabaseHandler",
"eu.geopaparazzi.spatialite.database.spatial.core.databasehandlers.SpatialiteDatabaseHandler",
"java.util.List"
] | import android.view.View; import eu.geopaparazzi.library.database.GPLog; import eu.geopaparazzi.library.util.Utilities; import eu.geopaparazzi.spatialite.database.spatial.SpatialDatabasesManager; import eu.geopaparazzi.spatialite.database.spatial.core.databasehandlers.AbstractSpatialDatabaseHandler; import eu.geopaparazzi.spatialite.database.spatial.core.databasehandlers.SpatialiteDatabaseHandler; import java.util.List; | import android.view.*; import eu.geopaparazzi.library.database.*; import eu.geopaparazzi.library.util.*; import eu.geopaparazzi.spatialite.database.spatial.*; import eu.geopaparazzi.spatialite.database.spatial.core.databasehandlers.*; import java.util.*; | [
"android.view",
"eu.geopaparazzi.library",
"eu.geopaparazzi.spatialite",
"java.util"
] | android.view; eu.geopaparazzi.library; eu.geopaparazzi.spatialite; java.util; | 745,606 |
public void parseDetails( final byte[] data ) {
final Details details = replay.details = new Details();
setWrapper( data, ByteOrder.LITTLE_ENDIAN );
final Object[] dtls = (Object[]) readStructure();
//printStructure( dtls, "" );
final Object[] playersArr = (Object[]) dtls[ 0 ];
final int playersCount = playersArr.length;
final List< Player > playerList = new ArrayList< Player >( playersCount );
for ( int i = 0; i < playersCount; i++ ) {
final Player player = new Player();
final Object[] playerArr = (Object[]) playersArr[ i ];
String name = byteArrToString( playerArr[ 0 ] );
if ( name.length() == 0 ) {
player.playerId.name = "";
break;
}
// The read player name contains the clan tag and optional formatting tags (in mark-up format).
// Example: "[RA]<sp/>SvnthSyn"
// Strip these off
int start;
while ( ( start = name.indexOf( '<' ) ) >= 0 )
name = name.substring( 0, start ) + name.substring( name.indexOf( '>', start ) + 1 );
player.nameWithClan = name;
if ( ( start = name.indexOf( '[' ) ) >= 0 )
name = name.substring( 0, start ) + name.substring( name.indexOf( ']', start ) + 1 );
player.playerId.name = name;
for ( final Client client : replay.initData.clients )
if ( client.name.equals( player.nameWithClan ) ) {
player.client = client;
break;
}
final Object[] playerIdArr = (Object[]) playerArr[ 1 ];
player.playerId.battleNetSubId = ( (Number) playerIdArr[ 2 ] ).intValue();
player.playerId.battleNetId = ( (Number) playerIdArr[ 3 ] ).intValue();
if ( player.playerId.battleNetSubId != 0 && player.playerId.battleNetId != 0 )
player.playerId.gateway = replay.initData.gateway; // Copy gateway from initData
player.raceString = byteArrToString( playerArr[ 2 ] );
final Object[] colorArr = (Object[]) playerArr[ 3 ];
for ( int j = 0; j < 4; j++ )
player.argbColor[ j ] = ( (Number) colorArr[ j ] ).intValue();
// playerArr[ 6 ] is handicap (currently taken from attributes)
// playerArr[ 7 ] is team (currently taken from attributes)
// playerArr[ 8 ] is the match result
switch ( (Integer) playerArr[ 8 ] ) { // 1=>win, 2=>loss, 0=>unknown
case 1 : player.isWinner = Boolean.TRUE ; break;
case 2 : player.isWinner = Boolean.FALSE; break;
}
playerList.add( player );
}
details.players = playerList.toArray( new Player[ playerList.size() ] );
details.originalMapName = byteArrToString( dtls[ 1 ] );
details.mapName = Settings.getMapAliasGroupName( details.originalMapName );
details.mapPreviewFileName = byteArrToString( ( (Object[]) dtls[ 3 ] )[ 0 ] );
details.saveTime = ( ( (Number) dtls[ 5 ] ).longValue() - 116444736000000000L ) / 10000L;
details.saveTimeZone = ( (Number) dtls[ 6 ] ).longValue() / ( 10000f * 1000 * 60 * 60 );
final Object[] dependencies = (Object[]) dtls[ 10 ];
details.dependencies = new String[ dependencies.length ];
final byte[] ext = new byte[ 4];
for ( int i = 0; i < dependencies.length; i++ ) {
// Dependency: 4 byte extension ("s2ma"), 4 byte gw code ("\0\0EU") and the rest is the SHA-256 data.
final byte[] dep = (byte[]) dependencies[ i ];
System.arraycopy( dep, 0, ext, 0, 4 );
try {
details.dependencies[ i ] = GeneralUtils.convertToHexString( dep, 8, dep.length - 8 ) + "." + new String( ext, "UTF-8" );
} catch ( final UnsupportedEncodingException uee ) {
uee.printStackTrace(); // Never to happen
}
}
// Order is important because HotS also requires WoL!
expansionCycle:
for ( final ExpansionLevel expansion : EnumCache.EXPANSIONS ) {
for ( final String dependency : details.dependencies ) {
if ( expansion.dependency.equals( dependency ) ) {
details.expansion = expansion;
break expansionCycle;
}
}
}
}
| void function( final byte[] data ) { final Details details = replay.details = new Details(); setWrapper( data, ByteOrder.LITTLE_ENDIAN ); final Object[] dtls = (Object[]) readStructure(); final Object[] playersArr = (Object[]) dtls[ 0 ]; final int playersCount = playersArr.length; final List< Player > playerList = new ArrayList< Player >( playersCount ); for ( int i = 0; i < playersCount; i++ ) { final Player player = new Player(); final Object[] playerArr = (Object[]) playersArr[ i ]; String name = byteArrToString( playerArr[ 0 ] ); if ( name.length() == 0 ) { player.playerId.name = STR.STRUTF-8" ); } catch ( final UnsupportedEncodingException uee ) { uee.printStackTrace(); } } expansionCycle: for ( final ExpansionLevel expansion : EnumCache.EXPANSIONS ) { for ( final String dependency : details.dependencies ) { if ( expansion.dependency.equals( dependency ) ) { details.expansion = expansion; break expansionCycle; } } } } | /**
* Parses replay details from the given data.
* @param data data of the replay details
*/ | Parses replay details from the given data | parseDetails | {
"repo_name": "icza/sc2gears",
"path": "zprj-Sc2gears-parsing-engine/src/hu/belicza/andras/sc2gears/sc2replay/ReplayParser.java",
"license": "apache-2.0",
"size": 48410
} | [
"hu.belicza.andras.sc2gears.sc2replay.model.Details",
"hu.belicza.andras.sc2gearspluginapi.api.sc2replay.ReplayConsts",
"java.io.UnsupportedEncodingException",
"java.nio.ByteOrder",
"java.util.ArrayList",
"java.util.List"
] | import hu.belicza.andras.sc2gears.sc2replay.model.Details; import hu.belicza.andras.sc2gearspluginapi.api.sc2replay.ReplayConsts; import java.io.UnsupportedEncodingException; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.List; | import hu.belicza.andras.sc2gears.sc2replay.model.*; import hu.belicza.andras.sc2gearspluginapi.api.sc2replay.*; import java.io.*; import java.nio.*; import java.util.*; | [
"hu.belicza.andras",
"java.io",
"java.nio",
"java.util"
] | hu.belicza.andras; java.io; java.nio; java.util; | 1,618,853 |
public void modelSnapshots(String jobId, int from, int size, Consumer<QueryPage<ModelSnapshot>> handler,
Consumer<Exception> errorHandler) {
modelSnapshots(jobId, from, size, null, true, QueryBuilders.matchAllQuery(), handler, errorHandler);
} | void function(String jobId, int from, int size, Consumer<QueryPage<ModelSnapshot>> handler, Consumer<Exception> errorHandler) { modelSnapshots(jobId, from, size, null, true, QueryBuilders.matchAllQuery(), handler, errorHandler); } | /**
* Get model snapshots for the job ordered by descending timestamp (newest first).
*
* @param jobId the job id
* @param from number of snapshots to skip
* @param size number of snapshots to retrieve
*/ | Get model snapshots for the job ordered by descending timestamp (newest first) | modelSnapshots | {
"repo_name": "coding0011/elasticsearch",
"path": "x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java",
"license": "apache-2.0",
"size": 86050
} | [
"java.util.function.Consumer",
"org.elasticsearch.index.query.QueryBuilders",
"org.elasticsearch.xpack.core.action.util.QueryPage",
"org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot"
] | import java.util.function.Consumer; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; | import java.util.function.*; import org.elasticsearch.index.query.*; import org.elasticsearch.xpack.core.action.util.*; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.*; | [
"java.util",
"org.elasticsearch.index",
"org.elasticsearch.xpack"
] | java.util; org.elasticsearch.index; org.elasticsearch.xpack; | 1,862,945 |
public static IllegalStateException newIllegalContextPoppingException(
RequestContext currentCtx, RequestContext contextInStorage) {
requireNonNull(currentCtx, "currentCtx");
requireNonNull(contextInStorage, "contextInStorage");
final IllegalStateException ex = new IllegalStateException(
"The currentCtx " + currentCtx + " is not the same as the context in the storage: " +
contextInStorage + ". This means the callback was called from " +
"unexpected thread or forgetting to close previous context.");
if (REPORTED_THREADS.add(Thread.currentThread())) {
logger.warn("An error occurred while popping a context", ex);
}
return ex;
} | static IllegalStateException function( RequestContext currentCtx, RequestContext contextInStorage) { requireNonNull(currentCtx, STR); requireNonNull(contextInStorage, STR); final IllegalStateException ex = new IllegalStateException( STR + currentCtx + STR + contextInStorage + STR + STR); if (REPORTED_THREADS.add(Thread.currentThread())) { logger.warn(STR, ex); } return ex; } | /**
* Returns an {@link IllegalStateException} which is raised when popping a context from
* the unexpected thread or forgetting to close the previous context.
*/ | Returns an <code>IllegalStateException</code> which is raised when popping a context from the unexpected thread or forgetting to close the previous context | newIllegalContextPoppingException | {
"repo_name": "anuraaga/armeria",
"path": "core/src/main/java/com/linecorp/armeria/internal/common/RequestContextUtil.java",
"license": "apache-2.0",
"size": 8620
} | [
"com.linecorp.armeria.common.RequestContext",
"java.util.Objects"
] | import com.linecorp.armeria.common.RequestContext; import java.util.Objects; | import com.linecorp.armeria.common.*; import java.util.*; | [
"com.linecorp.armeria",
"java.util"
] | com.linecorp.armeria; java.util; | 485,386 |
public int getSize(){
return this.size;
}
private XMLEncoder encode;
private int size; | int function(){ return this.size; } private XMLEncoder encode; private int size; | /**
* Get total number of objects stored in the file
* @return integer count of objects written in the file
*/ | Get total number of objects stored in the file | getSize | {
"repo_name": "pramod-rana7/algorithm",
"path": "com/oops/rana/cache/cacheWrite.java",
"license": "apache-2.0",
"size": 1449
} | [
"java.beans.XMLEncoder"
] | import java.beans.XMLEncoder; | import java.beans.*; | [
"java.beans"
] | java.beans; | 1,509,850 |
@Override
protected String getOutputExtension()
{
return ConstantsCommon.STRING_DOT + ConstantsOutput.EXTENSION_HTML;
}
| String function() { return ConstantsCommon.STRING_DOT + ConstantsOutput.EXTENSION_HTML; } | /**
* We have to redefine the output extension because it is not an Angular2 file.
* It is an HTML file.
*/ | We have to redefine the output extension because it is not an Angular2 file. It is an HTML file | getOutputExtension | {
"repo_name": "BBVA-CIB/APIRestGenerator",
"path": "generator.client.angular2/src/main/java/com/bbva/kltt/apirest/generator/client/angular2/velocity/example/ExampleLauncherClientAngular2Generator.java",
"license": "apache-2.0",
"size": 5826
} | [
"com.bbva.kltt.apirest.core.util.ConstantsCommon",
"com.bbva.kltt.apirest.core.util.ConstantsOutput"
] | import com.bbva.kltt.apirest.core.util.ConstantsCommon; import com.bbva.kltt.apirest.core.util.ConstantsOutput; | import com.bbva.kltt.apirest.core.util.*; | [
"com.bbva.kltt"
] | com.bbva.kltt; | 275,979 |
protected void basicProcess(final DistributionManager dm, final boolean waitForGrantor) {
final boolean isDebugEnabled_DLS = logger.isTraceEnabled(LogMarker.DLS);
if (isDebugEnabled_DLS) {
logger.trace(LogMarker.DLS, "[basicProcess] {}", this);
}
final DLockQueryReplyMessage replyMsg = new DLockQueryReplyMessage();
replyMsg.setProcessorId(this.processorId);
replyMsg.setRecipient(getSender());
replyMsg.replyCode = DLockQueryReplyMessage.NOT_GRANTOR;
replyMsg.lesseeThread = null;
replyMsg.leaseId = DLockService.INVALID_LEASE_ID;
replyMsg.leaseExpireTime = 0;
try {
if (svc == null || svc.isDestroyed())
return;
if (waitForGrantor) {
try {
this.grantor = DLockGrantor.waitForGrantor(this.svc);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
this.grantor = null;
}
}
if (grantor == null || grantor.isDestroyed()) {
return;
}
if (lockBatch) {
throw new UnsupportedOperationException(
"DLockQueryProcessor does not support lock batches");
} else {
DLockGrantToken grantToken;
try {
grantToken = grantor.handleLockQuery(this);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
grantToken = null;
}
if (grantToken != null) {
synchronized (grantToken) {
if (!grantToken.isDestroyed()) {
replyMsg.lesseeThread = grantToken.getRemoteThread();
replyMsg.leaseId = grantToken.getLockId();
replyMsg.leaseExpireTime = grantToken.getLeaseExpireTime();
}
}
}
}
replyMsg.replyCode = DLockQueryReplyMessage.OK;
} catch (LockGrantorDestroyedException ignore) {
} catch (LockServiceDestroyedException ignore) {
} catch (RuntimeException e) {
replyMsg.setException(new ReplyException(e));
if (isDebugEnabled_DLS) {
logger.trace(LogMarker.DLS, "[basicProcess] caught RuntimeException", e);
}
} catch (VirtualMachineError err) {
SystemFailure.initiateFailure(err);
// If this ever returns, rethrow the error. We're poisoned
// now, so don't let this thread continue.
throw err;
} catch (Error e) {
// Whenever you catch Error or Throwable, you must also
// catch VirtualMachineError (see above). However, there is
// _still_ a possibility that you are dealing with a cascading
// error condition, so you also need to check to see if the JVM
// is still usable:
SystemFailure.checkFailure();
replyMsg.setException(new ReplyException(e));
if (isDebugEnabled_DLS) {
logger.trace(LogMarker.DLS, "[basicProcess] caught Error", e);
}
} finally {
if (dm.getId().equals(getSender())) {
replyMsg.setSender(getSender());
replyMsg.dmProcess(dm);
} else {
dm.putOutgoing(replyMsg);
}
}
} | void function(final DistributionManager dm, final boolean waitForGrantor) { final boolean isDebugEnabled_DLS = logger.isTraceEnabled(LogMarker.DLS); if (isDebugEnabled_DLS) { logger.trace(LogMarker.DLS, STR, this); } final DLockQueryReplyMessage replyMsg = new DLockQueryReplyMessage(); replyMsg.setProcessorId(this.processorId); replyMsg.setRecipient(getSender()); replyMsg.replyCode = DLockQueryReplyMessage.NOT_GRANTOR; replyMsg.lesseeThread = null; replyMsg.leaseId = DLockService.INVALID_LEASE_ID; replyMsg.leaseExpireTime = 0; try { if (svc == null svc.isDestroyed()) return; if (waitForGrantor) { try { this.grantor = DLockGrantor.waitForGrantor(this.svc); } catch (InterruptedException e) { Thread.currentThread().interrupt(); this.grantor = null; } } if (grantor == null grantor.isDestroyed()) { return; } if (lockBatch) { throw new UnsupportedOperationException( STR); } else { DLockGrantToken grantToken; try { grantToken = grantor.handleLockQuery(this); } catch (InterruptedException e) { Thread.currentThread().interrupt(); grantToken = null; } if (grantToken != null) { synchronized (grantToken) { if (!grantToken.isDestroyed()) { replyMsg.lesseeThread = grantToken.getRemoteThread(); replyMsg.leaseId = grantToken.getLockId(); replyMsg.leaseExpireTime = grantToken.getLeaseExpireTime(); } } } } replyMsg.replyCode = DLockQueryReplyMessage.OK; } catch (LockGrantorDestroyedException ignore) { } catch (LockServiceDestroyedException ignore) { } catch (RuntimeException e) { replyMsg.setException(new ReplyException(e)); if (isDebugEnabled_DLS) { logger.trace(LogMarker.DLS, STR, e); } } catch (VirtualMachineError err) { SystemFailure.initiateFailure(err); throw err; } catch (Error e) { SystemFailure.checkFailure(); replyMsg.setException(new ReplyException(e)); if (isDebugEnabled_DLS) { logger.trace(LogMarker.DLS, STR, e); } } finally { if (dm.getId().equals(getSender())) { replyMsg.setSender(getSender()); replyMsg.dmProcess(dm); } else { dm.putOutgoing(replyMsg); } } } | /**
* Perform basic processing of this message.
* <p>
* this.svc and this.grantor must be set before calling this method.
*/ | Perform basic processing of this message. this.svc and this.grantor must be set before calling this method | basicProcess | {
"repo_name": "smanvi-pivotal/geode",
"path": "geode-core/src/main/java/org/apache/geode/distributed/internal/locks/DLockQueryProcessor.java",
"license": "apache-2.0",
"size": 16670
} | [
"org.apache.geode.SystemFailure",
"org.apache.geode.distributed.LockServiceDestroyedException",
"org.apache.geode.distributed.internal.DistributionManager",
"org.apache.geode.distributed.internal.ReplyException",
"org.apache.geode.distributed.internal.locks.DLockGrantor",
"org.apache.geode.internal.logging.log4j.LogMarker"
] | import org.apache.geode.SystemFailure; import org.apache.geode.distributed.LockServiceDestroyedException; import org.apache.geode.distributed.internal.DistributionManager; import org.apache.geode.distributed.internal.ReplyException; import org.apache.geode.distributed.internal.locks.DLockGrantor; import org.apache.geode.internal.logging.log4j.LogMarker; | import org.apache.geode.*; import org.apache.geode.distributed.*; import org.apache.geode.distributed.internal.*; import org.apache.geode.distributed.internal.locks.*; import org.apache.geode.internal.logging.log4j.*; | [
"org.apache.geode"
] | org.apache.geode; | 755,959 |
public static Field<Object> gCubeDecompress(Object __1) {
GCubeDecompress f = new GCubeDecompress();
f.set__1(__1);
return f.asField();
} | static Field<Object> function(Object __1) { GCubeDecompress f = new GCubeDecompress(); f.set__1(__1); return f.asField(); } | /**
* Get <code>public.g_cube_decompress</code> as a field.
*/ | Get <code>public.g_cube_decompress</code> as a field | gCubeDecompress | {
"repo_name": "Remper/sociallink",
"path": "alignments/src/main/java/eu/fbk/fm/alignments/index/db/Routines.java",
"license": "apache-2.0",
"size": 37686
} | [
"eu.fbk.fm.alignments.index.db.routines.GCubeDecompress",
"org.jooq.Field"
] | import eu.fbk.fm.alignments.index.db.routines.GCubeDecompress; import org.jooq.Field; | import eu.fbk.fm.alignments.index.db.routines.*; import org.jooq.*; | [
"eu.fbk.fm",
"org.jooq"
] | eu.fbk.fm; org.jooq; | 2,219,555 |
public void infoSeeLine(Line line, double distance, double direction, double distChange, double dirChange,
double bodyFacingDirection, double headFacingDirection); | void function(Line line, double distance, double direction, double distChange, double dirChange, double bodyFacingDirection, double headFacingDirection); | /**
* The controller is informed that one of the pitch lines is in sight.
*
* @param line a {@link com.github.robocup_atan.atan.model.enums.Line} object.
* @param distance The distance to the line.
* @param direction The direction of the line.
* @param distChange a double.
* @param dirChange a double.
* @param bodyFacingDirection a double.
* @param headFacingDirection a double.
*/ | The controller is informed that one of the pitch lines is in sight | infoSeeLine | {
"repo_name": "robocup-atan/atan",
"path": "src/main/java/com/github/robocup_atan/atan/model/ControllerPlayer.java",
"license": "mit",
"size": 19961
} | [
"com.github.robocup_atan.atan.model.enums.Line"
] | import com.github.robocup_atan.atan.model.enums.Line; | import com.github.robocup_atan.atan.model.enums.*; | [
"com.github.robocup_atan"
] | com.github.robocup_atan; | 1,110,322 |
public Transaction get(String transactionId)throws Exception
{
String urlString = url+"/"+transactionId;
String response = ZohoHTTPClient.get(urlString, getQueryMap());
Transaction transaction = bankTransactionParser.getTransaction(response);
return transaction;
}
| Transaction function(String transactionId)throws Exception { String urlString = url+"/"+transactionId; String response = ZohoHTTPClient.get(urlString, getQueryMap()); Transaction transaction = bankTransactionParser.getTransaction(response); return transaction; } | /**
* Fetch the details of a transaction by specifying the transaction_id.
* Pass the transactionId to get the details of a transaction.
* It returns the Transaction object.
* @param transactionId ID of the transaction.
* @return Returns the Transaction object.
*/ | Fetch the details of a transaction by specifying the transaction_id. Pass the transactionId to get the details of a transaction. It returns the Transaction object | get | {
"repo_name": "zoho/books-java-wrappers",
"path": "source/com/zoho/books/api/BankTransactionsApi.java",
"license": "mit",
"size": 18879
} | [
"com.zoho.books.model.Transaction",
"com.zoho.books.util.ZohoHTTPClient"
] | import com.zoho.books.model.Transaction; import com.zoho.books.util.ZohoHTTPClient; | import com.zoho.books.model.*; import com.zoho.books.util.*; | [
"com.zoho.books"
] | com.zoho.books; | 2,493,574 |
@Test()
public void testExplodeDNValidMultipleComponentWithMultivaluedNoExclude()
throws Exception
{
String[] s =
LDAPDN.explodeDN("givenName=Test+sn=User , dc=example, dc=com", false);
assertNotNull(s);
assertEquals(s.length, 3);
assertEquals(s[0], "givenName=Test+sn=User");
assertEquals(s[1], "dc=example");
assertEquals(s[2], "dc=com");
} | @Test() void function() throws Exception { String[] s = LDAPDN.explodeDN(STR, false); assertNotNull(s); assertEquals(s.length, 3); assertEquals(s[0], STR); assertEquals(s[1], STR); assertEquals(s[2], STR); } | /**
* Tests the {@code explodeDN} method for a valid DN with multiple components
* including a multivalued RDN and not excluding types.
*
* @throws Exception If an unexpected problem occurs.
*/ | Tests the explodeDN method for a valid DN with multiple components including a multivalued RDN and not excluding types | testExplodeDNValidMultipleComponentWithMultivaluedNoExclude | {
"repo_name": "UnboundID/ldapsdk",
"path": "tests/unit/src/com/unboundid/ldap/sdk/migrate/ldapjdk/LDAPDNTestCase.java",
"license": "gpl-2.0",
"size": 9791
} | [
"org.testng.annotations.Test"
] | import org.testng.annotations.Test; | import org.testng.annotations.*; | [
"org.testng.annotations"
] | org.testng.annotations; | 206,309 |
public double stringwidth(String string, @NotNull PDFFont font, double size)
{
if (font == null)
return 0;
return size * font.stringWidth(string) / 1000.0;
} | double function(String string, @NotNull PDFFont font, double size) { if (font == null) return 0; return size * font.stringWidth(string) / 1000.0; } | /**
* Returns the length of a string for a font.
*/ | Returns the length of a string for a font | stringwidth | {
"repo_name": "dlitz/resin",
"path": "modules/quercus/src/com/caucho/quercus/lib/pdf/PDF.java",
"license": "gpl-2.0",
"size": 21718
} | [
"com.caucho.quercus.annotation.NotNull"
] | import com.caucho.quercus.annotation.NotNull; | import com.caucho.quercus.annotation.*; | [
"com.caucho.quercus"
] | com.caucho.quercus; | 2,627,141 |
public boolean matchesAllIssues(Collection<Issue> issues, List<String> messages) {
Collection<Issue> issueCopy = new LinkedList<>(issues);
Collection<IssueMatcher> matcherCopy = new LinkedList<>(issueMatchers);
performMatching(issueCopy, matcherCopy, messages);
if (inverted) {
if (issueCopy.isEmpty()) {
explainIssues(issues, messages, inverted);
return false;
}
} else {
if (!issueCopy.isEmpty()) {
explainIssues(issueCopy, messages, inverted);
return false;
}
}
return true;
} | boolean function(Collection<Issue> issues, List<String> messages) { Collection<Issue> issueCopy = new LinkedList<>(issues); Collection<IssueMatcher> matcherCopy = new LinkedList<>(issueMatchers); performMatching(issueCopy, matcherCopy, messages); if (inverted) { if (issueCopy.isEmpty()) { explainIssues(issues, messages, inverted); return false; } } else { if (!issueCopy.isEmpty()) { explainIssues(issueCopy, messages, inverted); return false; } } return true; } | /**
* Matches the expectations in the added issues matchers against the given issues.
*
* @param issues
* the issues to match the expectations against
* @param messages
* if this parameter is not <code>null</code>, this method will add an explanatory message for each
* mismatch
* @return <code>true</code> if and only if every issue in the given collection was matched by an expectation
*/ | Matches the expectations in the added issues matchers against the given issues | matchesAllIssues | {
"repo_name": "lbeurerkellner/n4js",
"path": "testhelpers/org.eclipse.n4js.tests.helper/src/org/eclipse/n4js/tests/issues/IssueExpectations.java",
"license": "epl-1.0",
"size": 6420
} | [
"java.util.Collection",
"java.util.LinkedList",
"java.util.List",
"org.eclipse.xtext.validation.Issue"
] | import java.util.Collection; import java.util.LinkedList; import java.util.List; import org.eclipse.xtext.validation.Issue; | import java.util.*; import org.eclipse.xtext.validation.*; | [
"java.util",
"org.eclipse.xtext"
] | java.util; org.eclipse.xtext; | 1,809,987 |
private boolean isNullAddress(InetAddress addr) {
final byte a[] = addr.getAddress();
for (int i = 0; i < a.length; i++)
if (a[i] != 0)
return false;
return true;
} | boolean function(InetAddress addr) { final byte a[] = addr.getAddress(); for (int i = 0; i < a.length; i++) if (a[i] != 0) return false; return true; } | /**
* Determine whether the given address is actually the null address
* "0.0.0.0".
*
* @param relayAgentAddress
* @return
*/ | Determine whether the given address is actually the null address "0.0.0.0" | isNullAddress | {
"repo_name": "openthinclient/openthinclient-manager",
"path": "thirdparty/apacheds-protocol-dhcp/src/main/java/org/apache/directory/server/dhcp/protocol/DhcpProtocolHandler.java",
"license": "gpl-2.0",
"size": 5663
} | [
"java.net.InetAddress"
] | import java.net.InetAddress; | import java.net.*; | [
"java.net"
] | java.net; | 1,871,169 |
T visitCoverageExpressionOverlayLabel(@NotNull wcpsParser.CoverageExpressionOverlayLabelContext ctx); | T visitCoverageExpressionOverlayLabel(@NotNull wcpsParser.CoverageExpressionOverlayLabelContext ctx); | /**
* Visit a parse tree produced by {@link wcpsParser#CoverageExpressionOverlayLabel}.
*
* @param ctx the parse tree
* @return the visitor result
*/ | Visit a parse tree produced by <code>wcpsParser#CoverageExpressionOverlayLabel</code> | visitCoverageExpressionOverlayLabel | {
"repo_name": "diogo-andrade/DataHubSystem",
"path": "petascope/src/main/java/petascope/wcps2/parser/wcpsVisitor.java",
"license": "agpl-3.0",
"size": 28497
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 2,329,860 |
public MockForm getForm() {
return form;
} | MockForm function() { return form; } | /**
* Returns the form associated with this YaFormEditor.
*
* @return a MockForm
*/ | Returns the form associated with this YaFormEditor | getForm | {
"repo_name": "E-Hon/appinventor-sources",
"path": "appinventor/appengine/src/com/google/appinventor/client/editor/youngandroid/YaFormEditor.java",
"license": "apache-2.0",
"size": 28241
} | [
"com.google.appinventor.client.editor.simple.components.MockForm"
] | import com.google.appinventor.client.editor.simple.components.MockForm; | import com.google.appinventor.client.editor.simple.components.*; | [
"com.google.appinventor"
] | com.google.appinventor; | 196,570 |
@Test public void originServerSends407() throws Exception {
server.enqueue(new MockResponse().setResponseCode(407));
HttpURLConnection conn = factory.open(server.url("/").url());
try {
conn.getResponseCode();
fail();
} catch (IOException ignored) {
}
} | @Test void function() throws Exception { server.enqueue(new MockResponse().setResponseCode(407)); HttpURLConnection conn = factory.open(server.url("/").url()); try { conn.getResponseCode(); fail(); } catch (IOException ignored) { } } | /**
* Response code 407 should only come from proxy servers. Android's client throws if it is sent by
* an origin server.
*/ | Response code 407 should only come from proxy servers. Android's client throws if it is sent by an origin server | originServerSends407 | {
"repo_name": "Synix/okhttp",
"path": "okhttp-urlconnection/src/test/java/okhttp3/OkUrlFactoryTest.java",
"license": "apache-2.0",
"size": 8545
} | [
"java.io.IOException",
"java.net.HttpURLConnection",
"org.junit.Assert",
"org.junit.Test"
] | import java.io.IOException; import java.net.HttpURLConnection; import org.junit.Assert; import org.junit.Test; | import java.io.*; import java.net.*; import org.junit.*; | [
"java.io",
"java.net",
"org.junit"
] | java.io; java.net; org.junit; | 860,566 |
@Generated
@Selector("opaque")
public native boolean opaque(); | @Selector(STR) native boolean function(); | /**
* indicates the bitmap context will draw fully opaque. The preferredFormat sets this to NO.
*/ | indicates the bitmap context will draw fully opaque. The preferredFormat sets this to NO | opaque | {
"repo_name": "multi-os-engine/moe-core",
"path": "moe.apple/moe.platform.ios/src/main/java/apple/uikit/UIGraphicsImageRendererFormat.java",
"license": "apache-2.0",
"size": 7352
} | [
"org.moe.natj.objc.ann.Selector"
] | import org.moe.natj.objc.ann.Selector; | import org.moe.natj.objc.ann.*; | [
"org.moe.natj"
] | org.moe.natj; | 271,071 |
Map<String, String> get(); | Map<String, String> get(); | /**
* Retrieves all cookies
*
* @see #get(String)
*/ | Retrieves all cookies | get | {
"repo_name": "sumit784/java-cookie",
"path": "src/main/java/com/github/jscookie/javacookie/CookiesDefinition.java",
"license": "mit",
"size": 5053
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 515,024 |
private long obtainHeuristicValue(ArrayList<DiscreteVariable> linkedVars,
Type orderingType)
{
ProbabilityVariable probVar;
long value = 0;
if (orderingType == Type.MINIMUM_WEIGHT)
{
long weight = 1;
for (DiscreteVariable discrVar : linkedVars)
{
probVar = (ProbabilityVariable) discrVar;
weight *= probVar.numberValues();
}
value = weight;
}
return value;
}
| long function(ArrayList<DiscreteVariable> linkedVars, Type orderingType) { ProbabilityVariable probVar; long value = 0; if (orderingType == Type.MINIMUM_WEIGHT) { long weight = 1; for (DiscreteVariable discrVar : linkedVars) { probVar = (ProbabilityVariable) discrVar; weight *= probVar.numberValues(); } value = weight; } return value; } | /**
* Obtain the heuristic value of eliminating a variable, represented by the
* list of variables linked to it.
*
* @param linkedVars list of variables that are linked to the variable in
* question
* @param orderingType indicates which heuristic to use in the elimination
* @return the heuristic value
*/ | Obtain the heuristic value of eliminating a variable, represented by the list of variables linked to it | obtainHeuristicValue | {
"repo_name": "kingkybel/JavaBayes",
"path": "src/BayesianInferences/Ordering.java",
"license": "gpl-3.0",
"size": 21904
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 2,878,828 |
EObject getExtendedCapabilities(); | EObject getExtendedCapabilities(); | /**
* Returns the value of the '<em><b>Extended Capabilities</b></em>' containment reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* <!-- begin-model-doc -->
* Individual software vendors and servers can use this element to provide metadata about any additional server abilities.
* <!-- end-model-doc -->
* @return the value of the '<em>Extended Capabilities</em>' containment reference.
* @see #setExtendedCapabilities(EObject)
* @see net.opengis.ows11.Ows11Package#getDocumentRoot_ExtendedCapabilities()
* @model containment="true" upper="-2" transient="true" volatile="true" derived="true"
* extendedMetaData="kind='element' name='ExtendedCapabilities' namespace='##targetNamespace'"
* @generated
*/ | Returns the value of the 'Extended Capabilities' containment reference. Individual software vendors and servers can use this element to provide metadata about any additional server abilities. | getExtendedCapabilities | {
"repo_name": "geotools/geotools",
"path": "modules/ogc/net.opengis.ows/src/net/opengis/ows11/DocumentRoot.java",
"license": "lgpl-2.1",
"size": 76894
} | [
"org.eclipse.emf.ecore.EObject"
] | import org.eclipse.emf.ecore.EObject; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,004,812 |
public Collection<AbstractInputSource> getRegisteredInputSources(){
return this.registeredInputSources;
}
| Collection<AbstractInputSource> function(){ return this.registeredInputSources; } | /**
* Gets the registered input sources.
*
* @return the registered input sources
* @deprecated use getInputSources() instead
*/ | Gets the registered input sources | getRegisteredInputSources | {
"repo_name": "rogiermars/mt4j-core",
"path": "src/org/mt4j/input/InputManager.java",
"license": "gpl-2.0",
"size": 9221
} | [
"java.util.Collection",
"org.mt4j.input.inputSources.AbstractInputSource"
] | import java.util.Collection; import org.mt4j.input.inputSources.AbstractInputSource; | import java.util.*; import org.mt4j.input.*; | [
"java.util",
"org.mt4j.input"
] | java.util; org.mt4j.input; | 2,776,006 |
protected static UpdateOperation getUpdateOperation(Collection<URI> repositories,
IProgressMonitor progressMonitor) throws InstallerException
{
final ProvisioningUI provisioningUI = ProvisioningUI.getDefaultUI();
ProvisioningSession session = provisioningUI.getSession();
final UpdateOperation op = new UpdateOperation(session);
final boolean[] canContinue = new boolean[]
{
false
};
ArrayList<IRepositoryReference> references = new ArrayList<IRepositoryReference>();
ArrayList<URI> metadataRepositories = new ArrayList<URI>();
ArrayList<URI> artifactRepositories = new ArrayList<URI>();
IMetadataRepository repository = null;
for (Iterator<URI> iterator = repositories.iterator(); iterator.hasNext();)
{
URI uri = iterator.next();
try
{
repository = getMetadataRepository(uri, progressMonitor);
metadataRepositories.add(uri);
}
catch (OperationCanceledException e)
{
progressMonitor.setCanceled(true);
}
}
if (repository != null)
{
references.addAll(repository.getReferences());
}
for (IRepositoryReference reference : references)
{
if (reference.getOptions() == IRepository.ENABLED)
{
if (reference.getType() == IRepository.TYPE_METADATA)
{
metadataRepositories.add(reference.getLocation());
}
else
{
artifactRepositories.add(reference.getLocation());
}
}
}
if (!progressMonitor.isCanceled())
{
SubMonitor subMonitor = SubMonitor.convert(progressMonitor);
subMonitor.beginTask(InstallerNLS.P2Utilities_PreparingEnvironment, 200);
op.getProvisioningContext().setMetadataRepositories(
metadataRepositories.toArray(new URI[0]));
op.getProvisioningContext().setArtifactRepositories(
artifactRepositories.toArray(new URI[0]));
| static UpdateOperation function(Collection<URI> repositories, IProgressMonitor progressMonitor) throws InstallerException { final ProvisioningUI provisioningUI = ProvisioningUI.getDefaultUI(); ProvisioningSession session = provisioningUI.getSession(); final UpdateOperation op = new UpdateOperation(session); final boolean[] canContinue = new boolean[] { false }; ArrayList<IRepositoryReference> references = new ArrayList<IRepositoryReference>(); ArrayList<URI> metadataRepositories = new ArrayList<URI>(); ArrayList<URI> artifactRepositories = new ArrayList<URI>(); IMetadataRepository repository = null; for (Iterator<URI> iterator = repositories.iterator(); iterator.hasNext();) { URI uri = iterator.next(); try { repository = getMetadataRepository(uri, progressMonitor); metadataRepositories.add(uri); } catch (OperationCanceledException e) { progressMonitor.setCanceled(true); } } if (repository != null) { references.addAll(repository.getReferences()); } for (IRepositoryReference reference : references) { if (reference.getOptions() == IRepository.ENABLED) { if (reference.getType() == IRepository.TYPE_METADATA) { metadataRepositories.add(reference.getLocation()); } else { artifactRepositories.add(reference.getLocation()); } } } if (!progressMonitor.isCanceled()) { SubMonitor subMonitor = SubMonitor.convert(progressMonitor); subMonitor.beginTask(InstallerNLS.P2Utilities_PreparingEnvironment, 200); op.getProvisioningContext().setMetadataRepositories( metadataRepositories.toArray(new URI[0])); op.getProvisioningContext().setArtifactRepositories( artifactRepositories.toArray(new URI[0])); | /**
 * Retrieves the UpdateOperation that is necessary to update available IUs on the current Profile.
*
* @param repositories where the update should be searched
 * @param progressMonitor the progress monitor used to report progress and detect cancellation
 * @return the UpdateOperation configured with the discovered metadata and artifact repositories
* @throws InstallerException
 */ | Retrieves the UpdateOperation that is necessary to update available IUs on the current Profile | getUpdateOperation | {
"repo_name": "rex-xxx/mt6572_x201",
"path": "tools/motodev/src/plugins/installer/src/com/motorola/studio/android/installer/utilities/P2Utilities.java",
"license": "gpl-2.0",
"size": 30284
} | [
"com.motorola.studio.android.installer.InstallerException",
"com.motorola.studio.android.installer.i18n.InstallerNLS",
"java.util.ArrayList",
"java.util.Collection",
"java.util.Iterator",
"org.eclipse.core.runtime.IProgressMonitor",
"org.eclipse.core.runtime.OperationCanceledException",
"org.eclipse.core.runtime.SubMonitor",
"org.eclipse.equinox.p2.operations.ProvisioningSession",
"org.eclipse.equinox.p2.operations.UpdateOperation",
"org.eclipse.equinox.p2.repository.IRepository",
"org.eclipse.equinox.p2.repository.IRepositoryReference",
"org.eclipse.equinox.p2.repository.metadata.IMetadataRepository",
"org.eclipse.equinox.p2.ui.ProvisioningUI"
] | import com.motorola.studio.android.installer.InstallerException; import com.motorola.studio.android.installer.i18n.InstallerNLS; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.OperationCanceledException; import org.eclipse.core.runtime.SubMonitor; import org.eclipse.equinox.p2.operations.ProvisioningSession; import org.eclipse.equinox.p2.operations.UpdateOperation; import org.eclipse.equinox.p2.repository.IRepository; import org.eclipse.equinox.p2.repository.IRepositoryReference; import org.eclipse.equinox.p2.repository.metadata.IMetadataRepository; import org.eclipse.equinox.p2.ui.ProvisioningUI; | import com.motorola.studio.android.installer.*; import com.motorola.studio.android.installer.i18n.*; import java.util.*; import org.eclipse.core.runtime.*; import org.eclipse.equinox.p2.operations.*; import org.eclipse.equinox.p2.repository.*; import org.eclipse.equinox.p2.repository.metadata.*; import org.eclipse.equinox.p2.ui.*; | [
"com.motorola.studio",
"java.util",
"org.eclipse.core",
"org.eclipse.equinox"
] | com.motorola.studio; java.util; org.eclipse.core; org.eclipse.equinox; | 1,510,406 |
public void testInvalidParameterSpecException01() {
        InvalidParameterSpecException tE = new InvalidParameterSpecException();
assertNull("getMessage() must return null.", tE.getMessage());
assertNull("getCause() must return null", tE.getCause());
} | InvalidParameterSpecException tE = new InvalidParameterSpecException(); assertNull(STR, tE.getMessage()); assertNull(STR, tE.getCause()); } | /**
* Test for <code>InvalidParameterSpecException()</code> constructor
* Assertion: constructs InvalidParameterSpecException with no detail
* message
*/ | Test for <code>InvalidParameterSpecException()</code> constructor Assertion: constructs InvalidParameterSpecException with no detail message | testInvalidParameterSpecException01 | {
"repo_name": "AdmireTheDistance/android_libcore",
"path": "luni/src/test/java/tests/security/spec/InvalidParameterSpecExceptionTest.java",
"license": "gpl-2.0",
"size": 3040
} | [
"java.security.spec.InvalidParameterSpecException"
] | import java.security.spec.InvalidParameterSpecException; | import java.security.spec.*; | [
"java.security"
] | java.security; | 2,574,476 |
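A hedged sketch of where this exception surfaces outside the test above: java.security.AlgorithmParameters.init(AlgorithmParameterSpec) declares InvalidParameterSpecException. The algorithm name and the all-zero IV below are illustrative assumptions, not taken from the record.

import java.security.AlgorithmParameters;
import java.security.spec.InvalidParameterSpecException;
import javax.crypto.spec.IvParameterSpec;

public class InvalidParameterSpecExceptionUsage {
    public static void main(String[] args) throws Exception {
        AlgorithmParameters params = AlgorithmParameters.getInstance("AES");
        try {
            // init(AlgorithmParameterSpec) is declared to throw
            // InvalidParameterSpecException when the spec does not fit the algorithm.
            params.init(new IvParameterSpec(new byte[16]));
        } catch (InvalidParameterSpecException e) {
            System.err.println("Unsupported parameter spec: " + e.getMessage());
        }
    }
}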
@Override
public void generateFinally(JavaWriter out)
throws IOException
{
_next.generateFinally(out);
} | void function(JavaWriter out) throws IOException { _next.generateFinally(out); } | /**
* Generates finally code for the method
*/ | Generates finally code for the method | generateFinally | {
"repo_name": "christianchristensen/resin",
"path": "modules/kernel/src/com/caucho/config/gen/AbstractCallChain.java",
"license": "gpl-2.0",
"size": 7992
} | [
"com.caucho.java.JavaWriter",
"java.io.IOException"
] | import com.caucho.java.JavaWriter; import java.io.IOException; | import com.caucho.java.*; import java.io.*; | [
"com.caucho.java",
"java.io"
] | com.caucho.java; java.io; | 663,425 |
@ApiModelProperty(example = "null", required = true, value = "ID of the squad the member is in. If not applicable, will be set to -1")
public Long getSquadId() {
return squadId;
} | @ApiModelProperty(example = "null", required = true, value = STR) Long function() { return squadId; } | /**
* ID of the squad the member is in. If not applicable, will be set to -1
*
* @return squadId
**/ | ID of the squad the member is in. If not applicable, will be set to -1 | getSquadId | {
"repo_name": "GoldenGnu/eve-esi",
"path": "src/main/java/net/troja/eve/esi/model/FleetMembersResponse.java",
"license": "apache-2.0",
"size": 9772
} | [
"io.swagger.annotations.ApiModelProperty"
] | import io.swagger.annotations.ApiModelProperty; | import io.swagger.annotations.*; | [
"io.swagger.annotations"
] | io.swagger.annotations; | 611,082 |
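A minimal, hedged usage sketch of the -1 sentinel documented above; the helper class and method names are assumptions for illustration only.

import net.troja.eve.esi.model.FleetMembersResponse;

public final class SquadCheck {
    // Returns true only when the member is actually assigned to a squad;
    // getSquadId() is documented to return -1 when not applicable.
    public static boolean isInSquad(FleetMembersResponse member) {
        Long squadId = member.getSquadId();
        return squadId != null && squadId.longValue() != -1L;
    }
}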
public void doJoin(RunData data)
{
SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid());
// read the group ids to join
String id = data.getParameters().getString("itemReference");
if (id != null)
{
try
{
// bjones86 - SAK-24423 - joinable site settings - join the site
if( JoinableSiteSettings.doJoinForMembership( id ) )
{
addAlert( state, rb.getString( "mb.youhave2" ) + " " + SiteService.getSite( id ).getTitle() );
}
else
{
addAlert( state, rb.getString( "mb.join.notAllowed" ) );
}
// add to user auditing
List<String[]> userAuditList = new ArrayList<String[]>();
String currentUserEid = userDirectoryService.getCurrentUser().getEid();
String roleId = SiteService.getSite(id).getJoinerRole();
String[] userAuditString = {id,currentUserEid,roleId,userAuditService.USER_AUDIT_ACTION_ADD,userAuditRegistration.getDatabaseSourceKey(),currentUserEid};
userAuditList.add(userAuditString);
if (!userAuditList.isEmpty())
{
userAuditRegistration.addToUserAuditing(userAuditList);
}
}
catch (IdUnusedException e)
{
Log.warn("chef", this + ".doJoin(): " + e);
}
catch (PermissionException e)
{
Log.warn("chef", this + ".doJoin(): " + e);
}
catch (InUseException e)
{
addAlert(state, rb.getString("mb.sitebeing"));
}
}
// TODO: hard coding this frame id is fragile, portal dependent, and needs to be fixed -ggolden
schedulePeerFrameRefresh("sitenav");
//scheduleTopRefresh();
} // doJoin | void function(RunData data) { SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid()); String id = data.getParameters().getString(STR); if (id != null) { try { if( JoinableSiteSettings.doJoinForMembership( id ) ) { addAlert( state, rb.getString( STR ) + " " + SiteService.getSite( id ).getTitle() ); } else { addAlert( state, rb.getString( STR ) ); } List<String[]> userAuditList = new ArrayList<String[]>(); String currentUserEid = userDirectoryService.getCurrentUser().getEid(); String roleId = SiteService.getSite(id).getJoinerRole(); String[] userAuditString = {id,currentUserEid,roleId,userAuditService.USER_AUDIT_ACTION_ADD,userAuditRegistration.getDatabaseSourceKey(),currentUserEid}; userAuditList.add(userAuditString); if (!userAuditList.isEmpty()) { userAuditRegistration.addToUserAuditing(userAuditList); } } catch (IdUnusedException e) { Log.warn("chef", this + STR + e); } catch (PermissionException e) { Log.warn("chef", this + STR + e); } catch (InUseException e) { addAlert(state, rb.getString(STR)); } } schedulePeerFrameRefresh(STR); } | /**
* Handle the eventSubmit_doJoin command to have the user join one or more sites.
*/ | Handle the eventSubmit_doJoin command to have the user join one or more sites | doJoin | {
"repo_name": "whumph/sakai",
"path": "site-manage/site-manage-tool/tool/src/java/org/sakaiproject/site/tool/MembershipAction.java",
"license": "apache-2.0",
"size": 19183
} | [
"java.util.ArrayList",
"java.util.List",
"org.sakaiproject.cheftool.JetspeedRunData",
"org.sakaiproject.cheftool.RunData",
"org.sakaiproject.event.api.SessionState",
"org.sakaiproject.exception.IdUnusedException",
"org.sakaiproject.exception.InUseException",
"org.sakaiproject.exception.PermissionException",
"org.sakaiproject.site.cover.SiteService"
] | import java.util.ArrayList; import java.util.List; import org.sakaiproject.cheftool.JetspeedRunData; import org.sakaiproject.cheftool.RunData; import org.sakaiproject.event.api.SessionState; import org.sakaiproject.exception.IdUnusedException; import org.sakaiproject.exception.InUseException; import org.sakaiproject.exception.PermissionException; import org.sakaiproject.site.cover.SiteService; | import java.util.*; import org.sakaiproject.cheftool.*; import org.sakaiproject.event.api.*; import org.sakaiproject.exception.*; import org.sakaiproject.site.cover.*; | [
"java.util",
"org.sakaiproject.cheftool",
"org.sakaiproject.event",
"org.sakaiproject.exception",
"org.sakaiproject.site"
] | java.util; org.sakaiproject.cheftool; org.sakaiproject.event; org.sakaiproject.exception; org.sakaiproject.site; | 2,777,544 |
public void setUser(Person user) {
this.user = user;
} | void function(Person user) { this.user = user; } | /**
* Sets the user attribute.
*
* @param user The user to set.
*/ | Sets the user attribute | setUser | {
"repo_name": "bhutchinson/kfs",
"path": "kfs-ar/src/main/java/org/kuali/kfs/module/ar/businessobject/CollectionEvent.java",
"license": "agpl-3.0",
"size": 8911
} | [
"org.kuali.rice.kim.api.identity.Person"
] | import org.kuali.rice.kim.api.identity.Person; | import org.kuali.rice.kim.api.identity.*; | [
"org.kuali.rice"
] | org.kuali.rice; | 1,312,722 |
public void removeNamespaceBinding(String prefix) throws XBRLException; | void function(String prefix) throws XBRLException; | /**
* This should not need to be called but is available if the number of mappings
 * in the namespace mapping table grows inefficiently large.
* @param prefix the prefix to remove the namespace binding for
* @throws XBRLException
 */ | This should not need to be called but is available if the number of mappings in the namespace mapping table grows inefficiently large | removeNamespaceBinding | {
"repo_name": "martinggww/Programming",
"path": "XBRL/xbrl-api/module-api/src/main/java/org/xbrlapi/data/Store.java",
"license": "gpl-2.0",
"size": 59248
} | [
"org.xbrlapi.utilities.XBRLException"
] | import org.xbrlapi.utilities.XBRLException; | import org.xbrlapi.utilities.*; | [
"org.xbrlapi.utilities"
] | org.xbrlapi.utilities; | 1,919,420 |
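A hedged call-site sketch for the interface method above; the wrapper class is hypothetical and only shows the declared XBRLException being handled.

import org.xbrlapi.data.Store;
import org.xbrlapi.utilities.XBRLException;

public final class NamespaceBindingCleanup {
    // Drops one prefix-to-namespace binding from the store's mapping table.
    public static void dropBinding(Store store, String prefix) {
        try {
            store.removeNamespaceBinding(prefix);
        } catch (XBRLException e) {
            throw new RuntimeException("Could not remove binding for prefix " + prefix, e);
        }
    }
}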
EAttribute getRuleWithPattern_IsAbstract(); | EAttribute getRuleWithPattern_IsAbstract(); | /**
* Returns the meta object for the attribute '{@link anatlyzer.atlext.ATL.RuleWithPattern#isIsAbstract <em>Is Abstract</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Is Abstract</em>'.
* @see anatlyzer.atlext.ATL.RuleWithPattern#isIsAbstract()
* @see #getRuleWithPattern()
* @generated
*/ | Returns the meta object for the attribute '<code>anatlyzer.atlext.ATL.RuleWithPattern#isIsAbstract Is Abstract</code>'. | getRuleWithPattern_IsAbstract | {
"repo_name": "jesusc/anatlyzer",
"path": "plugins/anatlyzer.atl.typing/src-gen/anatlyzer/atlext/ATL/ATLPackage.java",
"license": "epl-1.0",
"size": 222096
} | [
"org.eclipse.emf.ecore.EAttribute"
] | import org.eclipse.emf.ecore.EAttribute; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 570,042 |
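A hedged usage sketch assuming the usual EMF convention that the generated ATLPackage interface exposes a singleton via eINSTANCE; nothing beyond that convention is taken from the record.

import anatlyzer.atlext.ATL.ATLPackage;
import org.eclipse.emf.ecore.EAttribute;

public final class AtlMetaObjects {
    public static void main(String[] args) {
        // Look up the meta object for RuleWithPattern's "isAbstract" attribute reflectively.
        EAttribute isAbstract = ATLPackage.eINSTANCE.getRuleWithPattern_IsAbstract();
        System.out.println(isAbstract.getName() + " defined on "
                + isAbstract.getEContainingClass().getName());
    }
}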
public Observable<ServiceResponse<List<WorkspaceInner>>> listByResourceGroupWithServiceResponseAsync(String resourceGroupName) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
} | Observable<ServiceResponse<List<WorkspaceInner>>> function(String resourceGroupName) { if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException(STR); } | /**
* Gets workspaces in a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @return the observable to the List<WorkspaceInner> object
*/ | Gets workspaces in a resource group | listByResourceGroupWithServiceResponseAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/loganalytics/mgmt-v2020_08_01/src/main/java/com/microsoft/azure/management/loganalytics/v2020_08_01/implementation/WorkspacesInner.java",
"license": "mit",
"size": 57374
} | [
"com.microsoft.rest.ServiceResponse",
"java.util.List"
] | import com.microsoft.rest.ServiceResponse; import java.util.List; | import com.microsoft.rest.*; import java.util.*; | [
"com.microsoft.rest",
"java.util"
] | com.microsoft.rest; java.util; | 2,103,188 |
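A hedged consumption sketch for the Observable returned above; the WorkspacesInner client instance and the name() accessor on WorkspaceInner are assumptions based on the usual Azure fluent-inner conventions.

import com.microsoft.azure.management.loganalytics.v2020_08_01.implementation.WorkspaceInner;
import com.microsoft.azure.management.loganalytics.v2020_08_01.implementation.WorkspacesInner;

public final class ListWorkspaces {
    // Prints the names of all workspaces in one resource group as responses arrive.
    public static void printNames(WorkspacesInner workspaces, String resourceGroupName) {
        workspaces.listByResourceGroupWithServiceResponseAsync(resourceGroupName)
                .subscribe(response -> {
                    for (WorkspaceInner workspace : response.body()) {
                        System.out.println(workspace.name());
                    }
                });
    }
}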
return new TypeSafeMatcher<Table>() { | return new TypeSafeMatcher<Table>() { | /**
* Returns whether the {@link Table table} has a certain number of
* {@link TableRow rows}.
*
* @param count the expected number of Rows
* @return whether the table has the expected number of rows
* @since 0.9.7
*/ | Returns whether the <code>Table table</code> has a certain number of <code>TableRow rows</code> | hasNumberOfRows | {
"repo_name": "testIT-WebTester/webtester-core",
"path": "webtester-support-hamcrest/src/main/java/info/novatec/testit/webtester/support/hamcrest/TableMatcher.java",
"license": "apache-2.0",
"size": 1450
} | [
"info.novatec.testit.webtester.pageobjects.Table",
"org.hamcrest.TypeSafeMatcher"
] | import info.novatec.testit.webtester.pageobjects.Table; import org.hamcrest.TypeSafeMatcher; | import info.novatec.testit.webtester.pageobjects.*; import org.hamcrest.*; | [
"info.novatec.testit",
"org.hamcrest"
] | info.novatec.testit; org.hamcrest; | 1,867,664 |
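A hedged test-side sketch assuming the matcher above is exposed as the usual static factory on TableMatcher; the page-object wiring is omitted.

import static info.novatec.testit.webtester.support.hamcrest.TableMatcher.hasNumberOfRows;
import static org.hamcrest.MatcherAssert.assertThat;

import info.novatec.testit.webtester.pageobjects.Table;

public final class TableAssertions {
    // Fails with a descriptive Hamcrest mismatch message when the row count differs.
    public static void assertHasThreeRows(Table resultsTable) {
        assertThat(resultsTable, hasNumberOfRows(3));
    }
}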
Map<String, ?> getProperties(T service); | Map<String, ?> getProperties(T service); | /**
* Retrieve the metadata associated to a registered service.
*
* @param service the service for which to retrieve metadata
* @return the metadata associated with the service
*/ | Retrieve the metadata associated to a registered service | getProperties | {
"repo_name": "apache/servicemix4-nmr",
"path": "nmr/api/src/main/java/org/apache/servicemix/nmr/api/service/ServiceRegistry.java",
"license": "apache-2.0",
"size": 1966
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 1,530,746 |
@Override
public UrlMatch match(final String url) {
final Matcher matcher = compiledUrl.matcher(url);
if (matcher.matches()) {
return new UrlMatch(extractParameters(matcher));
}
return null;
} | UrlMatch function(final String url) { final Matcher matcher = compiledUrl.matcher(url); if (matcher.matches()) { return new UrlMatch(extractParameters(matcher)); } return null; } | /**
* Test the given URL against the underlying pattern to determine if it
* matches, returning the results in a UrlMatch instance. If the URL
* matches, parse any applicable parameters from it, placing those also in
* the UrlMatch instance accessible by their parameter names.
*
* @param url
 * a URL string with or without a query string.
* @return a UrlMatch instance reflecting the outcome of the comparison, if
* matched. Otherwise, null.
*/ | Test the given URL against the underlying pattern to determine if it matches, returning the results in a UrlMatch instance. If the URL matches, parse any applicable parameters from it, placing those also in the UrlMatch instance accessible by their parameter names | match | {
"repo_name": "geronimo-iia/restexpress",
"path": "restexpress-core/src/main/java/org/restexpress/url/UrlPattern.java",
"license": "apache-2.0",
"size": 8239
} | [
"java.util.regex.Matcher"
] | import java.util.regex.Matcher; | import java.util.regex.*; | [
"java.util"
] | java.util; | 2,816,244 |
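A hedged usage sketch of the match flow above; the single-argument UrlPattern constructor and the UrlMatch.get(String) parameter accessor are assumptions about this library's API, used for illustration only.

import org.restexpress.url.UrlMatch;
import org.restexpress.url.UrlPattern;

public final class UrlMatchingExample {
    public static void main(String[] args) {
        UrlPattern pattern = new UrlPattern("/orders/{orderId}");
        UrlMatch match = pattern.match("/orders/42");
        if (match != null) {
            // Parameters parsed from the URL are keyed by their placeholder names.
            System.out.println("orderId = " + match.get("orderId"));
        } else {
            System.out.println("no match");
        }
    }
}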
public static NBTTagCompound getNBT(ItemStack stack) {
initNBT(stack);
return stack.getTagCompound();
}
// SETTERS /////////////////////////////////////////////////////////////////// | static NBTTagCompound function(ItemStack stack) { initNBT(stack); return stack.getTagCompound(); } | /** Gets the NBTTagCompound in an ItemStack. Tries to init it
* previously in case there isn't one present **/ | Gets the NBTTagCompound in an ItemStack. Tries to init it | getNBT | {
"repo_name": "bafomdad/realfilingcabinet",
"path": "com/bafomdad/realfilingcabinet/utils/NBTUtils.java",
"license": "mit",
"size": 5196
} | [
"net.minecraft.item.ItemStack",
"net.minecraft.nbt.NBTTagCompound"
] | import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; | import net.minecraft.item.*; import net.minecraft.nbt.*; | [
"net.minecraft.item",
"net.minecraft.nbt"
] | net.minecraft.item; net.minecraft.nbt; | 600,821 |
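A hedged call-site sketch for the getter above; the "owner" key and helper class are illustrative assumptions.

import com.bafomdad.realfilingcabinet.utils.NBTUtils;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;

public final class StackTagging {
    // getNBT initializes the stack's tag compound first if it is missing,
    // so the returned tag can be written to directly.
    public static void setOwner(ItemStack stack, String ownerName) {
        NBTTagCompound tag = NBTUtils.getNBT(stack);
        tag.setString("owner", ownerName);
    }
}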
@SuppressWarnings("TooManyParameters")
static <T, U, V, W, X, Y> Attributes of(
AttributeKey<T> key1,
T value1,
AttributeKey<U> key2,
U value2,
AttributeKey<V> key3,
V value3,
AttributeKey<W> key4,
W value4,
AttributeKey<X> key5,
X value5,
AttributeKey<Y> key6,
Y value6) {
return sortAndFilterToAttributes(
key1, value1,
key2, value2,
key3, value3,
key4, value4,
key5, value5,
key6, value6);
} | @SuppressWarnings(STR) static <T, U, V, W, X, Y> Attributes of( AttributeKey<T> key1, T value1, AttributeKey<U> key2, U value2, AttributeKey<V> key3, V value3, AttributeKey<W> key4, W value4, AttributeKey<X> key5, X value5, AttributeKey<Y> key6, Y value6) { return sortAndFilterToAttributes( key1, value1, key2, value2, key3, value3, key4, value4, key5, value5, key6, value6); } | /**
* Returns a {@link Attributes} instance with the given key-value pairs. Order of the keys is not
* preserved. Duplicate keys will be removed.
*/ | Returns a <code>Attributes</code> instance with the given key-value pairs. Order of the keys is not preserved. Duplicate keys will be removed | of | {
"repo_name": "open-telemetry/opentelemetry-java",
"path": "api/all/src/main/java/io/opentelemetry/api/common/Attributes.java",
"license": "apache-2.0",
"size": 5542
} | [
"io.opentelemetry.api.common.ArrayBackedAttributes"
] | import io.opentelemetry.api.common.ArrayBackedAttributes; | import io.opentelemetry.api.common.*; | [
"io.opentelemetry.api"
] | io.opentelemetry.api; | 157,938 |
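A hedged usage sketch of the typed key/value factory above; the attribute names are illustrative only.

import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;

public final class AttributesExample {
    public static void main(String[] args) {
        // Keys are typed; duplicates are removed and ordering is not preserved.
        Attributes attrs = Attributes.of(
                AttributeKey.stringKey("http.method"), "GET",
                AttributeKey.longKey("http.status_code"), 200L,
                AttributeKey.booleanKey("error"), false);
        System.out.println(attrs.size() + " attributes: " + attrs);
    }
}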
@Ignore
@Test
public void testExternalJDBCDriverUsage() throws InitializationException, SQLException {
final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
final DBCPConnectionPool service = new DBCPConnectionPool();
runner.addControllerService("test-external-jar", service);
// set MariaDB database connection url
runner.setProperty(service, DBCPConnectionPool.DATABASE_URL, "jdbc:mariadb://localhost:3306/" + "testdb");
runner.setProperty(service, DBCPConnectionPool.DB_DRIVERNAME, "org.mariadb.jdbc.Driver");
runner.setProperty(service, DBCPConnectionPool.DB_DRIVER_LOCATION, "file:///var/tmp/mariadb-java-client-1.1.7.jar");
runner.setProperty(service, DBCPConnectionPool.DB_USER, "tester");
runner.setProperty(service, DBCPConnectionPool.DB_PASSWORD, "testerp");
runner.enableControllerService(service);
runner.assertValid(service);
final DBCPService dbcpService = (DBCPService) runner.getProcessContext().getControllerServiceLookup().getControllerService("test-external-jar");
Assert.assertNotNull(dbcpService);
final Connection connection = dbcpService.getConnection();
Assert.assertNotNull(connection);
createInsertSelectDrop(connection);
connection.close(); // return to pool
}
@Rule
public ExpectedException exception = ExpectedException.none(); | void function() throws InitializationException, SQLException { final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class); final DBCPConnectionPool service = new DBCPConnectionPool(); runner.addControllerService(STR, service); runner.setProperty(service, DBCPConnectionPool.DATABASE_URL, STRorg.mariadb.jdbc.DriverSTRfile: runner.setProperty(service, DBCPConnectionPool.DB_USER, STR); runner.setProperty(service, DBCPConnectionPool.DB_PASSWORD, STR); runner.enableControllerService(service); runner.assertValid(service); final DBCPService dbcpService = (DBCPService) runner.getProcessContext().getControllerServiceLookup().getControllerService(STR); Assert.assertNotNull(dbcpService); final Connection connection = dbcpService.getConnection(); Assert.assertNotNull(connection); createInsertSelectDrop(connection); connection.close(); } public ExpectedException exception = ExpectedException.none(); | /**
 * NB!!!! Prerequisite: the file mariadb-java-client-1.1.7.jar must be present in /var/tmp/. Prerequisite: access to a running MariaDB database server.
*
* Test database connection using external JDBC jar located by URL. Connect, create table, insert, select, drop table.
*
 */ | NB!!!! Prerequisite: the file mariadb-java-client-1.1.7.jar must be present in /var/tmp/. Prerequisite: access to a running MariaDB database server. Test database connection using external JDBC jar located by URL. Connect, create table, insert, select, drop table | testExternalJDBCDriverUsage | {
"repo_name": "mcgilman/nifi",
"path": "nifi-nar-bundles/nifi-standard-services/nifi-dbcp-service-bundle/nifi-dbcp-service/src/test/java/org/apache/nifi/dbcp/DBCPServiceTest.java",
"license": "apache-2.0",
"size": 28145
} | [
"java.sql.Connection",
"java.sql.SQLException",
"org.apache.nifi.reporting.InitializationException",
"org.apache.nifi.util.TestRunner",
"org.apache.nifi.util.TestRunners",
"org.junit.Assert",
"org.junit.rules.ExpectedException"
] | import java.sql.Connection; import java.sql.SQLException; import org.apache.nifi.reporting.InitializationException; import org.apache.nifi.util.TestRunner; import org.apache.nifi.util.TestRunners; import org.junit.Assert; import org.junit.rules.ExpectedException; | import java.sql.*; import org.apache.nifi.reporting.*; import org.apache.nifi.util.*; import org.junit.*; import org.junit.rules.*; | [
"java.sql",
"org.apache.nifi",
"org.junit",
"org.junit.rules"
] | java.sql; org.apache.nifi; org.junit; org.junit.rules; | 1,230,346 |
public void initialize() throws Exception {
logger.info("Attempting to initialize resource.");
Resource resource = find(null);
if (resource == null) {
logger.error("Unable to initialize resource!");
} else {
logger.info("Successfully initialized resource.");
}
}
| void function() throws Exception { logger.info(STR); Resource resource = find(null); if (resource == null) { logger.error(STR); } else { logger.info(STR); } } | /**
 * Initialize the singleton resource when the home is initialized.
 */ | Initialize the singleton resource when the home is initialized | initialize | {
"repo_name": "NCIP/cagrid-grid-incubation",
"path": "grid-incubation/incubator/projects/mdr/projects/mdrQuery/src/org/cagrid/mdrq/service/globus/resource/MDRQueryResourceHome.java",
"license": "bsd-3-clause",
"size": 3548
} | [
"org.globus.wsrf.Resource"
] | import org.globus.wsrf.Resource; | import org.globus.wsrf.*; | [
"org.globus.wsrf"
] | org.globus.wsrf; | 1,207,879 |
@Test
public void writeAndReadHeader_WithCompressionAndEncryption() throws IOException
{
CommitLogDescriptor descriptor = new CommitLogDescriptor(CommitLogDescriptor.current_version, 1, compression, enabledEncryption);
ByteBuffer buffer = ByteBuffer.allocate(16 * 1024);
CommitLogDescriptor.writeHeader(buffer, descriptor);
buffer.flip();
FileSegmentInputStream dataInput = new FileSegmentInputStream(buffer, null, 0);
CommitLogDescriptor result = CommitLogDescriptor.readHeader(dataInput, enabledEncryption);
Assert.assertNotNull(result);
Assert.assertEquals(compression, result.compression);
Assert.assertTrue(result.getEncryptionContext().isEnabled());
Assert.assertEquals(enabledEncryption, result.getEncryptionContext());
Assert.assertArrayEquals(iv, result.getEncryptionContext().getIV());
} | void function() throws IOException { CommitLogDescriptor descriptor = new CommitLogDescriptor(CommitLogDescriptor.current_version, 1, compression, enabledEncryption); ByteBuffer buffer = ByteBuffer.allocate(16 * 1024); CommitLogDescriptor.writeHeader(buffer, descriptor); buffer.flip(); FileSegmentInputStream dataInput = new FileSegmentInputStream(buffer, null, 0); CommitLogDescriptor result = CommitLogDescriptor.readHeader(dataInput, enabledEncryption); Assert.assertNotNull(result); Assert.assertEquals(compression, result.compression); Assert.assertTrue(result.getEncryptionContext().isEnabled()); Assert.assertEquals(enabledEncryption, result.getEncryptionContext()); Assert.assertArrayEquals(iv, result.getEncryptionContext().getIV()); } | /**
* Shouldn't happen in the real world (should only have either compression or enabledTdeOptions), but the header
* functionality should be correct
*/ | Shouldn't happen in the real world (should only have either compression or enabledTdeOptions), but the header functionality should be correct | writeAndReadHeader_WithCompressionAndEncryption | {
"repo_name": "jrwest/cassandra",
"path": "test/unit/org/apache/cassandra/db/commitlog/CommitLogDescriptorTest.java",
"license": "apache-2.0",
"size": 16541
} | [
"java.io.IOException",
"java.nio.ByteBuffer",
"org.apache.cassandra.io.util.FileSegmentInputStream",
"org.junit.Assert"
] | import java.io.IOException; import java.nio.ByteBuffer; import org.apache.cassandra.io.util.FileSegmentInputStream; import org.junit.Assert; | import java.io.*; import java.nio.*; import org.apache.cassandra.io.util.*; import org.junit.*; | [
"java.io",
"java.nio",
"org.apache.cassandra",
"org.junit"
] | java.io; java.nio; org.apache.cassandra; org.junit; | 2,560,144 |
private RelationalStorageAttributesDto getRelationalStorageAttributes(StorageEntity storageEntity)
{
// Get JDBC URL for this storage. This storage attribute is required and must have a non-blank value.
String jdbcUrl = storageHelper
.getStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.STORAGE_ATTRIBUTE_NAME_JDBC_URL), storageEntity, true, true);
// Get JDBC username for this storage. This storage attribute is not required and it is allowed to have a blank value.
String jdbcUsername = storageHelper
.getStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.STORAGE_ATTRIBUTE_NAME_JDBC_USERNAME), storageEntity, false,
false);
// Get JDBC user credential name for this storage. This storage attribute is not required and it is allowed to have a blank value.
String jdbcUserCredentialName = storageHelper
.getStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.STORAGE_ATTRIBUTE_NAME_JDBC_USER_CREDENTIAL_NAME), storageEntity,
false, false);
// Create and return a relational storage attributes DTO.
return new RelationalStorageAttributesDto(jdbcUrl, jdbcUsername, jdbcUserCredentialName);
} | RelationalStorageAttributesDto function(StorageEntity storageEntity) { String jdbcUrl = storageHelper .getStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.STORAGE_ATTRIBUTE_NAME_JDBC_URL), storageEntity, true, true); String jdbcUsername = storageHelper .getStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.STORAGE_ATTRIBUTE_NAME_JDBC_USERNAME), storageEntity, false, false); String jdbcUserCredentialName = storageHelper .getStorageAttributeValueByName(configurationHelper.getProperty(ConfigurationValue.STORAGE_ATTRIBUTE_NAME_JDBC_USER_CREDENTIAL_NAME), storageEntity, false, false); return new RelationalStorageAttributesDto(jdbcUrl, jdbcUsername, jdbcUserCredentialName); } | /**
 * Returns storage attributes required to access the relational table schema.
*
* @param storageEntity the storage entity
*
* @return the relational storage attributes DTO
 */ | Returns storage attributes required to access the relational table schema | getRelationalStorageAttributes | {
"repo_name": "FINRAOS/herd",
"path": "herd-code/herd-service/src/main/java/org/finra/herd/service/impl/RelationalTableRegistrationHelperServiceImpl.java",
"license": "apache-2.0",
"size": 36953
} | [
"org.finra.herd.model.dto.ConfigurationValue",
"org.finra.herd.model.dto.RelationalStorageAttributesDto",
"org.finra.herd.model.jpa.StorageEntity"
] | import org.finra.herd.model.dto.ConfigurationValue; import org.finra.herd.model.dto.RelationalStorageAttributesDto; import org.finra.herd.model.jpa.StorageEntity; | import org.finra.herd.model.dto.*; import org.finra.herd.model.jpa.*; | [
"org.finra.herd"
] | org.finra.herd; | 2,325,839 |
public PageImpl<T> setItems(List<T> items) {
this.items = items;
return this;
} | PageImpl<T> function(List<T> items) { this.items = items; return this; } | /**
* Sets the list of items.
*
* @param items the list of items in {@link List}.
* @return this Page object itself.
*/ | Sets the list of items | setItems | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/storagecache/mgmt-v2019_11_01/src/main/java/com/microsoft/azure/management/storagecache/v2019_11_01/implementation/PageImpl.java",
"license": "mit",
"size": 1762
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,671,489 |
@Test
public void virtualizePacket() {
Ethernet eth = new Ethernet();
eth.setSourceMACAddress(SRC_MAC_ADDR);
eth.setDestinationMACAddress(DST_MAC_ADDR);
eth.setVlanID((short) 1);
eth.setPayload(null);
InboundPacket pInPacket =
new DefaultInboundPacket(CP22, eth,
ByteBuffer.wrap(eth.serialize()));
PacketContext pContext =
new TestPacketContext(System.nanoTime(), pInPacket, null, false);
testPacketService.sendTestPacketContext(pContext);
PacketContext vContext = providerService.getRequestedPacketContext(0);
InboundPacket vInPacket = vContext.inPacket();
assertEquals("the packet should be received from VCP12",
VCP12, vInPacket.receivedFrom());
assertEquals("VLAN tag should be excludede", VlanId.UNTAGGED,
vInPacket.parsed().getVlanID());
}
private class TestPacketContext extends DefaultPacketContext {
protected TestPacketContext(long time, InboundPacket inPkt,
OutboundPacket outPkt, boolean block) {
super(time, inPkt, outPkt, block);
} | void function() { Ethernet eth = new Ethernet(); eth.setSourceMACAddress(SRC_MAC_ADDR); eth.setDestinationMACAddress(DST_MAC_ADDR); eth.setVlanID((short) 1); eth.setPayload(null); InboundPacket pInPacket = new DefaultInboundPacket(CP22, eth, ByteBuffer.wrap(eth.serialize())); PacketContext pContext = new TestPacketContext(System.nanoTime(), pInPacket, null, false); testPacketService.sendTestPacketContext(pContext); PacketContext vContext = providerService.getRequestedPacketContext(0); InboundPacket vInPacket = vContext.inPacket(); assertEquals(STR, VCP12, vInPacket.receivedFrom()); assertEquals(STR, VlanId.UNTAGGED, vInPacket.parsed().getVlanID()); } private class TestPacketContext extends DefaultPacketContext { protected TestPacketContext(long time, InboundPacket inPkt, OutboundPacket outPkt, boolean block) { super(time, inPkt, outPkt, block); } | /** Test the physical packet context is delivered to a proper (physical)
* virtual network and device.
*/ | Test the physical packet context is delivered to a proper (physical) virtual network and device | virtualizePacket | {
"repo_name": "gkatsikas/onos",
"path": "apps/virtual/app/src/test/java/org/onosproject/incubator/net/virtual/impl/provider/DefaultVirtualPacketProviderTest.java",
"license": "apache-2.0",
"size": 12712
} | [
"java.nio.ByteBuffer",
"org.junit.Assert",
"org.onlab.packet.Ethernet",
"org.onlab.packet.VlanId",
"org.onosproject.net.packet.DefaultInboundPacket",
"org.onosproject.net.packet.DefaultPacketContext",
"org.onosproject.net.packet.InboundPacket",
"org.onosproject.net.packet.OutboundPacket",
"org.onosproject.net.packet.PacketContext"
] | import java.nio.ByteBuffer; import org.junit.Assert; import org.onlab.packet.Ethernet; import org.onlab.packet.VlanId; import org.onosproject.net.packet.DefaultInboundPacket; import org.onosproject.net.packet.DefaultPacketContext; import org.onosproject.net.packet.InboundPacket; import org.onosproject.net.packet.OutboundPacket; import org.onosproject.net.packet.PacketContext; | import java.nio.*; import org.junit.*; import org.onlab.packet.*; import org.onosproject.net.packet.*; | [
"java.nio",
"org.junit",
"org.onlab.packet",
"org.onosproject.net"
] | java.nio; org.junit; org.onlab.packet; org.onosproject.net; | 1,546,303 |
protected SourceInfo determineSourceType(File sourceDir) throws IOException {
Properties props = new Properties();
File propsFile = new File(sourceDir, SOURCE_PROPERTY_FILENAME);
Reader reader = new FileReader(propsFile);
try {
props.load(reader);
} finally {
reader.close();
}
for (String antTaskName : derivatedSources.keySet()) {
boolean found =
Boolean.parseBoolean(props.getProperty(antTaskName, Boolean.FALSE.toString()));
if (found) {
return derivatedSources.get(antTaskName);
}
}
return null;
}
private static class SourceDirectoryFilter implements FileFilter
{
private String sourceName;
public SourceDirectoryFilter(String name) {
sourceName = name;
} | SourceInfo function(File sourceDir) throws IOException { Properties props = new Properties(); File propsFile = new File(sourceDir, SOURCE_PROPERTY_FILENAME); Reader reader = new FileReader(propsFile); try { props.load(reader); } finally { reader.close(); } for (String antTaskName : derivatedSources.keySet()) { boolean found = Boolean.parseBoolean(props.getProperty(antTaskName, Boolean.FALSE.toString())); if (found) { return derivatedSources.get(antTaskName); } } return null; } private static class SourceDirectoryFilter implements FileFilter { private String sourceName; public SourceDirectoryFilter(String name) { sourceName = name; } | /**
* For a candidate data source directory, check whether its
* <code>project.properties</code> file indicates what type of standard source
* it was derived from and return that parent source if possible.
*
* @param sourceDir The data source directory.
*
* @return The SourceInfo for the data source's parent standard source if
* possible, or <code>null</code> if no such source can be determined.
*
* @throws IOException if there is an I/O problem while determining the
* parent source.
*/ | For a candidate data source directory, check whether its <code>project.properties</code> file indicates what type of standard source it was derived from and return that parent source if possible | determineSourceType | {
"repo_name": "drhee/toxoMine",
"path": "intermine/MineManager/installer/src/main/java/org/intermine/install/project/source/SourceInfoLoader.java",
"license": "lgpl-2.1",
"size": 12185
} | [
"java.io.File",
"java.io.FileFilter",
"java.io.FileReader",
"java.io.IOException",
"java.io.Reader",
"java.util.Properties"
] | import java.io.File; import java.io.FileFilter; import java.io.FileReader; import java.io.IOException; import java.io.Reader; import java.util.Properties; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 1,228,957 |
public static <T> PCollection<T> createPrimitiveOutputInternal(
Pipeline pipeline,
WindowingStrategy<?, ?> windowingStrategy,
IsBounded isBounded) {
return new PCollection<T>(pipeline)
.setWindowingStrategyInternal(windowingStrategy)
.setIsBoundedInternal(isBounded);
} | static <T> PCollection<T> function( Pipeline pipeline, WindowingStrategy<?, ?> windowingStrategy, IsBounded isBounded) { return new PCollection<T>(pipeline) .setWindowingStrategyInternal(windowingStrategy) .setIsBoundedInternal(isBounded); } | /**
* Creates and returns a new {@link PCollection} for a primitive output.
*
* <p>For use by primitive transformations only.
*/ | Creates and returns a new <code>PCollection</code> for a primitive output. For use by primitive transformations only | createPrimitiveOutputInternal | {
"repo_name": "shakamunyi/beam",
"path": "sdks/java/core/src/main/java/com/google/cloud/dataflow/sdk/values/PCollection.java",
"license": "apache-2.0",
"size": 9264
} | [
"com.google.cloud.dataflow.sdk.Pipeline",
"com.google.cloud.dataflow.sdk.util.WindowingStrategy"
] | import com.google.cloud.dataflow.sdk.Pipeline; import com.google.cloud.dataflow.sdk.util.WindowingStrategy; | import com.google.cloud.dataflow.sdk.*; import com.google.cloud.dataflow.sdk.util.*; | [
"com.google.cloud"
] | com.google.cloud; | 1,193,572 |
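A hedged sketch of how a primitive transform's expansion might call the factory above; WindowingStrategy.globalDefault() and the IsBounded enum value are assumed from the SDK's usual API, not from this record.

import com.google.cloud.dataflow.sdk.Pipeline;
import com.google.cloud.dataflow.sdk.util.WindowingStrategy;
import com.google.cloud.dataflow.sdk.values.PCollection;
import com.google.cloud.dataflow.sdk.values.PCollection.IsBounded;

public final class PrimitiveOutputExample {
    // Only primitive transforms should create their outputs this way.
    public static PCollection<String> newBoundedOutput(Pipeline pipeline) {
        return PCollection.createPrimitiveOutputInternal(
                pipeline, WindowingStrategy.globalDefault(), IsBounded.BOUNDED);
    }
}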
@Test
public void testCopyAssetsToDirectory_validArgs() throws Exception {
AssetsHelper.copyAssetsToDirectory(context.getAssets(), OUTPUT_DIR, ASSETS_TO_COPY);
// Get the Files in the output directory as an ArrayList for ease of use (compared to array)
final List<File> filesInOutputDir = new ArrayList<>(Arrays.asList(OUTPUT_DIR.listFiles()));
// Post-condition 1: All assets to copy exist in the output directory
for (final String assetName : ASSETS_TO_COPY) {
final File potentialOutputFile = new File(OUTPUT_DIR, assetName);
assertThat("An asset was unexpectedly not copied.", filesInOutputDir.contains
(potentialOutputFile));
}
// Post-condition 2: None of the assets to ignore exist in the output directory
for (final String assetName : ASSETS_TO_IGNORE) {
final File potentialOutputFile = new File(OUTPUT_DIR, assetName);
assertThat("An asset was unexpectedly copied.", !filesInOutputDir.contains
(potentialOutputFile));
}
} | void function() throws Exception { AssetsHelper.copyAssetsToDirectory(context.getAssets(), OUTPUT_DIR, ASSETS_TO_COPY); final List<File> filesInOutputDir = new ArrayList<>(Arrays.asList(OUTPUT_DIR.listFiles())); for (final String assetName : ASSETS_TO_COPY) { final File potentialOutputFile = new File(OUTPUT_DIR, assetName); assertThat(STR, filesInOutputDir.contains (potentialOutputFile)); } for (final String assetName : ASSETS_TO_IGNORE) { final File potentialOutputFile = new File(OUTPUT_DIR, assetName); assertThat(STR, !filesInOutputDir.contains (potentialOutputFile)); } } | /**
* Test to verify that the
* {@link AssetsHelper#copyAssetsToDirectory(AssetManager, File, String...)} method functions
* correctly when provided with valid arguments.
*
* @throws Exception
* the method under test may throw this exception if some operation fails
*/ | Test to verify that the <code>AssetsHelper#copyAssetsToDirectory(AssetManager, File, String...)</code> method functions correctly when provided with valid arguments | testCopyAssetsToDirectory_validArgs | {
"repo_name": "MatthewTamlin/AndroidUtilities",
"path": "testing/src/androidTest/java/com/matthewtamlin/android_utilities/testing/TestAssetsHelper.java",
"license": "apache-2.0",
"size": 5190
} | [
"com.matthewtamlin.android_utilities.library.helpers.AssetsHelper",
"java.io.File",
"java.util.ArrayList",
"java.util.Arrays",
"java.util.List",
"org.hamcrest.MatcherAssert"
] | import com.matthewtamlin.android_utilities.library.helpers.AssetsHelper; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.hamcrest.MatcherAssert; | import com.matthewtamlin.android_utilities.library.helpers.*; import java.io.*; import java.util.*; import org.hamcrest.*; | [
"com.matthewtamlin.android_utilities",
"java.io",
"java.util",
"org.hamcrest"
] | com.matthewtamlin.android_utilities; java.io; java.util; org.hamcrest; | 1,602,137 |
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<GenericResourceExpandedInner>> listByResourceGroupSinglePageAsync(
String resourceGroupName, String filter, String expand, Integer top, Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.listByResourceGroup(
this.client.getEndpoint(),
resourceGroupName,
filter,
expand,
top,
this.client.getApiVersion(),
this.client.getSubscriptionId(),
accept,
context)
.map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null));
} | @ServiceMethod(returns = ReturnType.SINGLE) Mono<PagedResponse<GenericResourceExpandedInner>> function( String resourceGroupName, String filter, String expand, Integer top, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( STR)); } final String accept = STR; context = this.client.mergeContext(context); return service .listByResourceGroup( this.client.getEndpoint(), resourceGroupName, filter, expand, top, this.client.getApiVersion(), this.client.getSubscriptionId(), accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); } | /**
* Get all the resources for a resource group.
*
* @param resourceGroupName The resource group with the resources to get.
* @param filter The filter to apply on the operation.<br><br>The properties you can use for eq (equals)
* or ne (not equals) are: location, resourceType, name, resourceGroup, identity, identity/principalId, plan,
* plan/publisher, plan/product, plan/name, plan/version, and plan/promotionCode.<br><br>For
* example, to filter by a resource type, use: $filter=resourceType eq
* 'Microsoft.Network/virtualNetworks'<br><br>You can use substringof(value, property) in the
* filter. The properties you can use for substring are: name and resourceGroup.<br><br>For example,
* to get all resources with 'demo' anywhere in the name, use: $filter=substringof('demo',
* name)<br><br>You can link more than one substringof together by adding and/or
* operators.<br><br>You can filter by tag names and values. For example, to filter for a tag name
* and value, use $filter=tagName eq 'tag1' and tagValue eq 'Value1'. When you filter by a tag name and value,
* the tags for each resource are not returned in the results.<br><br>You can use some properties
* together when filtering. The combinations you can use are: substringof and/or resourceType, plan and
* plan/publisher and plan/name, identity and identity/principalId.
* @param expand Comma-separated list of additional properties to be included in the response. Valid values include
* `createdTime`, `changedTime` and `provisioningState`. For example, `$expand=createdTime,changedTime`.
* @param top The maximum number of results to return. If null is passed, returns all resources.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return all the resources for a resource group.
*/ | Get all the resources for a resource group | listByResourceGroupSinglePageAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanagerhybrid/azure-resourcemanager-resources/src/main/java/com/azure/resourcemanager/resources/implementation/ResourcesClientImpl.java",
"license": "mit",
"size": 224761
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedResponse",
"com.azure.core.http.rest.PagedResponseBase",
"com.azure.core.util.Context",
"com.azure.resourcemanager.resources.fluent.models.GenericResourceExpandedInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.util.Context; import com.azure.resourcemanager.resources.fluent.models.GenericResourceExpandedInner; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.resources.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 1,613,147 |
protected final void procTrans2FindNext(SrvTransactBuffer tbuf, SMBSrvPacket smbPkt)
throws IOException, SMBSrvException {
// Get the virtual circuit for the request
VirtualCircuit vc = m_sess.findVirtualCircuit(smbPkt.getUserId());
if ( vc == null) {
m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.NTInvalidParameter, SMBStatus.SRVNonSpecificError, SMBStatus.ErrSrv);
return;
}
// Get the tree connection details
int treeId = smbPkt.getTreeId();
TreeConnection conn = vc.findConnection(treeId);
if ( conn == null) {
m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.SRVInvalidTID, SMBStatus.ErrSrv);
return;
}
// Check if the user has the required access permission
if ( conn.hasReadAccess() == false) {
// User does not have the required access rights
m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.DOSAccessDenied, SMBStatus.ErrDos);
return;
}
// Get the search parameters
DataBuffer paramBuf = tbuf.getParameterBuffer();
int searchId = paramBuf.getShort();
int maxFiles = paramBuf.getShort();
int infoLevl = paramBuf.getShort();
int reskey = paramBuf.getInt();
int srchFlag = paramBuf.getShort();
String resumeName = paramBuf.getString(tbuf.isUnicode());
// Access the shared device disk interface
SearchContext ctx = null;
DiskInterface disk = null;
try {
// Access the disk interface
disk = (DiskInterface) conn.getSharedDevice().getInterface();
// Retrieve the search context
ctx = vc.getSearchContext(searchId);
if ( ctx == null) {
// DEBUG
if ( Debug.EnableError && m_sess.hasDebug(SMBSrvSession.DBG_SEARCH))
m_sess.debugPrintln("Search context null - [" + searchId + "]");
// Invalid search handle
m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.DOSNoMoreFiles, SMBStatus.ErrDos);
return;
}
// Debug
if ( Debug.EnableInfo && m_sess.hasDebug(SMBSrvSession.DBG_SEARCH))
m_sess.debugPrintln("Continue search [" + searchId + "] - " + resumeName + ", maxFiles=" + maxFiles
+ ", infoLevel=" + infoLevl + ", flags=0x" + Integer.toHexString(srchFlag));
// Create the reply transaction buffer
SrvTransactBuffer replyBuf = new SrvTransactBuffer(tbuf);
DataBuffer dataBuf = replyBuf.getDataBuffer();
// Determine the maximum return data length
int maxLen = replyBuf.getReturnDataLimit();
// Check if resume keys are required
boolean resumeReq = (srchFlag & FindFirstNext.ReturnResumeKey) != 0 ? true : false;
// Loop until we have filled the return buffer or there are no more files to return
int fileCnt = 0;
int packLen = 0;
int lastNameOff = 0;
boolean pktDone = false;
boolean searchDone = false;
FileInfo info = new FileInfo();
while (pktDone == false && fileCnt < maxFiles) {
// Get file information from the search
if ( ctx.nextFileInfo(info) == false) {
// No more files
pktDone = true;
searchDone = true;
}
// Check if the file information will fit into the return buffer
else if ( FindInfoPacker.calcInfoSize(info, infoLevl, false, true) <= maxLen) {
// Pack a dummy resume key, if required
if ( resumeReq)
dataBuf.putZeros(4);
// Save the offset to the last file information structure
lastNameOff = dataBuf.getPosition();
// Mask the file attributes
info.setFileAttributes(info.getFileAttributes() & StandardAttributes);
// Pack the file information
packLen = FindInfoPacker.packInfo(info, dataBuf, infoLevl, tbuf.isUnicode());
// Update the file count for this packet
fileCnt++;
// Recalculate the remaining buffer space
maxLen -= packLen;
}
else {
// Set the search restart point
ctx.restartAt(info);
// No more buffer space
pktDone = true;
}
}
// Pack the parameter block
paramBuf = replyBuf.getParameterBuffer();
paramBuf.putShort(fileCnt);
paramBuf.putShort(ctx.hasMoreFiles() ? 0 : 1);
paramBuf.putShort(0);
paramBuf.putShort(lastNameOff);
// Send the transaction response
SMBSrvTransPacket tpkt = new SMBSrvTransPacket(smbPkt);
tpkt.doTransactionResponse(m_sess, replyBuf, smbPkt);
// Debug
if ( Debug.EnableInfo && m_sess.hasDebug(SMBSrvSession.DBG_SEARCH))
m_sess.debugPrintln("Search [" + searchId + "] Returned " + fileCnt + " files, moreFiles=" + ctx.hasMoreFiles());
// Check if the search is complete
if ( searchDone == true) {
// Debug
if ( Debug.EnableInfo && m_sess.hasDebug(SMBSrvSession.DBG_SEARCH))
m_sess.debugPrintln("End start search [" + searchId + "] (Search complete)");
// Release the search context
vc.deallocateSearchSlot(searchId);
}
}
catch (FileNotFoundException ex) {
// Deallocate the search
if ( searchId != -1)
vc.deallocateSearchSlot(searchId);
// Search path does not exist
m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.DOSNoMoreFiles, SMBStatus.ErrDos);
}
catch (InvalidDeviceInterfaceException ex) {
// Deallocate the search
if ( searchId != -1)
vc.deallocateSearchSlot(searchId);
// Failed to get/initialize the disk interface
m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.DOSInvalidData, SMBStatus.ErrDos);
}
catch (UnsupportedInfoLevelException ex) {
// Deallocate the search
if ( searchId != -1)
vc.deallocateSearchSlot(searchId);
// Requested information level is not supported
m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.SRVNotSupported, SMBStatus.ErrSrv);
}
} | final void function(SrvTransactBuffer tbuf, SMBSrvPacket smbPkt) throws IOException, SMBSrvException { VirtualCircuit vc = m_sess.findVirtualCircuit(smbPkt.getUserId()); if ( vc == null) { m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.NTInvalidParameter, SMBStatus.SRVNonSpecificError, SMBStatus.ErrSrv); return; } int treeId = smbPkt.getTreeId(); TreeConnection conn = vc.findConnection(treeId); if ( conn == null) { m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.SRVInvalidTID, SMBStatus.ErrSrv); return; } if ( conn.hasReadAccess() == false) { m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.DOSAccessDenied, SMBStatus.ErrDos); return; } DataBuffer paramBuf = tbuf.getParameterBuffer(); int searchId = paramBuf.getShort(); int maxFiles = paramBuf.getShort(); int infoLevl = paramBuf.getShort(); int reskey = paramBuf.getInt(); int srchFlag = paramBuf.getShort(); String resumeName = paramBuf.getString(tbuf.isUnicode()); SearchContext ctx = null; DiskInterface disk = null; try { disk = (DiskInterface) conn.getSharedDevice().getInterface(); ctx = vc.getSearchContext(searchId); if ( ctx == null) { if ( Debug.EnableError && m_sess.hasDebug(SMBSrvSession.DBG_SEARCH)) m_sess.debugPrintln(STR + searchId + "]"); m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.DOSNoMoreFiles, SMBStatus.ErrDos); return; } if ( Debug.EnableInfo && m_sess.hasDebug(SMBSrvSession.DBG_SEARCH)) m_sess.debugPrintln(STR + searchId + STR + resumeName + STR + maxFiles + STR + infoLevl + STR + Integer.toHexString(srchFlag)); SrvTransactBuffer replyBuf = new SrvTransactBuffer(tbuf); DataBuffer dataBuf = replyBuf.getDataBuffer(); int maxLen = replyBuf.getReturnDataLimit(); boolean resumeReq = (srchFlag & FindFirstNext.ReturnResumeKey) != 0 ? true : false; int fileCnt = 0; int packLen = 0; int lastNameOff = 0; boolean pktDone = false; boolean searchDone = false; FileInfo info = new FileInfo(); while (pktDone == false && fileCnt < maxFiles) { if ( ctx.nextFileInfo(info) == false) { pktDone = true; searchDone = true; } else if ( FindInfoPacker.calcInfoSize(info, infoLevl, false, true) <= maxLen) { if ( resumeReq) dataBuf.putZeros(4); lastNameOff = dataBuf.getPosition(); info.setFileAttributes(info.getFileAttributes() & StandardAttributes); packLen = FindInfoPacker.packInfo(info, dataBuf, infoLevl, tbuf.isUnicode()); fileCnt++; maxLen -= packLen; } else { ctx.restartAt(info); pktDone = true; } } paramBuf = replyBuf.getParameterBuffer(); paramBuf.putShort(fileCnt); paramBuf.putShort(ctx.hasMoreFiles() ? 0 : 1); paramBuf.putShort(0); paramBuf.putShort(lastNameOff); SMBSrvTransPacket tpkt = new SMBSrvTransPacket(smbPkt); tpkt.doTransactionResponse(m_sess, replyBuf, smbPkt); if ( Debug.EnableInfo && m_sess.hasDebug(SMBSrvSession.DBG_SEARCH)) m_sess.debugPrintln(STR + searchId + STR + fileCnt + STR + ctx.hasMoreFiles()); if ( searchDone == true) { if ( Debug.EnableInfo && m_sess.hasDebug(SMBSrvSession.DBG_SEARCH)) m_sess.debugPrintln(STR + searchId + STR); vc.deallocateSearchSlot(searchId); } } catch (FileNotFoundException ex) { if ( searchId != -1) vc.deallocateSearchSlot(searchId); m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.DOSNoMoreFiles, SMBStatus.ErrDos); } catch (InvalidDeviceInterfaceException ex) { if ( searchId != -1) vc.deallocateSearchSlot(searchId); m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.DOSInvalidData, SMBStatus.ErrDos); } catch (UnsupportedInfoLevelException ex) { if ( searchId != -1) vc.deallocateSearchSlot(searchId); m_sess.sendErrorResponseSMB( smbPkt, SMBStatus.SRVNotSupported, SMBStatus.ErrSrv); } } | /**
* Process a transact2 file search continue request.
*
* @param tbuf Transaction request details
* @param smbPkt SMBSrvPacket
* @exception IOException
* @exception SMBSrvException
*/ | Process a transact2 file search continue request | procTrans2FindNext | {
"repo_name": "arcusys/Liferay-CIFS",
"path": "source/java/org/alfresco/jlan/smb/server/LanManProtocolHandler.java",
"license": "gpl-3.0",
"size": 104127
} | [
"java.io.FileNotFoundException",
"java.io.IOException",
"org.alfresco.jlan.debug.Debug",
"org.alfresco.jlan.server.core.InvalidDeviceInterfaceException",
"org.alfresco.jlan.server.filesys.DiskInterface",
"org.alfresco.jlan.server.filesys.FileInfo",
"org.alfresco.jlan.server.filesys.SearchContext",
"org.alfresco.jlan.server.filesys.TreeConnection",
"org.alfresco.jlan.server.filesys.UnsupportedInfoLevelException",
"org.alfresco.jlan.smb.FindFirstNext",
"org.alfresco.jlan.smb.SMBStatus",
"org.alfresco.jlan.util.DataBuffer"
] | import java.io.FileNotFoundException; import java.io.IOException; import org.alfresco.jlan.debug.Debug; import org.alfresco.jlan.server.core.InvalidDeviceInterfaceException; import org.alfresco.jlan.server.filesys.DiskInterface; import org.alfresco.jlan.server.filesys.FileInfo; import org.alfresco.jlan.server.filesys.SearchContext; import org.alfresco.jlan.server.filesys.TreeConnection; import org.alfresco.jlan.server.filesys.UnsupportedInfoLevelException; import org.alfresco.jlan.smb.FindFirstNext; import org.alfresco.jlan.smb.SMBStatus; import org.alfresco.jlan.util.DataBuffer; | import java.io.*; import org.alfresco.jlan.debug.*; import org.alfresco.jlan.server.core.*; import org.alfresco.jlan.server.filesys.*; import org.alfresco.jlan.smb.*; import org.alfresco.jlan.util.*; | [
"java.io",
"org.alfresco.jlan"
] | java.io; org.alfresco.jlan; | 1,039,626 |
public void removeEmitter(ConfigurableEmitter emitter) {
emitters.remove(emitter);
system.removeEmitter(emitter);
}
| void function(ConfigurableEmitter emitter) { emitters.remove(emitter); system.removeEmitter(emitter); } | /**
* Remove an emitter from the system held here
*
* @param emitter The emitter to be removed
*/ | Remove an emitter from the system held here | removeEmitter | {
"repo_name": "SenshiSentou/SourceFight",
"path": "slick_dev/tags/Slick0.19/tools/org/newdawn/slick/tools/peditor/ParticleCanvas.java",
"license": "bsd-2-clause",
"size": 8518
} | [
"org.newdawn.slick.particles.ConfigurableEmitter"
] | import org.newdawn.slick.particles.ConfigurableEmitter; | import org.newdawn.slick.particles.*; | [
"org.newdawn.slick"
] | org.newdawn.slick; | 1,950,549 |
public static Map<String, String> getDbToHumanReadableMap() {
Map<String, String> dbAliasToNameMap = new HashMap<String, String>();
try {
AnnotationFormat[] afs = ProjectController.getInstance().getCurrentAnnotationFormats();
for (AnnotationFormat af : afs) {
for (CustomField field : af.getCustomFields()) {
dbAliasToNameMap.put(field.getAlias(), field.getColumnName());
}
}
} catch (Exception e) {
e.printStackTrace();
}
return dbAliasToNameMap;
}
| static Map<String, String> function() { Map<String, String> dbAliasToNameMap = new HashMap<String, String>(); try { AnnotationFormat[] afs = ProjectController.getInstance().getCurrentAnnotationFormats(); for (AnnotationFormat af : afs) { for (CustomField field : af.getCustomFields()) { dbAliasToNameMap.put(field.getAlias(), field.getColumnName()); } } } catch (Exception e) { e.printStackTrace(); } return dbAliasToNameMap; } | /**
* Get the header for the table using the column aliases.
* @return a map of the column aliases to column names.
*/ | Get the header for the table using the column aliases | getDbToHumanReadableMap | {
"repo_name": "ronammar/pharmacogenomics",
"path": "src/pgx/PGXAnalysis.java",
"license": "lgpl-3.0",
"size": 17710
} | [
"java.util.HashMap",
"java.util.Map",
"org.ut.biolab.medsavant.client.project.ProjectController",
"org.ut.biolab.medsavant.shared.format.AnnotationFormat",
"org.ut.biolab.medsavant.shared.format.CustomField"
] | import java.util.HashMap; import java.util.Map; import org.ut.biolab.medsavant.client.project.ProjectController; import org.ut.biolab.medsavant.shared.format.AnnotationFormat; import org.ut.biolab.medsavant.shared.format.CustomField; | import java.util.*; import org.ut.biolab.medsavant.client.project.*; import org.ut.biolab.medsavant.shared.format.*; | [
"java.util",
"org.ut.biolab"
] | java.util; org.ut.biolab; | 1,960,692 |
WeekTO getWeekByDates( WeekTO weekTO ); | WeekTO getWeekByDates( WeekTO weekTO ); | /**
* Find Week by start date and final date.
*
* @param start
* @param end
* @return
*/ | Find Week by start date and final date | getWeekByDates | {
"repo_name": "sidlors/digital-booking",
"path": "digital-booking-persistence/src/main/java/mx/com/cinepolis/digital/booking/persistence/dao/WeekDAO.java",
"license": "epl-1.0",
"size": 2602
} | [
"mx.com.cinepolis.digital.booking.commons.to.WeekTO"
] | import mx.com.cinepolis.digital.booking.commons.to.WeekTO; | import mx.com.cinepolis.digital.booking.commons.to.*; | [
"mx.com.cinepolis"
] | mx.com.cinepolis; | 2,675,083 |
boolean isAnnotationToDelete(AnnotationData annotation)
{
if (toDelete == null || toDelete.size() == 0) return false;
Iterator<AnnotationData> i = toDelete.iterator();
AnnotationData data;
while (i.hasNext()) {
data = i.next();
if (data.getId() == annotation.getId())
return true;
}
return false;
} | boolean isAnnotationToDelete(AnnotationData annotation) { if (toDelete == null || toDelete.size() == 0) return false; Iterator<AnnotationData> i = toDelete.iterator(); AnnotationData data; while (i.hasNext()) { data = i.next(); if (data.getId() == annotation.getId()) return true; } return false; } | /**
* Returns <code>true</code> if the passed annotation has to be
* deleted, <code>false</code> otherwise.
*
* @param annotation The annotation to handle.
* @return See above.
*/ | Returns <code>true</code> if the passed annotation has to be deleted, <code>false</code> otherwise | isAnnotationToDelete | {
"repo_name": "dpwrussell/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/agents/metadata/editor/EditorModel.java",
"license": "gpl-2.0",
"size": 130987
} | [
"java.util.Iterator"
] | import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 2,735,800 |
public static ExoPlaybackException createForRemote(String message) {
return new ExoPlaybackException(TYPE_REMOTE, message);
}
private ExoPlaybackException(@Type int type, Throwable cause) {
this(
type,
cause,
null,
null,
C.INDEX_UNSET,
null,
C.FORMAT_HANDLED,
false);
}
private ExoPlaybackException(@Type int type, String message) {
this(
type,
null,
message,
null,
C.INDEX_UNSET,
null,
C.FORMAT_HANDLED,
false);
}
private ExoPlaybackException(
@Type int type,
@Nullable Throwable cause,
@Nullable String customMessage,
@Nullable String rendererName,
int rendererIndex,
@Nullable Format rendererFormat,
@FormatSupport int rendererFormatSupport,
boolean isRecoverable) {
this(
deriveMessage(
type,
customMessage,
rendererName,
rendererIndex,
rendererFormat,
rendererFormatSupport),
cause,
type,
rendererName,
rendererIndex,
rendererFormat,
rendererFormatSupport,
null,
SystemClock.elapsedRealtime(),
isRecoverable);
}
private ExoPlaybackException(
@Nullable String message,
@Nullable Throwable cause,
@Type int type,
@Nullable String rendererName,
int rendererIndex,
@Nullable Format rendererFormat,
@FormatSupport int rendererFormatSupport,
@Nullable MediaPeriodId mediaPeriodId,
long timestampMs,
boolean isRecoverable) {
super(message, cause);
this.type = type;
this.cause = cause;
this.rendererName = rendererName;
this.rendererIndex = rendererIndex;
this.rendererFormat = rendererFormat;
this.rendererFormatSupport = rendererFormatSupport;
this.mediaPeriodId = mediaPeriodId;
this.timestampMs = timestampMs;
this.isRecoverable = isRecoverable;
} | static ExoPlaybackException function(String message) { return new ExoPlaybackException(TYPE_REMOTE, message); } private ExoPlaybackException(@Type int type, Throwable cause) { this( type, cause, null, null, C.INDEX_UNSET, null, C.FORMAT_HANDLED, false); } private ExoPlaybackException(@Type int type, String message) { this( type, null, message, null, C.INDEX_UNSET, null, C.FORMAT_HANDLED, false); } private ExoPlaybackException( @Type int type, @Nullable Throwable cause, @Nullable String customMessage, @Nullable String rendererName, int rendererIndex, @Nullable Format rendererFormat, @FormatSupport int rendererFormatSupport, boolean isRecoverable) { this( deriveMessage( type, customMessage, rendererName, rendererIndex, rendererFormat, rendererFormatSupport), cause, type, rendererName, rendererIndex, rendererFormat, rendererFormatSupport, null, SystemClock.elapsedRealtime(), isRecoverable); } private ExoPlaybackException( @Nullable String message, @Nullable Throwable cause, @Type int type, @Nullable String rendererName, int rendererIndex, @Nullable Format rendererFormat, @FormatSupport int rendererFormatSupport, @Nullable MediaPeriodId mediaPeriodId, long timestampMs, boolean isRecoverable) { super(message, cause); this.type = type; this.cause = cause; this.rendererName = rendererName; this.rendererIndex = rendererIndex; this.rendererFormat = rendererFormat; this.rendererFormatSupport = rendererFormatSupport; this.mediaPeriodId = mediaPeriodId; this.timestampMs = timestampMs; this.isRecoverable = isRecoverable; } | /**
* Creates an instance of type {@link #TYPE_REMOTE}.
*
* @param message The message associated with the error.
* @return The created instance.
*/ | Creates an instance of type <code>#TYPE_REMOTE</code> | createForRemote | {
"repo_name": "amzn/exoplayer-amazon-port",
"path": "library/common/src/main/java/com/google/android/exoplayer2/ExoPlaybackException.java",
"license": "apache-2.0",
"size": 12776
} | [
"android.os.SystemClock",
"androidx.annotation.Nullable",
"com.google.android.exoplayer2.C",
"com.google.android.exoplayer2.source.MediaPeriodId"
] | import android.os.SystemClock; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.source.MediaPeriodId; | import android.os.*; import androidx.annotation.*; import com.google.android.exoplayer2.*; import com.google.android.exoplayer2.source.*; | [
"android.os",
"androidx.annotation",
"com.google.android"
] | android.os; androidx.annotation; com.google.android; | 2,845,590 |