method (string, lengths 13–441k) | clean_method (string, lengths 7–313k) | doc (string, lengths 17–17.3k) | comment (string, lengths 3–1.42k) | method_name (string, lengths 1–273) | extra (dict) | imports (sequence) | imports_info (string, lengths 19–34.8k) | cluster_imports_info (string, lengths 15–3.66k) | libraries (sequence) | libraries_info (string, lengths 6–661) | id (int64, 0–2.92M) |
---|---|---|---|---|---|---|---|---|---|---|---|
protected void handleNotAuthorized(HttpServletRequest request, HttpServletResponse response, Object handler)
throws ServletException, IOException {
response.sendError(HttpServletResponse.SC_FORBIDDEN);
} | void function(HttpServletRequest request, HttpServletResponse response, Object handler) throws ServletException, IOException { response.sendError(HttpServletResponse.SC_FORBIDDEN); } | /**
* Handle a request that is not authorized according to this interceptor.
* Default implementation sends HTTP status code 403 ("forbidden").
* <p>This method can be overridden to write a custom message, forward or
* redirect to some error page or login page, or throw a ServletException.
* @param request current HTTP request
* @param response current HTTP response
* @param handler chosen handler to execute, for type and/or instance evaluation
* @throws javax.servlet.ServletException if there is an internal error
* @throws java.io.IOException in case of an I/O error when writing the response
*/ | Handle a request that is not authorized according to this interceptor. Default implementation sends HTTP status code 403 ("forbidden"). This method can be overridden to write a custom message, forward or redirect to some error page or login page, or throw a ServletException | handleNotAuthorized | {
"repo_name": "cbeams-archive/spring-framework-2.5.x",
"path": "src/org/springframework/web/servlet/handler/UserRoleAuthorizationInterceptor.java",
"license": "apache-2.0",
"size": 2704
} | [
"java.io.IOException",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] | import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; | import java.io.*; import javax.servlet.*; import javax.servlet.http.*; | [
"java.io",
"javax.servlet"
] | java.io; javax.servlet; | 2,367,628 |
public PackageIdentifier getPackageId() {
return packageId;
} | PackageIdentifier function() { return packageId; } | /**
* Returns the package that "owns" this glob.
*
* <p>The glob evaluation code ensures that the boundaries of this package are not crossed.
*/ | Returns the package that "owns" this glob. The glob evaluation code ensures that the boundaries of this package are not crossed | getPackageId | {
"repo_name": "twitter-forks/bazel",
"path": "src/main/java/com/google/devtools/build/lib/skyframe/GlobDescriptor.java",
"license": "apache-2.0",
"size": 5410
} | [
"com.google.devtools.build.lib.cmdline.PackageIdentifier"
] | import com.google.devtools.build.lib.cmdline.PackageIdentifier; | import com.google.devtools.build.lib.cmdline.*; | [
"com.google.devtools"
] | com.google.devtools; | 1,728,830 |
public static void main(String [] args)
{
Scanner input = new Scanner(System.in);
System.out.print("Enter four integers: ");
int num1 = input.nextInt();
int num2 = input.nextInt();
int num3 = input.nextInt();
int num4 = input.nextInt();
input.close();
System.out.println("minA: " + MinOfFour.minA(num1, num2, num3, num4));
System.out.println("minB: " + MinOfFour.minB(num1, num2, num3, num4));
System.out.println("minC: " + MinOfFour.minC(num1, num2, num3, num4));
System.out.println("minD: " + MinOfFour.minD(num1, num2, num3, num4));
System.out.println("minE: " + MinOfFour.minE(num1, num2, num3, num4));
System.out.println("minF: " + MinOfFour.minF(num1, num2, num3, num4));
System.out.println("Math.min: " + Math.min(Math.min(num1, num2), Math.min(num3, num4)));
} | static void function(String [] args) { Scanner input = new Scanner(System.in); System.out.print(STR); int num1 = input.nextInt(); int num2 = input.nextInt(); int num3 = input.nextInt(); int num4 = input.nextInt(); input.close(); System.out.println(STR + MinOfFour.minA(num1, num2, num3, num4)); System.out.println(STR + MinOfFour.minB(num1, num2, num3, num4)); System.out.println(STR + MinOfFour.minC(num1, num2, num3, num4)); System.out.println(STR + MinOfFour.minD(num1, num2, num3, num4)); System.out.println(STR + MinOfFour.minE(num1, num2, num3, num4)); System.out.println(STR + MinOfFour.minF(num1, num2, num3, num4)); System.out.println(STR + Math.min(Math.min(num1, num2), Math.min(num3, num4))); } | /**
* Read four input values, compute the min various ways and print results.
* @param args
*/ | Read four input values, compute the min various ways and print results | main | {
"repo_name": "BoiseState/CS121-resources",
"path": "examples/chap04/MinOfFour.java",
"license": "mit",
"size": 3198
} | [
"java.util.Scanner"
] | import java.util.Scanner; | import java.util.*; | [
"java.util"
] | java.util; | 2,217,215 |
public static List<LogoutRequest> getLogoutRequests(final RequestContext context) {
return (List<LogoutRequest>) context.getFlowScope().get("logoutRequests");
} | static List<LogoutRequest> function(final RequestContext context) { return (List<LogoutRequest>) context.getFlowScope().get(STR); } | /**
* Gets the logout requests from flow scope.
*
* @param context the context
* @return the logout requests
*/ | Gets the logout requests from flow scope | getLogoutRequests | {
"repo_name": "xuchengdong/cas4.1.9",
"path": "cas-server-core/src/main/java/org/jasig/cas/web/support/WebUtils.java",
"license": "apache-2.0",
"size": 11872
} | [
"java.util.List",
"org.jasig.cas.logout.LogoutRequest",
"org.springframework.webflow.execution.RequestContext"
] | import java.util.List; import org.jasig.cas.logout.LogoutRequest; import org.springframework.webflow.execution.RequestContext; | import java.util.*; import org.jasig.cas.logout.*; import org.springframework.webflow.execution.*; | [
"java.util",
"org.jasig.cas",
"org.springframework.webflow"
] | java.util; org.jasig.cas; org.springframework.webflow; | 1,375,171 |
@Test
public void testTimedOutCommandDoesNotExecute() {
SingleThreadedPoolWithQueue pool = new SingleThreadedPoolWithQueue(5);
TestCircuitBreaker s1 = new TestCircuitBreaker();
TestCircuitBreaker s2 = new TestCircuitBreaker();
// execution will take 500ms, thread pool has a 600ms timeout
CommandWithCustomThreadPool c1 = new CommandWithCustomThreadPool(s1, pool, 500, HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionTimeoutInMilliseconds(600));
// execution will take 200ms, thread pool has a 20ms timeout
CommandWithCustomThreadPool c2 = new CommandWithCustomThreadPool(s2, pool, 200, HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionTimeoutInMilliseconds(20));
// queue up c1 first
Future<Boolean> c1f = c1.queue();
// now queue up c2 and wait on it
boolean receivedException = false;
try {
c2.queue().get();
} catch (Exception e) {
// we expect to get an exception here
receivedException = true;
}
if (!receivedException) {
fail("We expect to receive an exception for c2 as it's supposed to timeout.");
}
// c1 will complete after 500ms
try {
c1f.get();
} catch (Exception e1) {
e1.printStackTrace();
fail("we should not have failed while getting c1");
}
assertTrue("c1 is expected to executed but didn't", c1.didExecute);
// c2 will timeout after 20 ms ... we'll wait longer than the 200ms time to make sure
// the thread doesn't keep running in the background and execute
try {
Thread.sleep(400);
} catch (Exception e) {
throw new RuntimeException("Failed to sleep");
}
assertFalse("c2 is not expected to execute, but did", c2.didExecute);
assertEquals(1, s1.metrics.getRollingCount(HystrixRollingNumberEvent.SUCCESS));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.EXCEPTION_THROWN));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.FAILURE));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.BAD_REQUEST));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_REJECTION));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_FAILURE));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_SUCCESS));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.SEMAPHORE_REJECTED));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.SHORT_CIRCUITED));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.THREAD_POOL_REJECTED));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.TIMEOUT));
assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.RESPONSE_FROM_CACHE));
assertEquals(0, s1.metrics.getHealthCounts().getErrorPercentage());
assertEquals(0, s1.metrics.getCurrentConcurrentExecutionCount());
assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.SUCCESS));
assertEquals(1, s2.metrics.getRollingCount(HystrixRollingNumberEvent.EXCEPTION_THROWN));
assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.FAILURE));
assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.BAD_REQUEST));
assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_REJECTION));
assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_FAILURE));
assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_SUCCESS));
assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.SEMAPHORE_REJECTED));
assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.SHORT_CIRCUITED));
assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.THREAD_POOL_REJECTED));
assertEquals(1, s2.metrics.getRollingCount(HystrixRollingNumberEvent.TIMEOUT));
assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.RESPONSE_FROM_CACHE));
assertEquals(100, s2.metrics.getHealthCounts().getErrorPercentage());
assertEquals(0, s2.metrics.getCurrentConcurrentExecutionCount());
assertEquals(2, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
} | void function() { SingleThreadedPoolWithQueue pool = new SingleThreadedPoolWithQueue(5); TestCircuitBreaker s1 = new TestCircuitBreaker(); TestCircuitBreaker s2 = new TestCircuitBreaker(); CommandWithCustomThreadPool c1 = new CommandWithCustomThreadPool(s1, pool, 500, HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionTimeoutInMilliseconds(600)); CommandWithCustomThreadPool c2 = new CommandWithCustomThreadPool(s2, pool, 200, HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionTimeoutInMilliseconds(20)); Future<Boolean> c1f = c1.queue(); boolean receivedException = false; try { c2.queue().get(); } catch (Exception e) { receivedException = true; } if (!receivedException) { fail(STR); } try { c1f.get(); } catch (Exception e1) { e1.printStackTrace(); fail(STR); } assertTrue(STR, c1.didExecute); try { Thread.sleep(400); } catch (Exception e) { throw new RuntimeException(STR); } assertFalse(STR, c2.didExecute); assertEquals(1, s1.metrics.getRollingCount(HystrixRollingNumberEvent.SUCCESS)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.EXCEPTION_THROWN)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.FAILURE)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.BAD_REQUEST)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_REJECTION)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_FAILURE)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_SUCCESS)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.SEMAPHORE_REJECTED)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.SHORT_CIRCUITED)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.THREAD_POOL_REJECTED)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.TIMEOUT)); assertEquals(0, s1.metrics.getRollingCount(HystrixRollingNumberEvent.RESPONSE_FROM_CACHE)); assertEquals(0, s1.metrics.getHealthCounts().getErrorPercentage()); assertEquals(0, s1.metrics.getCurrentConcurrentExecutionCount()); assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.SUCCESS)); assertEquals(1, s2.metrics.getRollingCount(HystrixRollingNumberEvent.EXCEPTION_THROWN)); assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.FAILURE)); assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.BAD_REQUEST)); assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_REJECTION)); assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_FAILURE)); assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.FALLBACK_SUCCESS)); assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.SEMAPHORE_REJECTED)); assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.SHORT_CIRCUITED)); assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.THREAD_POOL_REJECTED)); assertEquals(1, s2.metrics.getRollingCount(HystrixRollingNumberEvent.TIMEOUT)); assertEquals(0, s2.metrics.getRollingCount(HystrixRollingNumberEvent.RESPONSE_FROM_CACHE)); assertEquals(100, s2.metrics.getHealthCounts().getErrorPercentage()); assertEquals(0, s2.metrics.getCurrentConcurrentExecutionCount()); assertEquals(2, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size()); } | /**
* If it has been sitting in the queue, it should not execute if timed out by the time it hits the queue.
*/ | If it has been sitting in the queue, it should not execute if timed out by the time it hits the queue | testTimedOutCommandDoesNotExecute | {
"repo_name": "manwithharmonica/Hystrix",
"path": "hystrix-core/src/test/java/com/netflix/hystrix/HystrixCommandTest.java",
"license": "apache-2.0",
"size": 265467
} | [
"com.netflix.hystrix.HystrixCircuitBreakerTest",
"com.netflix.hystrix.util.HystrixRollingNumberEvent",
"java.util.concurrent.Future",
"org.junit.Assert"
] | import com.netflix.hystrix.HystrixCircuitBreakerTest; import com.netflix.hystrix.util.HystrixRollingNumberEvent; import java.util.concurrent.Future; import org.junit.Assert; | import com.netflix.hystrix.*; import com.netflix.hystrix.util.*; import java.util.concurrent.*; import org.junit.*; | [
"com.netflix.hystrix",
"java.util",
"org.junit"
] | com.netflix.hystrix; java.util; org.junit; | 920,196 |
public static String getVersionName() {
synchronized (lock) {
if (versionName == null) {
try {
versionName = getPackageManager().getPackageInfo(getContext().getPackageName(), 0).versionName;
} catch (NameNotFoundException e) {
WLog.e(TAG, "Couldn't find info about own package", e);
}
}
}
return versionName;
} | static String function() { synchronized (lock) { if (versionName == null) { try { versionName = getPackageManager().getPackageInfo(getContext().getPackageName(), 0).versionName; } catch (NameNotFoundException e) { WLog.e(TAG, STR, e); } } } return versionName; } | /**
* Returns the version name for this app, as specified by the android:versionName attribute in the
* <manifest> element of the manifest.
*/ | Returns the version name for this app, as specified by the android:versionName attribute in the element of the manifest | getVersionName | {
"repo_name": "wiaio/wia-android-sdk",
"path": "Wia/src/main/java/io/wia/ManifestInfo.java",
"license": "mit",
"size": 11164
} | [
"android.content.pm.PackageManager"
] | import android.content.pm.PackageManager; | import android.content.pm.*; | [
"android.content"
] | android.content; | 1,277,432 |
public static <T> boolean isNotEmpty(final Collection<T> collection) {
return collection != null && !collection.isEmpty();
} | static <T> boolean function(final Collection<T> collection) { return collection != null && !collection.isEmpty(); } | /**
* Check if collection is not null and not empty
*
* @param collection
* Collection to check
*
* @return empty or not
*/ | Check if collection is not null and not empty | isNotEmpty | {
"repo_name": "sauloperez/sos",
"path": "src/core/api/src/main/java/org/n52/sos/util/CollectionHelper.java",
"license": "apache-2.0",
"size": 11968
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 2,037,952 |
boolean onAccept(Client client); | boolean onAccept(Client client); | /**
* Action to perform when a new client is accepted by the server
* @param client - newly accepted client
*/ | Action to perform when a new client is accepted by the server | onAccept | {
"repo_name": "maheshkhanwalkar/NioFlex",
"path": "src/main/java/com/revtekk/nioflex/config/ServerHooks.java",
"license": "apache-2.0",
"size": 694
} | [
"com.revtekk.nioflex.main.Client"
] | import com.revtekk.nioflex.main.Client; | import com.revtekk.nioflex.main.*; | [
"com.revtekk.nioflex"
] | com.revtekk.nioflex; | 634,002 |
@Override
public int hashCode() {
return Objects.hash(typeName, timeZone);
} | int function() { return Objects.hash(typeName, timeZone); } | /**
* Generate the hashCode for this TypeInfo.
*/ | Generate the hashCode for this TypeInfo | hashCode | {
"repo_name": "sankarh/hive",
"path": "serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TimestampLocalTZTypeInfo.java",
"license": "apache-2.0",
"size": 2833
} | [
"java.util.Objects"
] | import java.util.Objects; | import java.util.*; | [
"java.util"
] | java.util; | 982,279 |
public Cursor getAverageRssiByPlace(String placeId) {
String queryString =
"SELECT DISTINCT " + A_BSSI + "," + A_RSSI + "," + A_SSID + " " +
"FROM " + A_TABLE + " " +
"WHERE " + A_PLACE + " = ? " + " " +
"ORDER BY " + A_PLACE + " DESC";
String[] whereArgs = new String[]{
placeId
};
return database.rawQuery(queryString, whereArgs); // iterate to get each value.
} | Cursor function(String placeId) { String queryString = STR + A_BSSI + "," + A_RSSI + "," + A_SSID + " " + STR + A_TABLE + " " + STR + A_PLACE + STR + " " + STR + A_PLACE + STR; String[] whereArgs = new String[]{ placeId }; return database.rawQuery(queryString, whereArgs); } | /**
* Gets average rssi by place.
*
* @param placeId the place id to filter the averages DB
* @return Cursor linking to the average rssi for the queried place
*/ | Gets average rssi by place | getAverageRssiByPlace | {
"repo_name": "chenkel/kontextSmartWatch",
"path": "common/src/main/java/project/context/localization/common/db/DatabaseHelper.java",
"license": "gpl-3.0",
"size": 11396
} | [
"android.database.Cursor"
] | import android.database.Cursor; | import android.database.*; | [
"android.database"
] | android.database; | 1,251,009 |
@Test
public void testUnionOnSchema3Inputs() throws IOException, ParserException {
PigServer pig = new PigServer(ExecType.LOCAL);
String query =
" l1 = load '" + INP_FILE_2NUMS + "' as (i : int, j : int); "
+ "l2 = load '" + INP_FILE_2NUMS + "' as (i : double, x : int); "
+ "l3 = load '" + INP_FILE_2NUM_1CHAR_1BAG + "' as "
+ " (i : long, c : chararray, j : int "
+ ", b : bag { t : tuple (c1 : int, c2 : chararray)} ); "
+ "u = union onschema l1, l2, l3;"
;
Util.registerMultiLineQuery(pig, query);
pig.explain("u", System.out);
Iterator<Tuple> it = pig.openIterator("u");
List<Tuple> expectedRes =
Util.getTuplesFromConstantTupleStrings(
new String[] {
"(1.0,2,null,null,null)",
"(5.0,3,null,null,null)",
"(1.0,null,2,null,null)",
"(5.0,null,3,null,null)",
"(1.0,2,null,'abc',{(1,'a'),(1,'b')})",
"(5.0,3,null,'def',{(2,'a'),(2,'b')})",
});
Util.checkQueryOutputsAfterSort(it, expectedRes);
} | void function() throws IOException, ParserException { PigServer pig = new PigServer(ExecType.LOCAL); String query = STR + INP_FILE_2NUMS + STR + STR + INP_FILE_2NUMS + STR + STR + INP_FILE_2NUM_1CHAR_1BAG + STR + STR + STR + STR ; Util.registerMultiLineQuery(pig, query); pig.explain("u", System.out); Iterator<Tuple> it = pig.openIterator("u"); List<Tuple> expectedRes = Util.getTuplesFromConstantTupleStrings( new String[] { STR, STR, STR, STR, STR, STR, }); Util.checkQueryOutputsAfterSort(it, expectedRes); } | /**
* Test UNION ONSCHEMA on 3 inputs
* @throws IOException
* @throws ParserException
*/ | Test UNION ONSCHEMA on 3 inputs | testUnionOnSchema3Inputs | {
"repo_name": "sigmoidanalytics/spork",
"path": "test/org/apache/pig/test/TestUnionOnSchema.java",
"license": "apache-2.0",
"size": 36185
} | [
"java.io.IOException",
"java.util.Iterator",
"java.util.List",
"org.apache.pig.ExecType",
"org.apache.pig.PigServer",
"org.apache.pig.data.Tuple",
"org.apache.pig.parser.ParserException"
] | import java.io.IOException; import java.util.Iterator; import java.util.List; import org.apache.pig.ExecType; import org.apache.pig.PigServer; import org.apache.pig.data.Tuple; import org.apache.pig.parser.ParserException; | import java.io.*; import java.util.*; import org.apache.pig.*; import org.apache.pig.data.*; import org.apache.pig.parser.*; | [
"java.io",
"java.util",
"org.apache.pig"
] | java.io; java.util; org.apache.pig; | 825,604 |
List<JSModule> createJsModules(
List<String> specs, List<String> jsFiles)
throws FlagUsageException, IOException {
if (isInTestMode()) {
return modulesSupplierForTesting.get();
}
Preconditions.checkState(specs != null);
Preconditions.checkState(!specs.isEmpty());
Preconditions.checkState(jsFiles != null);
List<String> moduleNames = new ArrayList<>(specs.size());
Map<String, JSModule> modulesByName = new LinkedHashMap<>();
Map<String, Integer> modulesFileCountMap = new LinkedHashMap<>();
int numJsFilesExpected = 0, minJsFilesRequired = 0;
boolean isFirstModule = true;
for (String spec : specs) {
// Format is "<name>:<num-js-files>[:[<dep>,...][:]]".
String[] parts = spec.split(":");
if (parts.length < 2 || parts.length > 4) {
throw new FlagUsageException("Expected 2-4 colon-delimited parts in "
+ "module spec: " + spec);
}
// Parse module name.
String name = parts[0];
checkModuleName(name);
if (modulesByName.containsKey(name)) {
throw new FlagUsageException("Duplicate module name: " + name);
}
JSModule module = new JSModule(name);
if (parts.length > 2) {
// Parse module dependencies.
String depList = parts[2];
if (depList.length() > 0) {
String[] deps = depList.split(",");
for (String dep : deps) {
JSModule other = modulesByName.get(dep);
if (other == null) {
throw new FlagUsageException("Module '" + name
+ "' depends on unknown module '" + dep
+ "'. Be sure to list modules in dependency order.");
}
module.addDependency(other);
}
}
}
// Parse module inputs.
int numJsFiles = -1;
try {
numJsFiles = Integer.parseInt(parts[1]);
} catch (NumberFormatException ignored) {
numJsFiles = -1;
}
// We will allow modules of zero input.
if (numJsFiles < 0) {
// A size of 'auto' is only allowed on the base module,
// and it must also be the first module
if (parts.length == 2 && "auto".equals(parts[1])) {
if (isFirstModule) {
numJsFilesExpected = -1;
} else {
throw new FlagUsageException("Invalid JS file count '" + parts[1]
+ "' for module: " + name + ". Only the first module may specify " +
"a size of 'auto' and it must have no dependencies.");
}
} else {
throw new FlagUsageException("Invalid JS file count '" + parts[1]
+ "' for module: " + name);
}
} else {
minJsFilesRequired += numJsFiles;
}
if (numJsFilesExpected >= 0) {
numJsFilesExpected += numJsFiles;
}
// Add modules in reverse order so that source files are allocated to
// modules in reverse order. This allows the first module
// (presumably the base module) to have a size of 'auto'
moduleNames.add(0, name);
modulesFileCountMap.put(name, numJsFiles);
modulesByName.put(name, module);
}
final int totalNumJsFiles = jsFiles.size();
if (numJsFilesExpected >= 0 || minJsFilesRequired > totalNumJsFiles) {
if (minJsFilesRequired > totalNumJsFiles) {
numJsFilesExpected = minJsFilesRequired;
}
if (numJsFilesExpected > totalNumJsFiles) {
throw new FlagUsageException("Not enough JS files specified. Expected "
+ numJsFilesExpected + " but found " + totalNumJsFiles);
} else if (numJsFilesExpected < totalNumJsFiles) {
throw new FlagUsageException("Too many JS files specified. Expected "
+ numJsFilesExpected + " but found " + totalNumJsFiles);
}
}
int numJsFilesLeft = totalNumJsFiles, moduleIndex = 0;
for (String moduleName : moduleNames) {
// Parse module inputs.
int numJsFiles = modulesFileCountMap.get(moduleName);
JSModule module = modulesByName.get(moduleName);
// Check if the first module specified 'auto' for the number of files
if (moduleIndex == moduleNames.size() - 1 && numJsFiles == -1) {
numJsFiles = numJsFilesLeft;
}
List<String> moduleJsFiles =
jsFiles.subList(numJsFilesLeft - numJsFiles, numJsFilesLeft);
for (SourceFile input : createInputs(moduleJsFiles, false)) {
module.add(input);
}
numJsFilesLeft -= numJsFiles;
moduleIndex++;
}
return new ArrayList<>(modulesByName.values());
} | List<JSModule> createJsModules( List<String> specs, List<String> jsFiles) throws FlagUsageException, IOException { if (isInTestMode()) { return modulesSupplierForTesting.get(); } Preconditions.checkState(specs != null); Preconditions.checkState(!specs.isEmpty()); Preconditions.checkState(jsFiles != null); List<String> moduleNames = new ArrayList<>(specs.size()); Map<String, JSModule> modulesByName = new LinkedHashMap<>(); Map<String, Integer> modulesFileCountMap = new LinkedHashMap<>(); int numJsFilesExpected = 0, minJsFilesRequired = 0; boolean isFirstModule = true; for (String spec : specs) { String[] parts = spec.split(":"); if (parts.length < 2 parts.length > 4) { throw new FlagUsageException(STR + STR + spec); } String name = parts[0]; checkModuleName(name); if (modulesByName.containsKey(name)) { throw new FlagUsageException(STR + name); } JSModule module = new JSModule(name); if (parts.length > 2) { String depList = parts[2]; if (depList.length() > 0) { String[] deps = depList.split(","); for (String dep : deps) { JSModule other = modulesByName.get(dep); if (other == null) { throw new FlagUsageException(STR + name + STR + dep + STR); } module.addDependency(other); } } } int numJsFiles = -1; try { numJsFiles = Integer.parseInt(parts[1]); } catch (NumberFormatException ignored) { numJsFiles = -1; } if (numJsFiles < 0) { if (parts.length == 2 && "auto".equals(parts[1])) { if (isFirstModule) { numJsFilesExpected = -1; } else { throw new FlagUsageException(STR + parts[1] + STR + name + STR + STR); } } else { throw new FlagUsageException(STR + parts[1] + STR + name); } } else { minJsFilesRequired += numJsFiles; } if (numJsFilesExpected >= 0) { numJsFilesExpected += numJsFiles; } moduleNames.add(0, name); modulesFileCountMap.put(name, numJsFiles); modulesByName.put(name, module); } final int totalNumJsFiles = jsFiles.size(); if (numJsFilesExpected >= 0 minJsFilesRequired > totalNumJsFiles) { if (minJsFilesRequired > totalNumJsFiles) { numJsFilesExpected = minJsFilesRequired; } if (numJsFilesExpected > totalNumJsFiles) { throw new FlagUsageException(STR + numJsFilesExpected + STR + totalNumJsFiles); } else if (numJsFilesExpected < totalNumJsFiles) { throw new FlagUsageException(STR + numJsFilesExpected + STR + totalNumJsFiles); } } int numJsFilesLeft = totalNumJsFiles, moduleIndex = 0; for (String moduleName : moduleNames) { int numJsFiles = modulesFileCountMap.get(moduleName); JSModule module = modulesByName.get(moduleName); if (moduleIndex == moduleNames.size() - 1 && numJsFiles == -1) { numJsFiles = numJsFilesLeft; } List<String> moduleJsFiles = jsFiles.subList(numJsFilesLeft - numJsFiles, numJsFilesLeft); for (SourceFile input : createInputs(moduleJsFiles, false)) { module.add(input); } numJsFilesLeft -= numJsFiles; moduleIndex++; } return new ArrayList<>(modulesByName.values()); } | /**
* Creates module objects from a list of module specifications.
*
* @param specs A list of module specifications, not null or empty. The spec
* format is: <code>name:num-js-files[:[dep,...][:]]</code>. Module
* names must not contain the ':' character.
* @param jsFiles A list of JS file paths, not null
* @return An array of module objects
*/ | Creates module objects from a list of module specifications | createJsModules | {
"repo_name": "fvigotti/closure-compiler",
"path": "src/com/google/javascript/jscomp/AbstractCommandLineRunner.java",
"license": "apache-2.0",
"size": 69434
} | [
"com.google.common.base.Preconditions",
"java.io.IOException",
"java.util.ArrayList",
"java.util.LinkedHashMap",
"java.util.List",
"java.util.Map"
] | import com.google.common.base.Preconditions; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; | import com.google.common.base.*; import java.io.*; import java.util.*; | [
"com.google.common",
"java.io",
"java.util"
] | com.google.common; java.io; java.util; | 2,271,272 |
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<ApiContractInner> listByService(String resourceGroupName, String serviceName, String gatewayId); | @ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<ApiContractInner> listByService(String resourceGroupName, String serviceName, String gatewayId); | /**
* Lists a collection of the APIs associated with a gateway.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param gatewayId Gateway entity identifier. Must be unique in the current API Management service instance. Must
* not have value 'managed'.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return paged Api list representation.
*/ | Lists a collection of the APIs associated with a gateway | listByService | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/apimanagement/azure-resourcemanager-apimanagement/src/main/java/com/azure/resourcemanager/apimanagement/fluent/GatewayApisClient.java",
"license": "mit",
"size": 9316
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedIterable",
"com.azure.resourcemanager.apimanagement.fluent.models.ApiContractInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedIterable; import com.azure.resourcemanager.apimanagement.fluent.models.ApiContractInner; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.apimanagement.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 2,761,532 |
protected void run(PathData pathData) throws IOException {
run(pathData.path);
} | void function(PathData pathData) throws IOException { run(pathData.path); } | /**
* Execute the command on the input path data. Commands can override to make
* use of the resolved filesystem.
* @param pathData The input path with resolved filesystem
* @throws IOException
*/ | Execute the command on the input path data. Commands can override to make use of the resolved filesystem | run | {
"repo_name": "Ethanlm/hadoop",
"path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java",
"license": "apache-2.0",
"size": 16900
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 686,491 |
void openListener() throws Exception {
if (listener.getLocalPort() != -1) { // it's already bound
return;
}
if (listenerStartedExternally) { // Expect that listener was started securely
throw new Exception("Expected webserver's listener to be started " +
"previously but wasn't");
}
int port = listener.getPort();
while (true) {
// jetty has a bug where you can't reopen a listener that previously
// failed to open w/o issuing a close first, even if the port is changed
try {
listener.close();
listener.open();
break;
} catch (BindException ex) {
if (port == 0 || !findPort) {
BindException be = new BindException(
"Port in use: " + listener.getHost() + ":" + listener.getPort());
be.initCause(ex);
throw be;
}
}
// try the next port number
listener.setPort(++port);
Thread.sleep(100);
}
} | void openListener() throws Exception { if (listener.getLocalPort() != -1) { return; } if (listenerStartedExternally) { throw new Exception(STR + STR); } int port = listener.getPort(); while (true) { try { listener.close(); listener.open(); break; } catch (BindException ex) { if (port == 0 !findPort) { BindException be = new BindException( STR + listener.getHost() + ":" + listener.getPort()); be.initCause(ex); throw be; } } listener.setPort(++port); Thread.sleep(100); } } | /**
* Open the main listener for the server
* @throws Exception
*/ | Open the main listener for the server | openListener | {
"repo_name": "linpawslitap/mds_scaling",
"path": "hadoop/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java",
"license": "bsd-3-clause",
"size": 34535
} | [
"java.net.BindException"
] | import java.net.BindException; | import java.net.*; | [
"java.net"
] | java.net; | 1,872,504 |
private boolean isMeasurementScheduleEvent(Zevent event) {
return event instanceof MeasurementScheduleZevent;
} | boolean function(Zevent event) { return event instanceof MeasurementScheduleZevent; } | /**
* Is this a measurement schedule event?
*
* @param event The event.
* @return <code>true</code> if this is a {@link MeasurementScheduleZevent}.
*/ | Is this a measurement schedule event | isMeasurementScheduleEvent | {
"repo_name": "cc14514/hq6",
"path": "hq-server/src/main/java/org/hyperic/hq/measurement/galerts/MeasurementGtrigger.java",
"license": "unlicense",
"size": 35232
} | [
"org.hyperic.hq.measurement.server.session.MeasurementScheduleZevent",
"org.hyperic.hq.zevents.Zevent"
] | import org.hyperic.hq.measurement.server.session.MeasurementScheduleZevent; import org.hyperic.hq.zevents.Zevent; | import org.hyperic.hq.measurement.server.session.*; import org.hyperic.hq.zevents.*; | [
"org.hyperic.hq"
] | org.hyperic.hq; | 236,946 |
public void getEpisodes(DataResponse<ArrayList<Episode>> response, TvShow show, Season season, Context context) ;
| void function(DataResponse<ArrayList<Episode>> response, TvShow show, Season season, Context context) ; | /**
* Gets all Episodes for the specified show and season
* @param response
* @param show
* @param season
* @param context
*/ | Gets all Episodes for the specified show and season | getEpisodes | {
"repo_name": "r00li/RHome",
"path": "Android/RHome/lib-src/org/xbmc/api/business/ITvShowManager.java",
"license": "gpl-3.0",
"size": 2361
} | [
"android.content.Context",
"java.util.ArrayList",
"org.xbmc.api.object.Episode",
"org.xbmc.api.object.Season",
"org.xbmc.api.object.TvShow"
] | import android.content.Context; import java.util.ArrayList; import org.xbmc.api.object.Episode; import org.xbmc.api.object.Season; import org.xbmc.api.object.TvShow; | import android.content.*; import java.util.*; import org.xbmc.api.object.*; | [
"android.content",
"java.util",
"org.xbmc.api"
] | android.content; java.util; org.xbmc.api; | 890,756 |
public ByteBuffer encode( ByteBuffer buffer ) throws EncoderException
{
if ( buffer == null )
{
throw new EncoderException( I18n.err( I18n.ERR_148 ) );
}
try
{
// The HostAddresses SEQ Tag
buffer.put( UniversalTag.SEQUENCE.getValue() );
buffer.put( TLV.getBytes( addressesLength ) );
// The hostAddress list, if it's not empty
if ( ( addresses != null ) && !addresses.isEmpty() )
{
for ( HostAddress hostAddress : addresses )
{
hostAddress.encode( buffer );
}
}
}
catch ( BufferOverflowException boe )
{
LOG.error( I18n.err( I18n.ERR_144, 1 + TLV.getNbBytes( addressesLength )
+ addressesLength, buffer.capacity() ) );
throw new EncoderException( I18n.err( I18n.ERR_138 ), boe );
}
if ( IS_DEBUG )
{
LOG.debug( "HostAddresses encoding : {}", Strings.dumpBytes( buffer.array() ) );
LOG.debug( "HostAddresses initial value : {}", this );
}
return buffer;
} | ByteBuffer function( ByteBuffer buffer ) throws EncoderException { if ( buffer == null ) { throw new EncoderException( I18n.err( I18n.ERR_148 ) ); } try { buffer.put( UniversalTag.SEQUENCE.getValue() ); buffer.put( TLV.getBytes( addressesLength ) ); if ( ( addresses != null ) && !addresses.isEmpty() ) { for ( HostAddress hostAddress : addresses ) { hostAddress.encode( buffer ); } } } catch ( BufferOverflowException boe ) { LOG.error( I18n.err( I18n.ERR_144, 1 + TLV.getNbBytes( addressesLength ) + addressesLength, buffer.capacity() ) ); throw new EncoderException( I18n.err( I18n.ERR_138 ), boe ); } if ( IS_DEBUG ) { LOG.debug( STR, Strings.dumpBytes( buffer.array() ) ); LOG.debug( STR, this ); } return buffer; } | /**
* Encode the HostAddress message to a PDU.
* <pre>
* HostAddress :
*
* 0x30 LL
* 0x30 LL hostaddress[1]
* 0x30 LL hostaddress[1]
* ...
* 0x30 LL hostaddress[1]
* </pre>
* @param buffer The buffer where to put the PDU. It should have been allocated
* before, with the right size.
* @return The constructed PDU.
*/ | Encode the HostAddress message to a PDU. <code> HostAddress : 0x30 LL 0x30 LL hostaddress[1] 0x30 LL hostaddress[1] ... 0x30 LL hostaddress[1] </code> | encode | {
"repo_name": "apache/directory-server",
"path": "kerberos-codec/src/main/java/org/apache/directory/shared/kerberos/components/HostAddresses.java",
"license": "apache-2.0",
"size": 7857
} | [
"java.nio.BufferOverflowException",
"java.nio.ByteBuffer",
"org.apache.directory.api.asn1.EncoderException",
"org.apache.directory.api.asn1.ber.tlv.TLV",
"org.apache.directory.api.asn1.ber.tlv.UniversalTag",
"org.apache.directory.api.util.Strings",
"org.apache.directory.server.i18n.I18n"
] | import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import org.apache.directory.api.asn1.EncoderException; import org.apache.directory.api.asn1.ber.tlv.TLV; import org.apache.directory.api.asn1.ber.tlv.UniversalTag; import org.apache.directory.api.util.Strings; import org.apache.directory.server.i18n.I18n; | import java.nio.*; import org.apache.directory.api.asn1.*; import org.apache.directory.api.asn1.ber.tlv.*; import org.apache.directory.api.util.*; import org.apache.directory.server.i18n.*; | [
"java.nio",
"org.apache.directory"
] | java.nio; org.apache.directory; | 276,575 |
public void doSort(RunData data)
{
SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid());
// we are changing the sort, so start from the first page again
resetPaging(state);
setupSort(data, data.getParameters().getString("criteria"));
} | void function(RunData data) { SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid()); resetPaging(state); setupSort(data, data.getParameters().getString(STR)); } | /**
* Sort based on the given property
*/ | Sort based on the given property | doSort | {
"repo_name": "lorenamgUMU/sakai",
"path": "assignment/assignment-tool/tool/src/java/org/sakaiproject/assignment/tool/AssignmentAction.java",
"license": "apache-2.0",
"size": 677150
} | [
"org.sakaiproject.cheftool.JetspeedRunData",
"org.sakaiproject.cheftool.RunData",
"org.sakaiproject.event.api.SessionState"
] | import org.sakaiproject.cheftool.JetspeedRunData; import org.sakaiproject.cheftool.RunData; import org.sakaiproject.event.api.SessionState; | import org.sakaiproject.cheftool.*; import org.sakaiproject.event.api.*; | [
"org.sakaiproject.cheftool",
"org.sakaiproject.event"
] | org.sakaiproject.cheftool; org.sakaiproject.event; | 868,660 |
public void write(DataInputBuffer key, DataInputBuffer value)
throws IOException {
assert (key != null && value != null);
if (fileCache.isActive()) {
fileCache.write(key, value);
return;
}
if (memCache.reserveSpace(key, value)) {
memCache.write(key, value);
} else {
fileCache.activate();
fileCache.write(key, value);
}
} | void function(DataInputBuffer key, DataInputBuffer value) throws IOException { assert (key != null && value != null); if (fileCache.isActive()) { fileCache.write(key, value); return; } if (memCache.reserveSpace(key, value)) { memCache.write(key, value); } else { fileCache.activate(); fileCache.write(key, value); } } | /**
* Write the given K,V to the cache.
* Write to memcache if space is available, else write to the filecache
* @param key
* @param value
* @throws IOException
*/ | Write the given K,V to the cache. Write to memcache if space is available, else write to the filecache | write | {
"repo_name": "dennishuo/hadoop",
"path": "hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/BackupStore.java",
"license": "apache-2.0",
"size": 18966
} | [
"java.io.IOException",
"org.apache.hadoop.io.DataInputBuffer"
] | import java.io.IOException; import org.apache.hadoop.io.DataInputBuffer; | import java.io.*; import org.apache.hadoop.io.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 1,285,959 |
public List<String> getAdditionalResources() {
return m_additionalResources;
} | List<String> function() { return m_additionalResources; } | /**
* Returns the VFS resources to be exported additionally with the module.<p>
*
* @return the VFS resources to be exported additionally with the module
*/ | Returns the VFS resources to be exported additionally with the module | getAdditionalResources | {
"repo_name": "ggiudetti/opencms-core",
"path": "src/org/opencms/module/CmsModuleImportExportHandler.java",
"license": "lgpl-2.1",
"size": 27102
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,856,454 |
private void addMenuItems(JMenu menu) {
Locale locale = getRootPane().getLocale();
menu.setToolTipText(//"Window-related operations.");
UIManager.getString("BETitlePane.titleMenuToolTipText", getLocale()));
JMenuItem mi;
int mnemonic;
if (getWindowDecorationStyle() == JRootPane.FRAME)//! only frames have these menu items
{
mi = menu.add(restoreAction);
mnemonic = BEUtils.getInt("MetalTitlePane.restoreMnemonic", -1);
if (mnemonic != -1)
mi.setMnemonic(mnemonic);
mi = menu.add(iconifyAction);
mnemonic = BEUtils.getInt("MetalTitlePane.iconifyMnemonic", -1);
if (mnemonic != -1)
mi.setMnemonic(mnemonic);
if (Toolkit.getDefaultToolkit().isFrameStateSupported(
Frame.MAXIMIZED_BOTH)) {
mi = menu.add(maximizeAction);
mnemonic = BEUtils.getInt("MetalTitlePane.maximizeMnemonic",
-1);
if (mnemonic != -1)
mi.setMnemonic(mnemonic);
}
menu.add(new JSeparator());
}
mi = menu.add(closeAction);
mnemonic = BEUtils.getInt("MetalTitlePane.closeMnemonic", -1);
if (mnemonic != -1)
mi.setMnemonic(mnemonic);
} | void function(JMenu menu) { Locale locale = getRootPane().getLocale(); menu.setToolTipText( UIManager.getString(STR, getLocale())); JMenuItem mi; int mnemonic; if (getWindowDecorationStyle() == JRootPane.FRAME) { mi = menu.add(restoreAction); mnemonic = BEUtils.getInt(STR, -1); if (mnemonic != -1) mi.setMnemonic(mnemonic); mi = menu.add(iconifyAction); mnemonic = BEUtils.getInt(STR, -1); if (mnemonic != -1) mi.setMnemonic(mnemonic); if (Toolkit.getDefaultToolkit().isFrameStateSupported( Frame.MAXIMIZED_BOTH)) { mi = menu.add(maximizeAction); mnemonic = BEUtils.getInt(STR, -1); if (mnemonic != -1) mi.setMnemonic(mnemonic); } menu.add(new JSeparator()); } mi = menu.add(closeAction); mnemonic = BEUtils.getInt(STR, -1); if (mnemonic != -1) mi.setMnemonic(mnemonic); } | /**
* Adds the necessary <code>JMenuItem</code>s to the passed in menu.
*
* @param menu the menu
*/ | Adds the necessary <code>JMenuItem</code>s to the passed in menu | addMenuItems | {
"repo_name": "mclauncher/HMCL",
"path": "HMCLaF/src/main/java/org/jackhuang/hmcl/laf/titlepane/BETitlePane.java",
"license": "gpl-3.0",
"size": 43248
} | [
"java.awt.Frame",
"java.awt.Toolkit",
"java.util.Locale",
"javax.swing.JMenu",
"javax.swing.JMenuItem",
"javax.swing.JRootPane",
"javax.swing.JSeparator",
"javax.swing.UIManager",
"org.jackhuang.hmcl.laf.BEUtils"
] | import java.awt.Frame; import java.awt.Toolkit; import java.util.Locale; import javax.swing.JMenu; import javax.swing.JMenuItem; import javax.swing.JRootPane; import javax.swing.JSeparator; import javax.swing.UIManager; import org.jackhuang.hmcl.laf.BEUtils; | import java.awt.*; import java.util.*; import javax.swing.*; import org.jackhuang.hmcl.laf.*; | [
"java.awt",
"java.util",
"javax.swing",
"org.jackhuang.hmcl"
] | java.awt; java.util; javax.swing; org.jackhuang.hmcl; | 687,121 |
public int article() throws IOException
{
return sendCommand(NNTPCommand.ARTICLE);
} | int function() throws IOException { return sendCommand(NNTPCommand.ARTICLE); } | /***
* A convenience method to send the NNTP ARTICLE command to the server,
* receive the initial reply, and return the reply code.
* <p>
* @return The reply code received from the server.
* @throws NNTPConnectionClosedException
* If the NNTP server prematurely closes the connection as a result
* of the client being idle or some other reason causing the server
* to send NNTP reply code 400. This exception may be caught either
* as an IOException or independently as itself.
* @throws IOException If an I/O error occurs while either sending the
* command or receiving the server reply.
***/ | A convenience method to send the NNTP ARTICLE command to the server, receive the initial reply, and return the reply code. | article | {
"repo_name": "grtlinux/KIEA_JAVA7",
"path": "KIEA_JAVA7/src/tain/kr/com/commons/net/v01/nntp/NNTP.java",
"license": "gpl-3.0",
"size": 42708
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 502,420 |
public boolean onKey(View v, int keyCode, KeyEvent event) {
try {
// skip keys if we aren't connected yet or have been disconnected
if (bridge.isDisconnected() || bridge.transport == null)
return false;
final boolean interpretAsHardKeyboard = deviceHasHardKeyboard &&
!manager.hardKeyboardHidden;
final boolean rightModifiersAreSlashAndTab = interpretAsHardKeyboard &&
PreferenceConstants.KEYMODE_RIGHT.equals(keymode);
final boolean leftModifiersAreSlashAndTab = interpretAsHardKeyboard &&
PreferenceConstants.KEYMODE_LEFT.equals(keymode);
final boolean volumeKeysChangeFontSize = true;
final boolean shiftedNumbersAreFKeys = interpretAsHardKeyboard;
final boolean controlNumbersAreFKeys = !interpretAsHardKeyboard;
// Ignore all key-up events except for the special keys
if (event.getAction() == KeyEvent.ACTION_UP) {
if (rightModifiersAreSlashAndTab) {
if (keyCode == KeyEvent.KEYCODE_ALT_RIGHT
&& (ourMetaState & OUR_SLASH) != 0) {
ourMetaState &= ~OUR_TRANSIENT;
bridge.transport.write('/');
return true;
} else if (keyCode == KeyEvent.KEYCODE_SHIFT_RIGHT
&& (ourMetaState & OUR_TAB) != 0) {
ourMetaState &= ~OUR_TRANSIENT;
bridge.transport.write(0x09);
return true;
}
} else if (leftModifiersAreSlashAndTab) {
if (keyCode == KeyEvent.KEYCODE_ALT_LEFT
&& (ourMetaState & OUR_SLASH) != 0) {
ourMetaState &= ~OUR_TRANSIENT;
bridge.transport.write('/');
return true;
} else if (keyCode == KeyEvent.KEYCODE_SHIFT_LEFT
&& (ourMetaState & OUR_TAB) != 0) {
ourMetaState &= ~OUR_TRANSIENT;
bridge.transport.write(0x09);
return true;
}
}
return false;
}
if (volumeKeysChangeFontSize) {
if (keyCode == KeyEvent.KEYCODE_VOLUME_UP) {
bridge.increaseFontSize();
return true;
} else if(keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
bridge.decreaseFontSize();
return true;
}
}
bridge.resetScrollPosition();
// Handle potentially multi-character IME input.
if (keyCode == KeyEvent.KEYCODE_UNKNOWN &&
event.getAction() == KeyEvent.ACTION_MULTIPLE) {
byte[] input = event.getCharacters().getBytes(encoding);
bridge.transport.write(input);
return true;
}
/// Handle alt and shift keys if they aren't repeating
if (event.getRepeatCount() == 0) {
if (rightModifiersAreSlashAndTab) {
switch (keyCode) {
case KeyEvent.KEYCODE_ALT_RIGHT:
ourMetaState |= OUR_SLASH;
return true;
case KeyEvent.KEYCODE_SHIFT_RIGHT:
ourMetaState |= OUR_TAB;
return true;
case KeyEvent.KEYCODE_SHIFT_LEFT:
metaPress(OUR_SHIFT_ON);
return true;
case KeyEvent.KEYCODE_ALT_LEFT:
metaPress(OUR_ALT_ON);
return true;
}
} else if (leftModifiersAreSlashAndTab) {
switch (keyCode) {
case KeyEvent.KEYCODE_ALT_LEFT:
ourMetaState |= OUR_SLASH;
return true;
case KeyEvent.KEYCODE_SHIFT_LEFT:
ourMetaState |= OUR_TAB;
return true;
case KeyEvent.KEYCODE_SHIFT_RIGHT:
metaPress(OUR_SHIFT_ON);
return true;
case KeyEvent.KEYCODE_ALT_RIGHT:
metaPress(OUR_ALT_ON);
return true;
}
} else {
switch (keyCode) {
case KeyEvent.KEYCODE_ALT_LEFT:
case KeyEvent.KEYCODE_ALT_RIGHT:
metaPress(OUR_ALT_ON);
return true;
case KeyEvent.KEYCODE_SHIFT_LEFT:
case KeyEvent.KEYCODE_SHIFT_RIGHT:
metaPress(OUR_SHIFT_ON);
return true;
}
}
}
if (keyCode == KeyEvent.KEYCODE_DPAD_CENTER) {
if (selectingForCopy) {
if (selectionArea.isSelectingOrigin())
selectionArea.finishSelectingOrigin();
else {
if (clipboard != null) {
// copy selected area to clipboard
String copiedText = selectionArea.copyFrom(buffer);
clipboard.setText(copiedText);
// XXX STOPSHIP
// manager.notifyUser(manager.getString(
// R.string.console_copy_done,
// copiedText.length()));
selectingForCopy = false;
selectionArea.reset();
}
}
} else {
if ((ourMetaState & OUR_CTRL_ON) != 0) {
sendEscape();
ourMetaState &= ~OUR_CTRL_ON;
} else
metaPress(OUR_CTRL_ON);
}
bridge.redraw();
return true;
}
int derivedMetaState = event.getMetaState();
if ((ourMetaState & OUR_SHIFT_MASK) != 0)
derivedMetaState |= KeyEvent.META_SHIFT_ON;
if ((ourMetaState & OUR_ALT_MASK) != 0)
derivedMetaState |= KeyEvent.META_ALT_ON;
if ((ourMetaState & OUR_CTRL_MASK) != 0)
derivedMetaState |= HC_META_CTRL_ON;
if ((ourMetaState & OUR_TRANSIENT) != 0) {
ourMetaState &= ~OUR_TRANSIENT;
bridge.redraw();
}
// Test for modified numbers becoming function keys
if (shiftedNumbersAreFKeys && (derivedMetaState & KeyEvent.META_SHIFT_ON) != 0) {
if (sendFunctionKey(keyCode))
return true;
}
if (controlNumbersAreFKeys && (derivedMetaState & HC_META_CTRL_ON) != 0) {
if (sendFunctionKey(keyCode))
return true;
}
// Ask the system to use the keymap to give us the unicode character for this key,
// with our derived modifier state applied.
int uchar = event.getUnicodeChar(derivedMetaState & ~HC_META_CTRL_ON);
int ucharWithoutAlt = event.getUnicodeChar(
derivedMetaState & ~(HC_META_ALT_MASK | HC_META_CTRL_ON));
if (uchar != ucharWithoutAlt) {
// The alt key was used to modify the character returned; therefore, drop the alt
// modifier from the state so we don't end up sending alt+key.
derivedMetaState &= ~HC_META_ALT_MASK;
}
// Remove shift from the modifier state as it has already been used by getUnicodeChar.
derivedMetaState &= ~KeyEvent.META_SHIFT_ON;
if ((uchar & KeyCharacterMap.COMBINING_ACCENT) != 0) {
mDeadKey = uchar & KeyCharacterMap.COMBINING_ACCENT_MASK;
return true;
}
if (mDeadKey != 0) {
uchar = KeyCharacterMap.getDeadChar(mDeadKey, keyCode);
mDeadKey = 0;
}
// If we have a defined non-control character
if (uchar >= 0x20) {
if ((derivedMetaState & HC_META_CTRL_ON) != 0)
uchar = keyAsControl(uchar);
if ((derivedMetaState & KeyEvent.META_ALT_ON) != 0)
sendEscape();
if (uchar < 0x80)
bridge.transport.write(uchar);
else
// TODO write encoding routine that doesn't allocate each time
bridge.transport.write(new String(Character.toChars(uchar))
.getBytes(encoding));
return true;
}
// look for special chars
switch(keyCode) {
case KEYCODE_ESCAPE:
sendEscape();
return true;
case KeyEvent.KEYCODE_TAB:
bridge.transport.write(0x09);
return true;
case KeyEvent.KEYCODE_CAMERA:
// check to see which shortcut the camera button triggers
String camera = manager.prefs.getString(
PreferenceConstants.CAMERA,
PreferenceConstants.CAMERA_CTRLA_SPACE);
if(PreferenceConstants.CAMERA_CTRLA_SPACE.equals(camera)) {
bridge.transport.write(0x01);
bridge.transport.write(' ');
} else if(PreferenceConstants.CAMERA_CTRLA.equals(camera)) {
bridge.transport.write(0x01);
} else if(PreferenceConstants.CAMERA_ESC.equals(camera)) {
((vt320)buffer).keyTyped(vt320.KEY_ESCAPE, ' ', 0);
} else if(PreferenceConstants.CAMERA_ESC_A.equals(camera)) {
((vt320)buffer).keyTyped(vt320.KEY_ESCAPE, ' ', 0);
bridge.transport.write('a');
}
break;
case KeyEvent.KEYCODE_DEL:
((vt320) buffer).keyPressed(vt320.KEY_BACK_SPACE, ' ',
getStateForBuffer());
return true;
case KeyEvent.KEYCODE_ENTER:
((vt320)buffer).keyTyped(vt320.KEY_ENTER, ' ', 0);
return true;
case KeyEvent.KEYCODE_DPAD_LEFT:
if (selectingForCopy) {
selectionArea.decrementColumn();
bridge.redraw();
} else {
((vt320) buffer).keyPressed(vt320.KEY_LEFT, ' ',
getStateForBuffer());
bridge.tryKeyVibrate();
}
return true;
case KeyEvent.KEYCODE_DPAD_UP:
if (selectingForCopy) {
selectionArea.decrementRow();
bridge.redraw();
} else {
((vt320) buffer).keyPressed(vt320.KEY_UP, ' ',
getStateForBuffer());
bridge.tryKeyVibrate();
}
return true;
case KeyEvent.KEYCODE_DPAD_DOWN:
if (selectingForCopy) {
selectionArea.incrementRow();
bridge.redraw();
} else {
((vt320) buffer).keyPressed(vt320.KEY_DOWN, ' ',
getStateForBuffer());
bridge.tryKeyVibrate();
}
return true;
case KeyEvent.KEYCODE_DPAD_RIGHT:
if (selectingForCopy) {
selectionArea.incrementColumn();
bridge.redraw();
} else {
((vt320) buffer).keyPressed(vt320.KEY_RIGHT, ' ',
getStateForBuffer());
bridge.tryKeyVibrate();
}
return true;
}
} catch (IOException e) {
Log.e(TAG, "Problem while trying to handle an onKey() event", e);
try {
bridge.transport.flush();
} catch (IOException ioe) {
Log.d(TAG, "Our transport was closed, dispatching disconnect event");
bridge.dispatchDisconnect(false);
}
} catch (NullPointerException npe) {
Log.d(TAG, "Input before connection established ignored.");
return true;
}
return false;
} | boolean function(View v, int keyCode, KeyEvent event) { try { if (bridge.isDisconnected() bridge.transport == null) return false; final boolean interpretAsHardKeyboard = deviceHasHardKeyboard && !manager.hardKeyboardHidden; final boolean rightModifiersAreSlashAndTab = interpretAsHardKeyboard && PreferenceConstants.KEYMODE_RIGHT.equals(keymode); final boolean leftModifiersAreSlashAndTab = interpretAsHardKeyboard && PreferenceConstants.KEYMODE_LEFT.equals(keymode); final boolean volumeKeysChangeFontSize = true; final boolean shiftedNumbersAreFKeys = interpretAsHardKeyboard; final boolean controlNumbersAreFKeys = !interpretAsHardKeyboard; if (event.getAction() == KeyEvent.ACTION_UP) { if (rightModifiersAreSlashAndTab) { if (keyCode == KeyEvent.KEYCODE_ALT_RIGHT && (ourMetaState & OUR_SLASH) != 0) { ourMetaState &= ~OUR_TRANSIENT; bridge.transport.write('/'); return true; } else if (keyCode == KeyEvent.KEYCODE_SHIFT_RIGHT && (ourMetaState & OUR_TAB) != 0) { ourMetaState &= ~OUR_TRANSIENT; bridge.transport.write(0x09); return true; } } else if (leftModifiersAreSlashAndTab) { if (keyCode == KeyEvent.KEYCODE_ALT_LEFT && (ourMetaState & OUR_SLASH) != 0) { ourMetaState &= ~OUR_TRANSIENT; bridge.transport.write('/'); return true; } else if (keyCode == KeyEvent.KEYCODE_SHIFT_LEFT && (ourMetaState & OUR_TAB) != 0) { ourMetaState &= ~OUR_TRANSIENT; bridge.transport.write(0x09); return true; } } return false; } if (volumeKeysChangeFontSize) { if (keyCode == KeyEvent.KEYCODE_VOLUME_UP) { bridge.increaseFontSize(); return true; } else if(keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) { bridge.decreaseFontSize(); return true; } } bridge.resetScrollPosition(); if (keyCode == KeyEvent.KEYCODE_UNKNOWN && event.getAction() == KeyEvent.ACTION_MULTIPLE) { byte[] input = event.getCharacters().getBytes(encoding); bridge.transport.write(input); return true; } if (event.getRepeatCount() == 0) { if (rightModifiersAreSlashAndTab) { switch (keyCode) { case KeyEvent.KEYCODE_ALT_RIGHT: ourMetaState = OUR_SLASH; return true; case KeyEvent.KEYCODE_SHIFT_RIGHT: ourMetaState = OUR_TAB; return true; case KeyEvent.KEYCODE_SHIFT_LEFT: metaPress(OUR_SHIFT_ON); return true; case KeyEvent.KEYCODE_ALT_LEFT: metaPress(OUR_ALT_ON); return true; } } else if (leftModifiersAreSlashAndTab) { switch (keyCode) { case KeyEvent.KEYCODE_ALT_LEFT: ourMetaState = OUR_SLASH; return true; case KeyEvent.KEYCODE_SHIFT_LEFT: ourMetaState = OUR_TAB; return true; case KeyEvent.KEYCODE_SHIFT_RIGHT: metaPress(OUR_SHIFT_ON); return true; case KeyEvent.KEYCODE_ALT_RIGHT: metaPress(OUR_ALT_ON); return true; } } else { switch (keyCode) { case KeyEvent.KEYCODE_ALT_LEFT: case KeyEvent.KEYCODE_ALT_RIGHT: metaPress(OUR_ALT_ON); return true; case KeyEvent.KEYCODE_SHIFT_LEFT: case KeyEvent.KEYCODE_SHIFT_RIGHT: metaPress(OUR_SHIFT_ON); return true; } } } if (keyCode == KeyEvent.KEYCODE_DPAD_CENTER) { if (selectingForCopy) { if (selectionArea.isSelectingOrigin()) selectionArea.finishSelectingOrigin(); else { if (clipboard != null) { String copiedText = selectionArea.copyFrom(buffer); clipboard.setText(copiedText); selectingForCopy = false; selectionArea.reset(); } } } else { if ((ourMetaState & OUR_CTRL_ON) != 0) { sendEscape(); ourMetaState &= ~OUR_CTRL_ON; } else metaPress(OUR_CTRL_ON); } bridge.redraw(); return true; } int derivedMetaState = event.getMetaState(); if ((ourMetaState & OUR_SHIFT_MASK) != 0) derivedMetaState = KeyEvent.META_SHIFT_ON; if ((ourMetaState & OUR_ALT_MASK) != 0) derivedMetaState = KeyEvent.META_ALT_ON; if ((ourMetaState & OUR_CTRL_MASK) != 
0) derivedMetaState = HC_META_CTRL_ON; if ((ourMetaState & OUR_TRANSIENT) != 0) { ourMetaState &= ~OUR_TRANSIENT; bridge.redraw(); } if (shiftedNumbersAreFKeys && (derivedMetaState & KeyEvent.META_SHIFT_ON) != 0) { if (sendFunctionKey(keyCode)) return true; } if (controlNumbersAreFKeys && (derivedMetaState & HC_META_CTRL_ON) != 0) { if (sendFunctionKey(keyCode)) return true; } int uchar = event.getUnicodeChar(derivedMetaState & ~HC_META_CTRL_ON); int ucharWithoutAlt = event.getUnicodeChar( derivedMetaState & ~(HC_META_ALT_MASK HC_META_CTRL_ON)); if (uchar != ucharWithoutAlt) { derivedMetaState &= ~HC_META_ALT_MASK; } derivedMetaState &= ~KeyEvent.META_SHIFT_ON; if ((uchar & KeyCharacterMap.COMBINING_ACCENT) != 0) { mDeadKey = uchar & KeyCharacterMap.COMBINING_ACCENT_MASK; return true; } if (mDeadKey != 0) { uchar = KeyCharacterMap.getDeadChar(mDeadKey, keyCode); mDeadKey = 0; } if (uchar >= 0x20) { if ((derivedMetaState & HC_META_CTRL_ON) != 0) uchar = keyAsControl(uchar); if ((derivedMetaState & KeyEvent.META_ALT_ON) != 0) sendEscape(); if (uchar < 0x80) bridge.transport.write(uchar); else bridge.transport.write(new String(Character.toChars(uchar)) .getBytes(encoding)); return true; } switch(keyCode) { case KEYCODE_ESCAPE: sendEscape(); return true; case KeyEvent.KEYCODE_TAB: bridge.transport.write(0x09); return true; case KeyEvent.KEYCODE_CAMERA: String camera = manager.prefs.getString( PreferenceConstants.CAMERA, PreferenceConstants.CAMERA_CTRLA_SPACE); if(PreferenceConstants.CAMERA_CTRLA_SPACE.equals(camera)) { bridge.transport.write(0x01); bridge.transport.write(' '); } else if(PreferenceConstants.CAMERA_CTRLA.equals(camera)) { bridge.transport.write(0x01); } else if(PreferenceConstants.CAMERA_ESC.equals(camera)) { ((vt320)buffer).keyTyped(vt320.KEY_ESCAPE, ' ', 0); } else if(PreferenceConstants.CAMERA_ESC_A.equals(camera)) { ((vt320)buffer).keyTyped(vt320.KEY_ESCAPE, ' ', 0); bridge.transport.write('a'); } break; case KeyEvent.KEYCODE_DEL: ((vt320) buffer).keyPressed(vt320.KEY_BACK_SPACE, ' ', getStateForBuffer()); return true; case KeyEvent.KEYCODE_ENTER: ((vt320)buffer).keyTyped(vt320.KEY_ENTER, ' ', 0); return true; case KeyEvent.KEYCODE_DPAD_LEFT: if (selectingForCopy) { selectionArea.decrementColumn(); bridge.redraw(); } else { ((vt320) buffer).keyPressed(vt320.KEY_LEFT, ' ', getStateForBuffer()); bridge.tryKeyVibrate(); } return true; case KeyEvent.KEYCODE_DPAD_UP: if (selectingForCopy) { selectionArea.decrementRow(); bridge.redraw(); } else { ((vt320) buffer).keyPressed(vt320.KEY_UP, ' ', getStateForBuffer()); bridge.tryKeyVibrate(); } return true; case KeyEvent.KEYCODE_DPAD_DOWN: if (selectingForCopy) { selectionArea.incrementRow(); bridge.redraw(); } else { ((vt320) buffer).keyPressed(vt320.KEY_DOWN, ' ', getStateForBuffer()); bridge.tryKeyVibrate(); } return true; case KeyEvent.KEYCODE_DPAD_RIGHT: if (selectingForCopy) { selectionArea.incrementColumn(); bridge.redraw(); } else { ((vt320) buffer).keyPressed(vt320.KEY_RIGHT, ' ', getStateForBuffer()); bridge.tryKeyVibrate(); } return true; } } catch (IOException e) { Log.e(TAG, STR, e); try { bridge.transport.flush(); } catch (IOException ioe) { Log.d(TAG, STR); bridge.dispatchDisconnect(false); } } catch (NullPointerException npe) { Log.d(TAG, STR); return true; } return false; } | /**
* Handle onKey() events coming down from a {@link TerminalView} above us.
 * Modify the keys to make more sense to a host, then pass them to the transport.
 */ | Handle onKey() events coming down from a <code>TerminalView</code> above us. Modify the keys to make more sense to a host, then pass them to the transport | onKey | {
"repo_name": "gertcuykens/connectbot",
"path": "src/org/connectbot/service/TerminalKeyListener.java",
"license": "apache-2.0",
"size": 16123
} | [
"android.util.Log",
"android.view.KeyCharacterMap",
"android.view.KeyEvent",
"android.view.View",
"java.io.IOException",
"org.connectbot.util.PreferenceConstants"
] | import android.util.Log; import android.view.KeyCharacterMap; import android.view.KeyEvent; import android.view.View; import java.io.IOException; import org.connectbot.util.PreferenceConstants; | import android.util.*; import android.view.*; import java.io.*; import org.connectbot.util.*; | [
"android.util",
"android.view",
"java.io",
"org.connectbot.util"
] | android.util; android.view; java.io; org.connectbot.util; | 566,926 |
public int refreshNodes() throws IOException {
int exitCode = -1;
if (!(fs instanceof DistributedFileSystem)) {
System.err.println("FileSystem is " + fs.getUri());
return exitCode;
}
DistributedFileSystem dfs = (DistributedFileSystem) fs;
dfs.refreshNodes();
exitCode = 0;
return exitCode;
} | int function() throws IOException { int exitCode = -1; if (!(fs instanceof DistributedFileSystem)) { System.err.println(STR + fs.getUri()); return exitCode; } DistributedFileSystem dfs = (DistributedFileSystem) fs; dfs.refreshNodes(); exitCode = 0; return exitCode; } | /**
* Command to ask the namenode to reread the hosts and excluded hosts
* file.
* Usage: java DFSAdmin -refreshNodes
* @exception IOException
*/ | Command to ask the namenode to reread the hosts and excluded hosts file. Usage: java DFSAdmin -refreshNodes | refreshNodes | {
"repo_name": "hanhlh/hadoop-0.20.2_FatBTree",
"path": "src/hdfs/org/apache/hadoop/hdfs/tools/DFSAdmin.java",
"license": "apache-2.0",
"size": 31876
} | [
"java.io.IOException",
"org.apache.hadoop.hdfs.DistributedFileSystem"
] | import java.io.IOException; import org.apache.hadoop.hdfs.DistributedFileSystem; | import java.io.*; import org.apache.hadoop.hdfs.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 2,010,756 |
public AggregateDefinition aggregateController(AggregateController aggregateController) {
setAggregateController(aggregateController);
return this;
}
// Section - Methods from ExpressionNode
// Needed to copy methods from ExpressionNode here so that I could specify the
// correlation expression as optional in JAXB | AggregateDefinition function(AggregateController aggregateController) { setAggregateController(aggregateController); return this; } | /**
* To use a {@link org.apache.camel.processor.aggregate.AggregateController} to allow external sources to control
* this aggregator.
*/ | To use a <code>org.apache.camel.processor.aggregate.AggregateController</code> to allow external sources to control this aggregator | aggregateController | {
"repo_name": "pkletsko/camel",
"path": "camel-core/src/main/java/org/apache/camel/model/AggregateDefinition.java",
"license": "apache-2.0",
"size": 46049
} | [
"org.apache.camel.processor.aggregate.AggregateController"
] | import org.apache.camel.processor.aggregate.AggregateController; | import org.apache.camel.processor.aggregate.*; | [
"org.apache.camel"
] | org.apache.camel; | 1,993,884 |
public void updateRenderView() {
int currentScreenRotation = getWindowManager().getDefaultDisplay()
.getRotation();
if (currentScreenRotation != mLastScreenRotation) {
// Set projection matrix if there is already a valid one:
if (QCAR.isInitialized()
&& (mAppStatus == APPSTATUS_CAMERA_RUNNING)) {
DebugLog.LOGD("TextReco::updateRenderView");
// Query display dimensions:
storeScreenDimensions();
// Update viewport via renderer:
mRenderer.updateRendering(mScreenWidth, mScreenHeight);
// Update projection matrix:
setProjectionMatrix();
// Cache last rotation used for setting projection matrix:
mLastScreenRotation = currentScreenRotation;
}
}
} | void function() { int currentScreenRotation = getWindowManager().getDefaultDisplay() .getRotation(); if (currentScreenRotation != mLastScreenRotation) { if (QCAR.isInitialized() && (mAppStatus == APPSTATUS_CAMERA_RUNNING)) { DebugLog.LOGD(STR); storeScreenDimensions(); mRenderer.updateRendering(mScreenWidth, mScreenHeight); setProjectionMatrix(); mLastScreenRotation = currentScreenRotation; } } } | /**
* Updates projection matrix and viewport after a screen rotation change was
* detected.
*/ | Updates projection matrix and viewport after a screen rotation change was detected | updateRenderView | {
"repo_name": "varunkumar/aRed",
"path": "mobile/src/com/codered/ared/TextReco.java",
"license": "mit",
"size": 36697
} | [
"com.qualcomm.QCAR"
] | import com.qualcomm.QCAR; | import com.qualcomm.*; | [
"com.qualcomm"
] | com.qualcomm; | 1,001,892 |
public static Object[] joinArray(Object[] o1, Object[] o2) {
if (isArrayEmpty(o1)) {
return o2;
}
if (isArrayEmpty(o2)) {
return o1;
}
Class<?> type1 = o1[0].getClass();
Class<?> type2 = o2[0].getClass();
if (!type1.equals(type2)) {
throw new IllegalArgumentException(Messages.getString(
Message.ARRAY_TYPE_UNMATCH,
new Object[] { type1.toString(), type2.toString() }));
}
Object[] result = (Object[]) Array
.newInstance(type1, o1.length + o2.length);
System.arraycopy(o1, 0, result, 0, o1.length);
System.arraycopy(o2, 0, result, o1.length, o2.length);
return result;
} | static Object[] function(Object[] o1, Object[] o2) { if (isArrayEmpty(o1)) { return o2; } if (isArrayEmpty(o2)) { return o1; } Class<?> type1 = o1[0].getClass(); Class<?> type2 = o2[0].getClass(); if (!type1.equals(type2)) { throw new IllegalArgumentException(Messages.getString( Message.ARRAY_TYPE_UNMATCH, new Object[] { type1.toString(), type2.toString() })); } Object[] result = (Object[]) Array .newInstance(type1, o1.length + o2.length); System.arraycopy(o1, 0, result, 0, o1.length); System.arraycopy(o2, 0, result, o1.length, o2.length); return result; } | /**
* Join specified arrays to a new array
*
* @param o1
 *            the fist array to join
 * @param o2
* the second array to join
 * @return a new array containing all elements in specified arrays
*/ | Join specified arrays to a new array | joinArray | {
"repo_name": "elminsterjimmy/java",
"path": "Commons/src/main/java/com/elminster/common/util/ObjectUtil.java",
"license": "apache-2.0",
"size": 14824
} | [
"com.elminster.common.util.Messages",
"java.lang.reflect.Array"
] | import com.elminster.common.util.Messages; import java.lang.reflect.Array; | import com.elminster.common.util.*; import java.lang.reflect.*; | [
"com.elminster.common",
"java.lang"
] | com.elminster.common; java.lang; | 744,879 |
public void updatePersistedGlobalMetadata(String wizardDataId, GlobalMetadata globalMetadata)
throws DataRetrievalFailureException; | void function(String wizardDataId, GlobalMetadata globalMetadata) throws DataRetrievalFailureException; | /**
 * Updates the information corresponding to the given globalMetadata object.
*
* @param wizardDataId The id of the corresponding WizardData object.
* @param globalMetadata The globalMetadata object to update.
* @throws DataRetrievalFailureException If unable to update persisted GlobalMetadata object.
 */ | Updates the information corresponding to the given globalMetadata object | updatePersistedGlobalMetadata | {
"repo_name": "Unidata/rosetta",
"path": "src/main/java/edu/ucar/unidata/rosetta/repository/wizard/GlobalMetadataDao.java",
"license": "bsd-3-clause",
"size": 3395
} | [
"edu.ucar.unidata.rosetta.domain.GlobalMetadata",
"org.springframework.dao.DataRetrievalFailureException"
] | import edu.ucar.unidata.rosetta.domain.GlobalMetadata; import org.springframework.dao.DataRetrievalFailureException; | import edu.ucar.unidata.rosetta.domain.*; import org.springframework.dao.*; | [
"edu.ucar.unidata",
"org.springframework.dao"
] | edu.ucar.unidata; org.springframework.dao; | 1,564,875 |
protected void init()
{
sLog.fine("Executing init");
// values might be EL expression
String connectionNameConfig = PersistenceConfig.getPropertyValue("mcs.connection");
if (connectionNameConfig!=null)
{
connectionName = AdfmfJavaUtilities.evaluateELExpression(connectionNameConfig).toString();
sLog.fine("MCS default connectionName="+connectionName);
}
String anonymousKeyConfig = PersistenceConfig.getPropertyValue("mcs.anonymous-key");
if (anonymousKeyConfig!=null)
{
anonymousKey = AdfmfJavaUtilities.evaluateELExpression(anonymousKeyConfig).toString();
sLog.fine("MCS default anonymousKey="+anonymousKey);
}
String mobileBackendIdConfig = PersistenceConfig.getPropertyValue("mcs.mobile-backend-id");
if (mobileBackendIdConfig!=null)
{
mobileBackendId = AdfmfJavaUtilities.evaluateELExpression(mobileBackendIdConfig).toString();
sLog.fine("MCS default mobileBackendId="+mobileBackendId);
}
} | void function() { sLog.fine(STR); String connectionNameConfig = PersistenceConfig.getPropertyValue(STR); if (connectionNameConfig!=null) { connectionName = AdfmfJavaUtilities.evaluateELExpression(connectionNameConfig).toString(); sLog.fine(STR+connectionName); } String anonymousKeyConfig = PersistenceConfig.getPropertyValue(STR); if (anonymousKeyConfig!=null) { anonymousKey = AdfmfJavaUtilities.evaluateELExpression(anonymousKeyConfig).toString(); sLog.fine(STR+anonymousKey); } String mobileBackendIdConfig = PersistenceConfig.getPropertyValue(STR); if (mobileBackendIdConfig!=null) { mobileBackendId = AdfmfJavaUtilities.evaluateELExpression(mobileBackendIdConfig).toString(); sLog.fine(STR+mobileBackendId); } } | /**
* Initialize connectionName, mobileBackendId and anonymousAccessKey from
* mobile-persistence-config.properties. EL expressions are allowed for these
* values.
*/ | Initialize connectionName, mobileBackendId and anonymousAccessKey from mobile-persistence-config.properties. EL expressions are allowed for these values | init | {
"repo_name": "oracle/mobile-persistence",
"path": "Projects/Framework/Runtime/src/oracle/ateam/sample/mobile/v2/persistence/manager/MCSPersistenceManager.java",
"license": "mit",
"size": 30853
} | [
"oracle.adfmf.framework.api.AdfmfJavaUtilities",
"oracle.ateam.sample.mobile.v2.persistence.metadata.PersistenceConfig"
] | import oracle.adfmf.framework.api.AdfmfJavaUtilities; import oracle.ateam.sample.mobile.v2.persistence.metadata.PersistenceConfig; | import oracle.adfmf.framework.api.*; import oracle.ateam.sample.mobile.v2.persistence.metadata.*; | [
"oracle.adfmf.framework",
"oracle.ateam.sample"
] | oracle.adfmf.framework; oracle.ateam.sample; | 2,689,988 |
@ApiModelProperty(required = false, value = "generate pdf compliant with PDF/A-3A ISO 19005-3")
@JsonProperty("pdfaCompliant")
public Boolean getPdfaCompliant() {
return pdfaCompliant;
} | @ApiModelProperty(required = false, value = STR) @JsonProperty(STR) Boolean function() { return pdfaCompliant; } | /**
* generate pdf compliant with PDF/A-3A ISO 19005-3
**/ | generate pdf compliant with PDF/A-3A ISO 19005-3 | getPdfaCompliant | {
"repo_name": "viavansi/documents-sdk-java",
"path": "src/main/java/com/viafirma/documents/sdk/java/model/Document.java",
"license": "gpl-3.0",
"size": 9606
} | [
"com.fasterxml.jackson.annotation.JsonProperty",
"com.wordnik.swagger.annotations.ApiModelProperty"
] | import com.fasterxml.jackson.annotation.JsonProperty; import com.wordnik.swagger.annotations.ApiModelProperty; | import com.fasterxml.jackson.annotation.*; import com.wordnik.swagger.annotations.*; | [
"com.fasterxml.jackson",
"com.wordnik.swagger"
] | com.fasterxml.jackson; com.wordnik.swagger; | 2,673,042 |
public List<Structure> fetchData(BlenderInputStream inputStream) throws BlenderFileException {
if (oldMemoryAddress == 0) {
throw new NullPointerException("The pointer points to nothing!");
}
List<Structure> structures = null;
FileBlockHeader dataFileBlock = blenderContext.getFileBlock(oldMemoryAddress);
if (dataFileBlock == null) {
throw new BlenderFileException("No data stored for address: " + oldMemoryAddress + ". Rarely blender makes mistakes when storing data. Try resaving the model after making minor changes. This usually helps.");
}
if (pointerLevel > 1) {
int pointersAmount = dataFileBlock.getSize() / inputStream.getPointerSize() * dataFileBlock.getCount();
for (int i = 0; i < pointersAmount; ++i) {
inputStream.setPosition(dataFileBlock.getBlockPosition() + inputStream.getPointerSize() * i);
long oldMemoryAddress = inputStream.readPointer();
if (oldMemoryAddress != 0L) {
Pointer p = new Pointer(pointerLevel - 1, this.function, blenderContext);
p.oldMemoryAddress = oldMemoryAddress;
if (structures == null) {
structures = p.fetchData(inputStream);
} else {
structures.addAll(p.fetchData(inputStream));
}
} else {
// it is necessary to put null's if the pointer is null, ie. in materials array that is attached to the mesh, the index
// of the material is important, that is why we need null's to indicate that some materials' slots are empty
if (structures == null) {
structures = new ArrayList<Structure>();
}
structures.add(null);
}
}
} else {
inputStream.setPosition(dataFileBlock.getBlockPosition());
structures = new ArrayList<Structure>(dataFileBlock.getCount());
for (int i = 0; i < dataFileBlock.getCount(); ++i) {
Structure structure = blenderContext.getDnaBlockData().getStructure(dataFileBlock.getSdnaIndex());
structure.fill(inputStream);
structures.add(structure);
}
return structures;
}
return structures;
}
| List<Structure> function(BlenderInputStream inputStream) throws BlenderFileException { if (oldMemoryAddress == 0) { throw new NullPointerException(STR); } List<Structure> structures = null; FileBlockHeader dataFileBlock = blenderContext.getFileBlock(oldMemoryAddress); if (dataFileBlock == null) { throw new BlenderFileException(STR + oldMemoryAddress + STR); } if (pointerLevel > 1) { int pointersAmount = dataFileBlock.getSize() / inputStream.getPointerSize() * dataFileBlock.getCount(); for (int i = 0; i < pointersAmount; ++i) { inputStream.setPosition(dataFileBlock.getBlockPosition() + inputStream.getPointerSize() * i); long oldMemoryAddress = inputStream.readPointer(); if (oldMemoryAddress != 0L) { Pointer p = new Pointer(pointerLevel - 1, this.function, blenderContext); p.oldMemoryAddress = oldMemoryAddress; if (structures == null) { structures = p.fetchData(inputStream); } else { structures.addAll(p.fetchData(inputStream)); } } else { if (structures == null) { structures = new ArrayList<Structure>(); } structures.add(null); } } } else { inputStream.setPosition(dataFileBlock.getBlockPosition()); structures = new ArrayList<Structure>(dataFileBlock.getCount()); for (int i = 0; i < dataFileBlock.getCount(); ++i) { Structure structure = blenderContext.getDnaBlockData().getStructure(dataFileBlock.getSdnaIndex()); structure.fill(inputStream); structures.add(structure); } return structures; } return structures; } | /**
* This method fetches the data stored under the given address.
* @param inputStream
* the stream we read data from
* @return the data read from the file
* @throws BlenderFileException
* this exception is thrown when the blend file structure is somehow invalid or corrupted
*/ | This method fetches the data stored under the given address | fetchData | {
"repo_name": "chototsu/MikuMikuStudio",
"path": "engine/src/blender/com/jme3/scene/plugins/blender/file/Pointer.java",
"license": "bsd-2-clause",
"size": 7867
} | [
"com.jme3.scene.plugins.blender.exceptions.BlenderFileException",
"java.util.ArrayList",
"java.util.List"
] | import com.jme3.scene.plugins.blender.exceptions.BlenderFileException; import java.util.ArrayList; import java.util.List; | import com.jme3.scene.plugins.blender.exceptions.*; import java.util.*; | [
"com.jme3.scene",
"java.util"
] | com.jme3.scene; java.util; | 1,936,010 |
public T caseArchimateConcept(IArchimateConcept object) {
return null;
}
| T function(IArchimateConcept object) { return null; } | /**
* Returns the result of interpreting the object as an instance of '<em>Concept</em>'.
* <!-- begin-user-doc -->
* This implementation returns null;
* returning a non-null result will terminate the switch.
* <!-- end-user-doc -->
* @param object the target of the switch.
* @return the result of interpreting the object as an instance of '<em>Concept</em>'.
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
* @generated
*/ | Returns the result of interpreting the object as an instance of 'Concept'. This implementation returns null; returning a non-null result will terminate the switch. | caseArchimateConcept | {
"repo_name": "archimatetool/archi",
"path": "com.archimatetool.model/src/com/archimatetool/model/util/ArchimateSwitch.java",
"license": "mit",
"size": 256079
} | [
"com.archimatetool.model.IArchimateConcept"
] | import com.archimatetool.model.IArchimateConcept; | import com.archimatetool.model.*; | [
"com.archimatetool.model"
] | com.archimatetool.model; | 2,036,820 |
public void getSearch() {
if (getSelectedRow() >= 0) {
long id = Long.parseLong(table.getText(getSelectedRow(), 2));
Main.get().mainPanel.search.searchBrowser.searchResult.getSearch(data.get(id));
}
} | void function() { if (getSelectedRow() >= 0) { long id = Long.parseLong(table.getText(getSelectedRow(), 2)); Main.get().mainPanel.search.searchBrowser.searchResult.getSearch(data.get(id)); } } | /**
* Gets a search
*/ | Gets a search | getSearch | {
"repo_name": "codelibs/n2dms",
"path": "src/main/java/com/openkm/frontend/client/widget/searchsaved/SearchSaved.java",
"license": "gpl-2.0",
"size": 7985
} | [
"com.openkm.frontend.client.Main"
] | import com.openkm.frontend.client.Main; | import com.openkm.frontend.client.*; | [
"com.openkm.frontend"
] | com.openkm.frontend; | 1,910,278 |
public void disconnect() {
if (mConnectThread == null)
throw new NullPointerException(
"BluetoothSocket is not initialised!");
try {
unregisterReceiver();
mIsConnected = false;
mIsConnecting = false;
mBluetoothSocket.close();
} catch (IOException e) {
e.printStackTrace();
}
onBtcDisconnected();
}
/**
* Initiates listening of data from the connected remote BT Classic device.
* Whenever data is being retrieved the {@link #onBtcDataRead(byte[])} | void function() { if (mConnectThread == null) throw new NullPointerException( STR); try { unregisterReceiver(); mIsConnected = false; mIsConnecting = false; mBluetoothSocket.close(); } catch (IOException e) { e.printStackTrace(); } onBtcDisconnected(); } /** * Initiates listening of data from the connected remote BT Classic device. * Whenever data is being retrieved the {@link #onBtcDataRead(byte[])} | /**
* Will cancel an in-progress connection, and close the socket
*/ | Will cancel an in-progress connection, and close the socket | disconnect | {
"repo_name": "KyriakosAlexandrou/Bluetooth-Toolkit",
"path": "app/src/main/java/com/kyriakosalexandrou/bluetoothtoolkit/managers/BtcBaseDeviceManager.java",
"license": "mit",
"size": 14285
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 156,854 |
private Criterion constructNameCriterion(String columnName,
List<String> include, List<String> exclude) {
Conjunction and = Restrictions.conjunction();
if (!include.isEmpty()) {
Disjunction or = Restrictions.disjunction();
for (String rule : include) {
String value = rule.replace("*", "");
or.add(Restrictions
.ilike(columnName, value, getMatchMode(rule)));
}
and.add(or);
}
if (!exclude.isEmpty()) {
for (String rule : exclude) {
String value = rule.replace("*", "");
and.add(Restrictions.not(Restrictions.ilike(columnName, value,
getMatchMode(rule))));
}
}
return and;
} | Criterion function(String columnName, List<String> include, List<String> exclude) { Conjunction and = Restrictions.conjunction(); if (!include.isEmpty()) { Disjunction or = Restrictions.disjunction(); for (String rule : include) { String value = rule.replace("*", ""); or.add(Restrictions.ilike(columnName, value, getMatchMode(rule))); } and.add(or); } if (!exclude.isEmpty()) { for (String rule : exclude) { String value = rule.replace("*", ""); and.add(Restrictions.not(Restrictions.ilike(columnName, value, getMatchMode(rule)))); } } return and; } | /**
* Construct name criterion.
*
* @param columnName
* the column name
* @param include
* the list with include patterns
* @param exclude
* the list with exclude patterns
* @return the criterion
*/ | Construct name criterion | constructNameCriterion | {
"repo_name": "SirmaITT/conservation-space-1.7.0",
"path": "docker/sep-alfresco/alfresco-emf-integration/alfresco-integration-migration/src/main/java/com/sirma/itt/migration/register/FileRegisterServiceImpl.java",
"license": "lgpl-3.0",
"size": 15520
} | [
"java.util.List",
"org.hibernate.criterion.Conjunction",
"org.hibernate.criterion.Criterion",
"org.hibernate.criterion.Disjunction",
"org.hibernate.criterion.Restrictions"
] | import java.util.List; import org.hibernate.criterion.Conjunction; import org.hibernate.criterion.Criterion; import org.hibernate.criterion.Disjunction; import org.hibernate.criterion.Restrictions; | import java.util.*; import org.hibernate.criterion.*; | [
"java.util",
"org.hibernate.criterion"
] | java.util; org.hibernate.criterion; | 2,889,522 |
List<FeatureBin> timeBins = this.get(mzTrace);
timeBins.remove(index);
timeBins.add(timeBin);
super.put(mzTrace, timeBins);
} | List<FeatureBin> timeBins = this.get(mzTrace); timeBins.remove(index); timeBins.add(timeBin); super.put(mzTrace, timeBins); } | /**
* Updates an existing time bin with a new time bin.
*
* @param mzTrace the m/z trace key
* @param timeBin the new time bin
* @param index the index of the old time bin
*/ | Updates an existing time bin with a new time bin | add | {
"repo_name": "tomas-pluskal/masscascade",
"path": "MassCascadeCore/src/main/java/uk/ac/ebi/masscascade/alignment/featurebins/FeatureMap.java",
"license": "gpl-3.0",
"size": 2369
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,361,272 |
protected void deregisterWindow(Window window) {
if (getRoot() instanceof Flow)
((Flow) getRoot()).deregisterWindow(window);
} | void function(Window window) { if (getRoot() instanceof Flow) ((Flow) getRoot()).deregisterWindow(window); } | /**
* Deregisters the window with the flow.
*
* @param window the window
*/ | Deregisters the window with the flow | deregisterWindow | {
"repo_name": "waikato-datamining/adams-base",
"path": "adams-core/src/main/java/adams/flow/source/AbstractInteractiveSource.java",
"license": "gpl-3.0",
"size": 10099
} | [
"java.awt.Window"
] | import java.awt.Window; | import java.awt.*; | [
"java.awt"
] | java.awt; | 2,416,896 |
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof DateAxis)) {
return false;
}
DateAxis that = (DateAxis) obj;
if (!ObjectUtilities.equal(this.tickUnit, that.tickUnit)) {
return false;
}
if (!ObjectUtilities.equal(this.dateFormatOverride,
that.dateFormatOverride)) {
return false;
}
if (!ObjectUtilities.equal(this.tickMarkPosition,
that.tickMarkPosition)) {
return false;
}
if (!ObjectUtilities.equal(this.timeline, that.timeline)) {
return false;
}
if (!super.equals(obj)) {
return false;
}
return true;
} | boolean function(Object obj) { if (obj == this) { return true; } if (!(obj instanceof DateAxis)) { return false; } DateAxis that = (DateAxis) obj; if (!ObjectUtilities.equal(this.tickUnit, that.tickUnit)) { return false; } if (!ObjectUtilities.equal(this.dateFormatOverride, that.dateFormatOverride)) { return false; } if (!ObjectUtilities.equal(this.tickMarkPosition, that.tickMarkPosition)) { return false; } if (!ObjectUtilities.equal(this.timeline, that.timeline)) { return false; } if (!super.equals(obj)) { return false; } return true; } | /**
* Tests this axis for equality with an arbitrary object.
*
* @param obj the object (<code>null</code> permitted).
*
* @return A boolean.
*/ | Tests this axis for equality with an arbitrary object | equals | {
"repo_name": "ibestvina/multithread-centiscape",
"path": "CentiScaPe2.1/src/main/java/org/jfree/chart/axis/DateAxis.java",
"license": "mit",
"size": 66431
} | [
"org.jfree.util.ObjectUtilities"
] | import org.jfree.util.ObjectUtilities; | import org.jfree.util.*; | [
"org.jfree.util"
] | org.jfree.util; | 2,456,116 |
String providerName = "schoolchapters";
OAuthProperties oauthProperties = new OAuthProperties();
oauthProperties
.setAccessTokenUrl("https://vigrior.schoolchapters.com/oauth/access_token");
oauthProperties.setApplicationName("API-Test");
oauthProperties
.setAuthorizeUrl("https://vigrior.schoolchapters.com/oauth/authorize");
oauthProperties
.setRequestTokenUrl("https://vigrior.schoolchapters.com/oauth/request_token");
AuthStore authStore = new AuthStore("user1", "schoolchapters",
"0TMbbIx0CcLzEQqrINWM5oPAPUse9IHhR3USNuZj",
"QdtIrDaqftj1Xi9AQDmURsbQBO9UFkDYIuXSskdF");
} | String providerName = STR; OAuthProperties oauthProperties = new OAuthProperties(); oauthProperties.setAccessTokenUrl(STR); oauthProperties.setApplicationName(STR); oauthProperties.setAuthorizeUrl(STR); oauthProperties.setRequestTokenUrl(STR); AuthStore authStore = new AuthStore("user1", STR, STR, STR); } | /**
* Disable this test if it causes any error in the future
*/ | Disable this test if it causes any error in the future | testSchoolChapters | {
"repo_name": "wfuedu/Inotado",
"path": "inotado-impl/impl/src/test/edu/wfu/inotado/OAuthHelperTest.java",
"license": "apache-2.0",
"size": 2049
} | [
"edu.wfu.inotado.api.OAuthProperties"
] | import edu.wfu.inotado.api.OAuthProperties; | import edu.wfu.inotado.api.*; | [
"edu.wfu.inotado"
] | edu.wfu.inotado; | 2,471,420 |
public static Set<XMethod> resolveMethodCallTargets(ReferenceType receiverType, InvokeInstruction invokeInstruction,
ConstantPoolGen cpg, boolean receiverTypeIsExact) throws ClassNotFoundException {
if (invokeInstruction.getOpcode() == Constants.INVOKESTATIC)
throw new IllegalArgumentException();
String methodName = invokeInstruction.getName(cpg);
String methodSig = invokeInstruction.getSignature(cpg);
// Array method calls aren't virtual.
// They should just resolve to Object methods.
if (receiverType instanceof ArrayType)
try {
return Util.emptyOrNonnullSingleton(getXClass(objectDescriptor).findMethod(methodName, methodSig, false));
} catch (CheckedAnalysisException e) {
return Collections.<XMethod> emptySet();
}
if (receiverType instanceof ObjectType) {
// Get the receiver class.
String receiverClassName = ((ObjectType) receiverType).getClassName();
return resolveVirtualMethodCallTargets(receiverClassName, methodName, methodSig, receiverTypeIsExact,
invokeInstruction instanceof INVOKESPECIAL);
}
assert receiverType instanceof NullType;
return Collections.<XMethod> emptySet();
} | static Set<XMethod> function(ReferenceType receiverType, InvokeInstruction invokeInstruction, ConstantPoolGen cpg, boolean receiverTypeIsExact) throws ClassNotFoundException { if (invokeInstruction.getOpcode() == Constants.INVOKESTATIC) throw new IllegalArgumentException(); String methodName = invokeInstruction.getName(cpg); String methodSig = invokeInstruction.getSignature(cpg); if (receiverType instanceof ArrayType) try { return Util.emptyOrNonnullSingleton(getXClass(objectDescriptor).findMethod(methodName, methodSig, false)); } catch (CheckedAnalysisException e) { return Collections.<XMethod> emptySet(); } if (receiverType instanceof ObjectType) { String receiverClassName = ((ObjectType) receiverType).getClassName(); return resolveVirtualMethodCallTargets(receiverClassName, methodName, methodSig, receiverTypeIsExact, invokeInstruction instanceof INVOKESPECIAL); } assert receiverType instanceof NullType; return Collections.<XMethod> emptySet(); } | /**
* Resolve possible instance method call targets.
*
* @param receiverType
* type of the receiver object
* @param invokeInstruction
* the InvokeInstruction
* @param cpg
* the ConstantPoolGen
* @param receiverTypeIsExact
* if true, the receiver type is known exactly, which should
* allow a precise result
* @return Set of methods which might be called
* @throws ClassNotFoundException
*/ | Resolve possible instance method call targets | resolveMethodCallTargets | {
"repo_name": "OpenNTF/FindBug-for-Domino-Designer",
"path": "findBugsEclipsePlugin/src/edu/umd/cs/findbugs/ba/Hierarchy2.java",
"license": "lgpl-3.0",
"size": 20493
} | [
"edu.umd.cs.findbugs.ba.type.NullType",
"edu.umd.cs.findbugs.classfile.CheckedAnalysisException",
"edu.umd.cs.findbugs.util.Util",
"java.util.Collections",
"java.util.Set",
"org.apache.bcel.Constants",
"org.apache.bcel.generic.ArrayType",
"org.apache.bcel.generic.ConstantPoolGen",
"org.apache.bcel.generic.InvokeInstruction",
"org.apache.bcel.generic.ObjectType",
"org.apache.bcel.generic.ReferenceType"
] | import edu.umd.cs.findbugs.ba.type.NullType; import edu.umd.cs.findbugs.classfile.CheckedAnalysisException; import edu.umd.cs.findbugs.util.Util; import java.util.Collections; import java.util.Set; import org.apache.bcel.Constants; import org.apache.bcel.generic.ArrayType; import org.apache.bcel.generic.ConstantPoolGen; import org.apache.bcel.generic.InvokeInstruction; import org.apache.bcel.generic.ObjectType; import org.apache.bcel.generic.ReferenceType; | import edu.umd.cs.findbugs.ba.type.*; import edu.umd.cs.findbugs.classfile.*; import edu.umd.cs.findbugs.util.*; import java.util.*; import org.apache.bcel.*; import org.apache.bcel.generic.*; | [
"edu.umd.cs",
"java.util",
"org.apache.bcel"
] | edu.umd.cs; java.util; org.apache.bcel; | 691,674 |
public void setLocation(final Point location) {
setBounds(new Rectangle(location.x, location.y, bounds.width, bounds.height));
} | void function(final Point location) { setBounds(new Rectangle(location.x, location.y, bounds.width, bounds.height)); } | /**
* Sets the location of the drawing.
*
* @param location Location.
*/ | Sets the location of the drawing | setLocation | {
"repo_name": "debrief/debrief",
"path": "org.mwc.debrief.pepys/src/main/java/org/mwc/debrief/pepys/nebula/AbstractRenderer.java",
"license": "epl-1.0",
"size": 6217
} | [
"org.eclipse.swt.graphics.Point",
"org.eclipse.swt.graphics.Rectangle"
] | import org.eclipse.swt.graphics.Point; import org.eclipse.swt.graphics.Rectangle; | import org.eclipse.swt.graphics.*; | [
"org.eclipse.swt"
] | org.eclipse.swt; | 23,089 |
@Test(timeout = 300000)
public void testVerifyListReplicatedTable() throws Exception {
LOG.info("testVerifyListReplicatedTable");
final String tName = "VerifyListReplicated_";
final String colFam = "cf1";
final int numOfTables = 3;
HBaseAdmin hadmin = new HBaseAdmin(conf1);
// Create Tables
for (int i = 0; i < numOfTables; i++) {
HTableDescriptor ht = new HTableDescriptor(TableName.valueOf(tName + i));
HColumnDescriptor cfd = new HColumnDescriptor(colFam);
cfd.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
ht.addFamily(cfd);
hadmin.createTable(ht);
}
// verify the result
List<HashMap<String, String>> replicationColFams = admin.listReplicated();
int[] match = new int[numOfTables]; // array of 3 with init value of zero
for (int i = 0; i < replicationColFams.size(); i++) {
HashMap<String, String> replicationEntry = replicationColFams.get(i);
String tn = replicationEntry.get(ReplicationAdmin.TNAME);
if ((tn.startsWith(tName)) && replicationEntry.get(ReplicationAdmin.CFNAME).equals(colFam)) {
int m = Integer.parseInt(tn.substring(tn.length() - 1)); // get the last digit
match[m]++; // should only increase once
}
}
// check the matching result
for (int i = 0; i < match.length; i++) {
assertTrue("listReplicated() does not match table " + i, (match[i] == 1));
}
// drop tables
for (int i = 0; i < numOfTables; i++) {
String ht = tName + i;
hadmin.disableTable(ht);
hadmin.deleteTable(ht);
}
hadmin.close();
} | @Test(timeout = 300000) void function() throws Exception { LOG.info(STR); final String tName = STR; final String colFam = "cf1"; final int numOfTables = 3; HBaseAdmin hadmin = new HBaseAdmin(conf1); for (int i = 0; i < numOfTables; i++) { HTableDescriptor ht = new HTableDescriptor(TableName.valueOf(tName + i)); HColumnDescriptor cfd = new HColumnDescriptor(colFam); cfd.setScope(HConstants.REPLICATION_SCOPE_GLOBAL); ht.addFamily(cfd); hadmin.createTable(ht); } List<HashMap<String, String>> replicationColFams = admin.listReplicated(); int[] match = new int[numOfTables]; for (int i = 0; i < replicationColFams.size(); i++) { HashMap<String, String> replicationEntry = replicationColFams.get(i); String tn = replicationEntry.get(ReplicationAdmin.TNAME); if ((tn.startsWith(tName)) && replicationEntry.get(ReplicationAdmin.CFNAME).equals(colFam)) { int m = Integer.parseInt(tn.substring(tn.length() - 1)); match[m]++; } } for (int i = 0; i < match.length; i++) { assertTrue(STR + i, (match[i] == 1)); } for (int i = 0; i < numOfTables; i++) { String ht = tName + i; hadmin.disableTable(ht); hadmin.deleteTable(ht); } hadmin.close(); } | /**
* Test for HBASE-8663
* Create two new Tables with colfamilies enabled for replication then run
* ReplicationAdmin.listReplicated(). Finally verify the table:colfamilies. Note:
* TestReplicationAdmin is a better place for this testing but it would need mocks.
* @throws Exception
*/ | Test for HBASE-8663 Create two new Tables with colfamilies enabled for replication then run ReplicationAdmin.listReplicated(). Finally verify the table:colfamilies. Note: TestReplicationAdmin is a better place for this testing but it would need mocks | testVerifyListReplicatedTable | {
"repo_name": "Jackygq1982/hbase_src",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java",
"license": "apache-2.0",
"size": 18202
} | [
"java.util.HashMap",
"java.util.List",
"org.apache.hadoop.hbase.HColumnDescriptor",
"org.apache.hadoop.hbase.HConstants",
"org.apache.hadoop.hbase.HTableDescriptor",
"org.apache.hadoop.hbase.TableName",
"org.apache.hadoop.hbase.client.HBaseAdmin",
"org.apache.hadoop.hbase.client.replication.ReplicationAdmin",
"org.junit.Assert",
"org.junit.Test"
] | import java.util.HashMap; import java.util.List; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; import org.junit.Assert; import org.junit.Test; | import java.util.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.client.replication.*; import org.junit.*; | [
"java.util",
"org.apache.hadoop",
"org.junit"
] | java.util; org.apache.hadoop; org.junit; | 593,020 |
public Config getConfig()
{
try
{
return Config.getConfigByName("tester.properties");
}
catch(Exception e)
{
return null ;
}
} | Config function() { try { return Config.getConfigByName(STR); } catch(Exception e) { return null ; } } | /**
* Returns the config.
* @return Config
*/ | Returns the config | getConfig | {
"repo_name": "yongs2/mts-project",
"path": "mts/src/main/java/com/devoteam/srit/xmlloader/core/Tester.java",
"license": "gpl-3.0",
"size": 4566
} | [
"com.devoteam.srit.xmlloader.core.utils.Config"
] | import com.devoteam.srit.xmlloader.core.utils.Config; | import com.devoteam.srit.xmlloader.core.utils.*; | [
"com.devoteam.srit"
] | com.devoteam.srit; | 2,617,140 |
@Test
public void testRequestCache1UsingThreadIsolation() {
TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
SuccessfulCacheableCommand<String> command1 = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "A");
SuccessfulCacheableCommand<String> command2 = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "A");
assertTrue(command1.isCommandRunningInThread());
Future<String> f1 = command1.observe().toBlocking().toFuture();
Future<String> f2 = command2.observe().toBlocking().toFuture();
try {
assertEquals("A", f1.get());
assertEquals("A", f2.get());
} catch (Exception e) {
throw new RuntimeException(e);
}
assertTrue(command1.executed);
// the second one should not have executed as it should have received the cached value instead
assertFalse(command2.executed);
assertCommandExecutionEvents(command1, HystrixEventType.EMIT, HystrixEventType.SUCCESS);
assertTrue(command1.getExecutionTimeInMilliseconds() > -1);
assertFalse(command1.isResponseFromCache());
assertNull(command1.getExecutionException());
// the execution log for command2 should show it came from cache
assertCommandExecutionEvents(command2, HystrixEventType.EMIT, HystrixEventType.SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
assertTrue(command2.getExecutionTimeInMilliseconds() == -1);
assertTrue(command2.isResponseFromCache());
assertNull(command2.getExecutionException());
assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
assertSaneHystrixRequestLog(2);
} | void function() { TestCircuitBreaker circuitBreaker = new TestCircuitBreaker(); SuccessfulCacheableCommand<String> command1 = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "A"); SuccessfulCacheableCommand<String> command2 = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "A"); assertTrue(command1.isCommandRunningInThread()); Future<String> f1 = command1.observe().toBlocking().toFuture(); Future<String> f2 = command2.observe().toBlocking().toFuture(); try { assertEquals("A", f1.get()); assertEquals("A", f2.get()); } catch (Exception e) { throw new RuntimeException(e); } assertTrue(command1.executed); assertFalse(command2.executed); assertCommandExecutionEvents(command1, HystrixEventType.EMIT, HystrixEventType.SUCCESS); assertTrue(command1.getExecutionTimeInMilliseconds() > -1); assertFalse(command1.isResponseFromCache()); assertNull(command1.getExecutionException()); assertCommandExecutionEvents(command2, HystrixEventType.EMIT, HystrixEventType.SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE); assertTrue(command2.getExecutionTimeInMilliseconds() == -1); assertTrue(command2.isResponseFromCache()); assertNull(command2.getExecutionException()); assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount()); assertSaneHystrixRequestLog(2); } | /**
* Test Request scoped caching of commands so that a 2nd duplicate call doesn't execute but returns the previous Future
*/ | Test Request scoped caching of commands so that a 2nd duplicate call doesn't execute but returns the previous Future | testRequestCache1UsingThreadIsolation | {
"repo_name": "sasrin/Hystrix",
"path": "hystrix-core/src/test/java/com/netflix/hystrix/HystrixObservableCommandTest.java",
"license": "apache-2.0",
"size": 272384
} | [
"com.netflix.hystrix.HystrixCircuitBreakerTest",
"java.util.concurrent.Future",
"org.junit.Assert"
] | import com.netflix.hystrix.HystrixCircuitBreakerTest; import java.util.concurrent.Future; import org.junit.Assert; | import com.netflix.hystrix.*; import java.util.concurrent.*; import org.junit.*; | [
"com.netflix.hystrix",
"java.util",
"org.junit"
] | com.netflix.hystrix; java.util; org.junit; | 2,069,766 |
private void checkPreload() throws Exception {
assert cacheMode == PARTITIONED;
startUp();
// Perform writes.
Collection<IgfsFile> files = write();
// Check sizes.
Map<UUID, Integer> expSizes = new HashMap<>(GRID_CNT, 1.0f);
for (IgfsFile file : files) {
for (IgfsBlock block : file.blocks()) {
Collection<UUID> ids = primaryOrBackups(block.key());
for (UUID id : ids) {
if (expSizes.get(id) == null)
expSizes.put(id, block.length());
else
expSizes.put(id, expSizes.get(id) + block.length());
}
}
}
info("Size map before node start: " + expSizes);
for (int i = 0; i < GRID_CNT; i++) {
UUID id = grid(i).localNode().id();
GridCacheAdapter<IgfsBlockKey, byte[]> cache = cache(id);
int expSize = expSizes.get(id) != null ? expSizes.get(id) : 0;
assertEquals(expSize, cache.igfsDataSpaceUsed());
}
Ignite g = startGrid(GRID_CNT);
info("Started grid: " + g.cluster().localNode().id());
// Wait partitions are evicted.
awaitPartitionMapExchange();
// Check sizes again.
expSizes.clear();
for (IgfsFile file : files) {
for (IgfsBlock block : file.blocks()) {
Collection<UUID> ids = primaryOrBackups(block.key());
assert !ids.isEmpty();
for (UUID id : ids) {
if (expSizes.get(id) == null)
expSizes.put(id, block.length());
else
expSizes.put(id, expSizes.get(id) + block.length());
}
}
}
info("Size map after node start: " + expSizes);
for (int i = 0; i < GRID_CNT - 1; i++) {
UUID id = grid(i).localNode().id();
GridCacheAdapter<IgfsBlockKey, byte[]> cache = cache(id);
int expSize = expSizes.get(id) != null ? expSizes.get(id) : 0;
assertEquals("For node: " + id, expSize, cache.igfsDataSpaceUsed());
}
} | void function() throws Exception { assert cacheMode == PARTITIONED; startUp(); Collection<IgfsFile> files = write(); Map<UUID, Integer> expSizes = new HashMap<>(GRID_CNT, 1.0f); for (IgfsFile file : files) { for (IgfsBlock block : file.blocks()) { Collection<UUID> ids = primaryOrBackups(block.key()); for (UUID id : ids) { if (expSizes.get(id) == null) expSizes.put(id, block.length()); else expSizes.put(id, expSizes.get(id) + block.length()); } } } info(STR + expSizes); for (int i = 0; i < GRID_CNT; i++) { UUID id = grid(i).localNode().id(); GridCacheAdapter<IgfsBlockKey, byte[]> cache = cache(id); int expSize = expSizes.get(id) != null ? expSizes.get(id) : 0; assertEquals(expSize, cache.igfsDataSpaceUsed()); } Ignite g = startGrid(GRID_CNT); info(STR + g.cluster().localNode().id()); awaitPartitionMapExchange(); expSizes.clear(); for (IgfsFile file : files) { for (IgfsBlock block : file.blocks()) { Collection<UUID> ids = primaryOrBackups(block.key()); assert !ids.isEmpty(); for (UUID id : ids) { if (expSizes.get(id) == null) expSizes.put(id, block.length()); else expSizes.put(id, expSizes.get(id) + block.length()); } } } info(STR + expSizes); for (int i = 0; i < GRID_CNT - 1; i++) { UUID id = grid(i).localNode().id(); GridCacheAdapter<IgfsBlockKey, byte[]> cache = cache(id); int expSize = expSizes.get(id) != null ? expSizes.get(id) : 0; assertEquals(STR + id, expSize, cache.igfsDataSpaceUsed()); } } | /**
* Ensure that IGFS size is correctly updated in case of preloading.
*
* @throws Exception If failed.
*/ | Ensure that IGFS size is correctly updated in case of preloading | checkPreload | {
"repo_name": "WilliamDo/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/internal/processors/igfs/IgfsSizeSelfTest.java",
"license": "apache-2.0",
"size": 21762
} | [
"java.util.Collection",
"java.util.HashMap",
"java.util.Map",
"org.apache.ignite.Ignite",
"org.apache.ignite.internal.processors.cache.GridCacheAdapter"
] | import java.util.Collection; import java.util.HashMap; import java.util.Map; import org.apache.ignite.Ignite; import org.apache.ignite.internal.processors.cache.GridCacheAdapter; | import java.util.*; import org.apache.ignite.*; import org.apache.ignite.internal.processors.cache.*; | [
"java.util",
"org.apache.ignite"
] | java.util; org.apache.ignite; | 247,191 |
@Test
public void hbaseShimRegistered() {
assertRegistered( HBaseShim.class );
} | void function() { assertRegistered( HBaseShim.class ); } | /**
* Make sure we've registered our HBase Shim
*/ | Make sure we've registered our HBase Shim | hbaseShimRegistered | {
"repo_name": "andrei-viaryshka/pentaho-hadoop-shims",
"path": "common/modern/src/test/java/org/pentaho/hadoop/shim/common/ShimRegistrationTest.java",
"license": "apache-2.0",
"size": 2524
} | [
"org.pentaho.hbase.shim.spi.HBaseShim"
] | import org.pentaho.hbase.shim.spi.HBaseShim; | import org.pentaho.hbase.shim.spi.*; | [
"org.pentaho.hbase"
] | org.pentaho.hbase; | 2,116,663 |
private static double round(double value, int places) {
if (places < 0) throw new IllegalArgumentException();
BigDecimal bd = new BigDecimal(value);
bd = bd.setScale(places, RoundingMode.HALF_UP);
return bd.doubleValue();
}
/**
* Function used by DataBinding to load the release date into a TextView
* by calling {@link Movie#releaseDate} | static double function(double value, int places) { if (places < 0) throw new IllegalArgumentException(); BigDecimal bd = new BigDecimal(value); bd = bd.setScale(places, RoundingMode.HALF_UP); return bd.doubleValue(); } /** * Function used by DataBinding to load the release date into a TextView * by calling {@link Movie#releaseDate} | /**
* Safely round a double to required places.
* <br/>
* <strong>Read:</strong> http://stackoverflow.com/a/2808648/1558717
*
* @param value The value to be rounded
* @param places The decimal places to round the value to
* @return The rounded value
*/ | Safely round a double to required places. Read: HREF | round | {
"repo_name": "dakshj/TMDb_Sample",
"path": "app/src/main/java/com/daksh/tmdbsample/data/model/Movie.java",
"license": "apache-2.0",
"size": 6877
} | [
"android.widget.TextView",
"java.math.BigDecimal",
"java.math.RoundingMode"
] | import android.widget.TextView; import java.math.BigDecimal; import java.math.RoundingMode; | import android.widget.*; import java.math.*; | [
"android.widget",
"java.math"
] | android.widget; java.math; | 2,108,963 |
@Override
public void open(Map<String, Object> conf,
TopologyContext context,
SpoutOutputCollector collector) {
if (this.jmsProvider == null) {
throw new IllegalStateException("JMS provider has not been set.");
}
if (this.tupleProducer == null) {
throw new IllegalStateException("JMS Tuple Producer has not been set.");
}
// TODO get the default value from storm instead of hard coding 30 secs
Long topologyTimeout =
((Number) conf.getOrDefault(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS, DEFAULT_MESSAGE_TIMEOUT_SECS)).longValue();
if ((TimeUnit.SECONDS.toMillis(topologyTimeout)) > this.recoveryPeriodMs) {
LOG.warn("*** WARNING *** : "
+ "Recovery period (" + this.recoveryPeriodMs + " ms.) is less then the configured "
+ "'topology.message.timeout.secs' of " + topologyTimeout
+ " secs. This could lead to a message replay flood!");
}
this.queue = new LinkedBlockingQueue<Message>();
this.toCommit = new TreeSet<JmsMessageID>();
this.pendingMessages = new HashMap<JmsMessageID, Message>();
this.collector = collector;
try {
ConnectionFactory cf = this.jmsProvider.connectionFactory();
Destination dest = this.jmsProvider.destination();
this.connection = cf.createConnection();
this.session = connection.createSession(false, this.jmsAcknowledgeMode);
MessageConsumer consumer = session.createConsumer(dest);
consumer.setMessageListener(this);
this.connection.start();
if (this.isDurableSubscription() && this.recoveryPeriodMs > 0) {
this.recoveryTimer = new Timer();
this.recoveryTimer.scheduleAtFixedRate(new RecoveryTask(), RECOVERY_DELAY_MS, this.recoveryPeriodMs);
}
} catch (Exception e) {
LOG.warn("Error creating JMS connection.", e);
}
} | void function(Map<String, Object> conf, TopologyContext context, SpoutOutputCollector collector) { if (this.jmsProvider == null) { throw new IllegalStateException(STR); } if (this.tupleProducer == null) { throw new IllegalStateException(STR); } Long topologyTimeout = ((Number) conf.getOrDefault(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS, DEFAULT_MESSAGE_TIMEOUT_SECS)).longValue(); if ((TimeUnit.SECONDS.toMillis(topologyTimeout)) > this.recoveryPeriodMs) { LOG.warn(STR + STR + this.recoveryPeriodMs + STR + STR + topologyTimeout + STR); } this.queue = new LinkedBlockingQueue<Message>(); this.toCommit = new TreeSet<JmsMessageID>(); this.pendingMessages = new HashMap<JmsMessageID, Message>(); this.collector = collector; try { ConnectionFactory cf = this.jmsProvider.connectionFactory(); Destination dest = this.jmsProvider.destination(); this.connection = cf.createConnection(); this.session = connection.createSession(false, this.jmsAcknowledgeMode); MessageConsumer consumer = session.createConsumer(dest); consumer.setMessageListener(this); this.connection.start(); if (this.isDurableSubscription() && this.recoveryPeriodMs > 0) { this.recoveryTimer = new Timer(); this.recoveryTimer.scheduleAtFixedRate(new RecoveryTask(), RECOVERY_DELAY_MS, this.recoveryPeriodMs); } } catch (Exception e) { LOG.warn(STR, e); } } | /**
* <code>ISpout</code> implementation.
*
* <p>Connects the JMS spout to the configured JMS destination
* topic/queue.
*/ | <code>ISpout</code> implementation. Connects the JMS spout to the configured JMS destination topic/queue | open | {
"repo_name": "wangcy6/storm_app",
"path": "frame/storm-master/external/storm-jms/src/main/java/org/apache/storm/jms/spout/JmsSpout.java",
"license": "apache-2.0",
"size": 17936
} | [
"java.util.HashMap",
"java.util.Map",
"java.util.Timer",
"java.util.TreeSet",
"java.util.concurrent.LinkedBlockingQueue",
"java.util.concurrent.TimeUnit",
"javax.jms.ConnectionFactory",
"javax.jms.Destination",
"javax.jms.Message",
"javax.jms.MessageConsumer",
"org.apache.storm.Config",
"org.apache.storm.spout.SpoutOutputCollector",
"org.apache.storm.task.TopologyContext"
] | import java.util.HashMap; import java.util.Map; import java.util.Timer; import java.util.TreeSet; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import javax.jms.ConnectionFactory; import javax.jms.Destination; import javax.jms.Message; import javax.jms.MessageConsumer; import org.apache.storm.Config; import org.apache.storm.spout.SpoutOutputCollector; import org.apache.storm.task.TopologyContext; | import java.util.*; import java.util.concurrent.*; import javax.jms.*; import org.apache.storm.*; import org.apache.storm.spout.*; import org.apache.storm.task.*; | [
"java.util",
"javax.jms",
"org.apache.storm"
] | java.util; javax.jms; org.apache.storm; | 1,771,121 |
public void testReferenceFieldUpdaterGetAndAccumulate() {
AtomicReferenceFieldUpdater<Atomic8Test,Integer> a = anIntegerFieldUpdater();
a.set(this, one);
assertEquals((Integer) 1, a.getAndAccumulate(this, 2, Atomic8Test::sumInteger));
assertEquals((Integer) 3, a.getAndAccumulate(this, 3, Atomic8Test::sumInteger));
assertEquals((Integer) 6, a.get(this));
assertEquals((Integer) 6, anIntegerField);
} | void function() { AtomicReferenceFieldUpdater<Atomic8Test,Integer> a = anIntegerFieldUpdater(); a.set(this, one); assertEquals((Integer) 1, a.getAndAccumulate(this, 2, Atomic8Test::sumInteger)); assertEquals((Integer) 3, a.getAndAccumulate(this, 3, Atomic8Test::sumInteger)); assertEquals((Integer) 6, a.get(this)); assertEquals((Integer) 6, anIntegerField); } | /**
* AtomicReferenceFieldUpdater returns previous value and updates
* with supplied function.
*/ | AtomicReferenceFieldUpdater returns previous value and updates with supplied function | testReferenceFieldUpdaterGetAndAccumulate | {
"repo_name": "YouDiSN/OpenJDK-Research",
"path": "jdk9/jdk/test/java/util/concurrent/tck/Atomic8Test.java",
"license": "gpl-2.0",
"size": 24432
} | [
"java.util.concurrent.atomic.AtomicReferenceFieldUpdater"
] | import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; | import java.util.concurrent.atomic.*; | [
"java.util"
] | java.util; | 2,721,500 |
public static void positiveTest(int minRatio, boolean useXminf,
int maxRatio, boolean useXmaxf, boolean shrinkHeapInSteps,
LinkedList<String> options) throws Exception {
LinkedList<String> vmOptions = new LinkedList<>(options);
Collections.addAll(vmOptions,
(useXminf ? "-Xminf" + minRatio / 100.0 : "-XX:MinHeapFreeRatio=" + minRatio),
(useXmaxf ? "-Xmaxf" + maxRatio / 100.0 : "-XX:MaxHeapFreeRatio=" + maxRatio),
"-Xmx" + MAX_HEAP_SIZE,
"-Xms" + HEAP_SIZE,
"--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED",
"-XX:NewSize=" + NEW_SIZE,
"-XX:MaxNewSize=" + MAX_NEW_SIZE,
"-XX:" + (shrinkHeapInSteps ? '+' : '-') + "ShrinkHeapInSteps",
RatioVerifier.class.getName(),
Integer.toString(minRatio),
Integer.toString(maxRatio),
Boolean.toString(shrinkHeapInSteps)
);
ProcessBuilder procBuilder = ProcessTools.createJavaProcessBuilder(vmOptions.toArray(new String[vmOptions.size()]));
OutputAnalyzer analyzer = new OutputAnalyzer(procBuilder.start());
analyzer.shouldHaveExitValue(0);
} | static void function(int minRatio, boolean useXminf, int maxRatio, boolean useXmaxf, boolean shrinkHeapInSteps, LinkedList<String> options) throws Exception { LinkedList<String> vmOptions = new LinkedList<>(options); Collections.addAll(vmOptions, (useXminf ? STR + minRatio / 100.0 : STR + minRatio), (useXmaxf ? STR + maxRatio / 100.0 : STR + maxRatio), "-Xmx" + MAX_HEAP_SIZE, "-Xms" + HEAP_SIZE, STR, STR + NEW_SIZE, STR + MAX_NEW_SIZE, "-XX:" + (shrinkHeapInSteps ? '+' : '-') + STR, RatioVerifier.class.getName(), Integer.toString(minRatio), Integer.toString(maxRatio), Boolean.toString(shrinkHeapInSteps) ); ProcessBuilder procBuilder = ProcessTools.createJavaProcessBuilder(vmOptions.toArray(new String[vmOptions.size()])); OutputAnalyzer analyzer = new OutputAnalyzer(procBuilder.start()); analyzer.shouldHaveExitValue(0); } | /**
 * Verify that heap size will be changed to conform to
* min and max heap free ratios.
*
* @param minRatio value of MinHeapFreeRatio option
* @param useXminf used Xminf option instead of MinHeapFreeRatio
* @param maxRatio value of MaxHeapFreeRatio option
* @param useXmaxf used Xmaxf option instead of MaxHeapFreeRatio
* @param options additional options for JVM
 */ | Verify that heap size will be changed to conform to min and max heap free ratios | positiveTest | {
"repo_name": "YouDiSN/OpenJDK-Research",
"path": "jdk9/hotspot/test/gc/arguments/TestMaxMinHeapFreeRatioFlags.java",
"license": "gpl-2.0",
"size": 13202
} | [
"java.util.Collections",
"java.util.LinkedList"
] | import java.util.Collections; import java.util.LinkedList; | import java.util.*; | [
"java.util"
] | java.util; | 2,667,688 |
public ListSelectDialogBuilder<T> addListItems(T... items) {
this.content.addAll(Arrays.asList(items));
return this;
} | ListSelectDialogBuilder<T> function(T... items) { this.content.addAll(Arrays.asList(items)); return this; } | /**
* Adds a list of items to the list box at the end, in the order they are passed in
* @param items Items to add to the list box
* @return Itself
*/ | Adds a list of items to the list box at the end, in the order they are passed in | addListItems | {
"repo_name": "wknishio/variable-terminal",
"path": "src/lanterna/com/googlecode/lanterna/gui2/dialogs/ListSelectDialogBuilder.java",
"license": "mit",
"size": 4131
} | [
"java.util.Arrays"
] | import java.util.Arrays; | import java.util.*; | [
"java.util"
] | java.util; | 137,809 |
public Locale getTextLocale() {
return mLocale;
} | Locale function() { return mLocale; } | /**
* Get the text Locale.
*
* @return the paint's Locale used for drawing text, never null.
*/ | Get the text Locale | getTextLocale | {
"repo_name": "rex-xxx/mt6572_x201",
"path": "frameworks/base/graphics/java/android/graphics/Paint.java",
"license": "gpl-2.0",
"size": 83574
} | [
"java.util.Locale"
] | import java.util.Locale; | import java.util.*; | [
"java.util"
] | java.util; | 1,356,012 |
public int findColumn (String columnName) throws SQLException
{
validateResultSet();
return resultSet_.findColumn(columnName);
} | int function (String columnName) throws SQLException { validateResultSet(); return resultSet_.findColumn(columnName); } | /**
* Returns the column index for the specified column name.
*
* @param columnName The column name.
* @return The column index (1-based).
*
* @exception SQLException If the result set is not open
* or the column name is not found.
**/ | Returns the column index for the specified column name | findColumn | {
"repo_name": "piguangming/jt400",
"path": "src/com/ibm/as400/access/AS400JDBCRowSet.java",
"license": "epl-1.0",
"size": 312066
} | [
"java.sql.SQLException"
] | import java.sql.SQLException; | import java.sql.*; | [
"java.sql"
] | java.sql; | 1,436,064 |
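The same findColumn contract exists on the standard java.sql.ResultSet, which the sketch below uses instead of the IBM Toolbox row set; the in-memory H2 connection URL and the one-row query are illustrative assumptions.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class FindColumnSketch {
    public static void main(String[] args) throws SQLException {
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement st = con.createStatement();
             ResultSet rs = st.executeQuery("SELECT 1 AS ID, 'x' AS NAME")) {
            // 1-based column index; throws SQLException if the label is unknown.
            int nameIndex = rs.findColumn("NAME");
            while (rs.next()) {
                System.out.println(rs.getString(nameIndex));
            }
        }
    }
}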
public void setValues(PropertyValuesHolder... values) {
int numValues = values.length;
mValues = values;
mValuesMap = new HashMap<String, PropertyValuesHolder>(numValues);
for (int i = 0; i < numValues; ++i) {
PropertyValuesHolder valuesHolder = values[i];
mValuesMap.put(valuesHolder.getPropertyName(), valuesHolder);
}
// New property/values/target should cause re-initialization prior to starting
mInitialized = false;
} | void function(PropertyValuesHolder... values) { int numValues = values.length; mValues = values; mValuesMap = new HashMap<String, PropertyValuesHolder>(numValues); for (int i = 0; i < numValues; ++i) { PropertyValuesHolder valuesHolder = values[i]; mValuesMap.put(valuesHolder.getPropertyName(), valuesHolder); } mInitialized = false; } | /**
* Sets the values, per property, being animated between. This function is called internally
* by the constructors of ValueAnimator that take a list of values. But an ValueAnimator can
* be constructed without values and this method can be called to set the values manually
* instead.
*
* @param values The set of values, per property, being animated between.
*/ | Sets the values, per property, being animated between. This function is called internally by the constructors of ValueAnimator that take a list of values. But an ValueAnimator can be constructed without values and this method can be called to set the values manually instead | setValues | {
"repo_name": "yesterdaylike/DailyLady",
"path": "src/com/yesterday/like/ActionBar/internal/nineoldandroids/animation/ValueAnimator.java",
"license": "apache-2.0",
"size": 53264
} | [
"java.util.HashMap"
] | import java.util.HashMap; | import java.util.*; | [
"java.util"
] | java.util; | 2,095,397 |
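A hedged sketch of how setValues is typically combined with PropertyValuesHolder. It targets the platform android.animation classes, which the nineoldandroids backport in this record mirrors; the property names and duration are illustrative.

import android.animation.PropertyValuesHolder;
import android.animation.ValueAnimator;

public final class ValueAnimatorSketch {
    private ValueAnimatorSketch() {}

    public static ValueAnimator build() {
        PropertyValuesHolder alpha = PropertyValuesHolder.ofFloat("alpha", 0f, 1f);
        PropertyValuesHolder scale = PropertyValuesHolder.ofFloat("scale", 0.5f, 1f);
        ValueAnimator animator = ValueAnimator.ofPropertyValuesHolder(alpha);
        // Replaces the per-property values before the animator is started.
        animator.setValues(alpha, scale);
        animator.setDuration(300);
        return animator;
    }
}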
public int getSimilaritySearchDenseVectorSize() {
return similaritySearchDVS;
}
/**
* Returns the property type. If no explicit type is defined the default is assumed
* to be {@link PropertyType#STRING} | int function() { return similaritySearchDVS; } /** * Returns the property type. If no explicit type is defined the default is assumed * to be {@link PropertyType#STRING} | /**
* Returns size of dense vector used for similarity search using this index.
* @return dense vector size
*/ | Returns size of dense vector used for similarity search using this index | getSimilaritySearchDenseVectorSize | {
"repo_name": "trekawek/jackrabbit-oak",
"path": "oak-search/src/main/java/org/apache/jackrabbit/oak/plugins/index/search/PropertyDefinition.java",
"license": "apache-2.0",
"size": 13181
} | [
"javax.jcr.PropertyType"
] | import javax.jcr.PropertyType; | import javax.jcr.*; | [
"javax.jcr"
] | javax.jcr; | 281,205 |
public boolean drawImage(Image img,
int dx1, int dy1, int dx2, int dy2,
int sx1, int sy1, int sx2, int sy2,
Color bgcolor, ImageObserver observer) {
if (img == null) {
return true;
}
if (dx1 == dx2 || dy1 == dy2 ||
sx1 == sx2 || sy1 == sy2)
{
return true;
}
if (((sx2 - sx1) == (dx2 - dx1)) &&
((sy2 - sy1) == (dy2 - dy1)))
{
// Not a scale - forward it to a copy routine
int srcX, srcY, dstX, dstY, width, height;
if (sx2 > sx1) {
width = sx2 - sx1;
srcX = sx1;
dstX = dx1;
} else {
width = sx1 - sx2;
srcX = sx2;
dstX = dx2;
}
if (sy2 > sy1) {
height = sy2-sy1;
srcY = sy1;
dstY = dy1;
} else {
height = sy1-sy2;
srcY = sy2;
dstY = dy2;
}
return copyImage(img, dstX, dstY, srcX, srcY,
width, height, bgcolor, observer);
}
try {
return imagepipe.scaleImage(this, img, dx1, dy1, dx2, dy2,
sx1, sy1, sx2, sy2, bgcolor,
observer);
} catch (InvalidPipeException e) {
try {
revalidateAll();
return imagepipe.scaleImage(this, img, dx1, dy1, dx2, dy2,
sx1, sy1, sx2, sy2, bgcolor,
observer);
} catch (InvalidPipeException e2) {
// Still catching the exception; we are not yet ready to
// validate the surfaceData correctly. Fail for now and
// try again next time around.
return false;
}
} finally {
surfaceData.markDirty();
}
} | boolean function(Image img, int dx1, int dy1, int dx2, int dy2, int sx1, int sy1, int sx2, int sy2, Color bgcolor, ImageObserver observer) { if (img == null) { return true; } if (dx1 == dx2 dy1 == dy2 sx1 == sx2 sy1 == sy2) { return true; } if (((sx2 - sx1) == (dx2 - dx1)) && ((sy2 - sy1) == (dy2 - dy1))) { int srcX, srcY, dstX, dstY, width, height; if (sx2 > sx1) { width = sx2 - sx1; srcX = sx1; dstX = dx1; } else { width = sx1 - sx2; srcX = sx2; dstX = dx2; } if (sy2 > sy1) { height = sy2-sy1; srcY = sy1; dstY = dy1; } else { height = sy1-sy2; srcY = sy2; dstY = dy2; } return copyImage(img, dstX, dstY, srcX, srcY, width, height, bgcolor, observer); } try { return imagepipe.scaleImage(this, img, dx1, dy1, dx2, dy2, sx1, sy1, sx2, sy2, bgcolor, observer); } catch (InvalidPipeException e) { try { revalidateAll(); return imagepipe.scaleImage(this, img, dx1, dy1, dx2, dy2, sx1, sy1, sx2, sy2, bgcolor, observer); } catch (InvalidPipeException e2) { return false; } } finally { surfaceData.markDirty(); } } | /**
* Draws a subrectangle of an image scaled to a destination rectangle in
* nonblocking mode with a solid background color and a callback object.
*/ | Draws a subrectangle of an image scaled to a destination rectangle in nonblocking mode with a solid background color and a callback object | drawImage | {
"repo_name": "karianna/jdk8_tl",
"path": "jdk/src/share/classes/sun/java2d/SunGraphics2D.java",
"license": "gpl-2.0",
"size": 125632
} | [
"java.awt.Color",
"java.awt.Image",
"java.awt.image.ImageObserver"
] | import java.awt.Color; import java.awt.Image; import java.awt.image.ImageObserver; | import java.awt.*; import java.awt.image.*; | [
"java.awt"
] | java.awt; | 92,278 |
public static List<String> getAllNodeNames(DFSClient hdfsClient) throws IOException {
DatanodeInfo[] allNodes = hdfsClient.datanodeReport(HdfsConstants.DatanodeReportType.LIVE);
List<String> allNodeNames = new ArrayList<String>(allNodes.length);
for (DatanodeInfo nodeInfo : allNodes) {
allNodeNames.add(TerrapinUtil.getHelixInstanceFromHDFSHost(nodeInfo.getHostName()));
}
return allNodeNames;
} | static List<String> function(DFSClient hdfsClient) throws IOException { DatanodeInfo[] allNodes = hdfsClient.datanodeReport(HdfsConstants.DatanodeReportType.LIVE); List<String> allNodeNames = new ArrayList<String>(allNodes.length); for (DatanodeInfo nodeInfo : allNodes) { allNodeNames.add(TerrapinUtil.getHelixInstanceFromHDFSHost(nodeInfo.getHostName())); } return allNodeNames; } | /**
* Get all data nodes
* @param hdfsClient client instance for HDFS
* @return live data nodes
 * @throws IOException if the client encounters an error when communicating with the server
*/ | Get all data nodes | getAllNodeNames | {
"repo_name": "bowlofstew/terrapin",
"path": "controller/src/main/java/com/pinterest/terrapin/controller/ClusterStatusServlet.java",
"license": "apache-2.0",
"size": 10364
} | [
"com.pinterest.terrapin.TerrapinUtil",
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"org.apache.hadoop.hdfs.DFSClient",
"org.apache.hadoop.hdfs.protocol.DatanodeInfo",
"org.apache.hadoop.hdfs.protocol.HdfsConstants"
] | import com.pinterest.terrapin.TerrapinUtil; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.HdfsConstants; | import com.pinterest.terrapin.*; import java.io.*; import java.util.*; import org.apache.hadoop.hdfs.*; import org.apache.hadoop.hdfs.protocol.*; | [
"com.pinterest.terrapin",
"java.io",
"java.util",
"org.apache.hadoop"
] | com.pinterest.terrapin; java.io; java.util; org.apache.hadoop; | 2,661,345 |
@Override
public void doSaveAs() {
SaveAsDialog saveAsDialog = new SaveAsDialog(getSite().getShell());
saveAsDialog.open();
IPath path = saveAsDialog.getResult();
if (path != null) {
IFile file = ResourcesPlugin.getWorkspace().getRoot().getFile(path);
if (file != null) {
doSaveAs(URI.createPlatformResourceURI(file.getFullPath().toString(), true), new FileEditorInput(file));
}
}
}
| void function() { SaveAsDialog saveAsDialog = new SaveAsDialog(getSite().getShell()); saveAsDialog.open(); IPath path = saveAsDialog.getResult(); if (path != null) { IFile file = ResourcesPlugin.getWorkspace().getRoot().getFile(path); if (file != null) { doSaveAs(URI.createPlatformResourceURI(file.getFullPath().toString(), true), new FileEditorInput(file)); } } } | /**
* This also changes the editor's input.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This also changes the editor's input. | doSaveAs | {
"repo_name": "7xMatthx2/E4Training",
"path": "com.sii.airline.editor/src/com/sii/airline/airline/presentation/AirlineEditor.java",
"license": "epl-1.0",
"size": 55870
} | [
"org.eclipse.core.resources.IFile",
"org.eclipse.core.resources.ResourcesPlugin",
"org.eclipse.core.runtime.IPath",
"org.eclipse.emf.common.util.URI",
"org.eclipse.ui.dialogs.SaveAsDialog",
"org.eclipse.ui.part.FileEditorInput"
] | import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IPath; import org.eclipse.emf.common.util.URI; import org.eclipse.ui.dialogs.SaveAsDialog; import org.eclipse.ui.part.FileEditorInput; | import org.eclipse.core.resources.*; import org.eclipse.core.runtime.*; import org.eclipse.emf.common.util.*; import org.eclipse.ui.dialogs.*; import org.eclipse.ui.part.*; | [
"org.eclipse.core",
"org.eclipse.emf",
"org.eclipse.ui"
] | org.eclipse.core; org.eclipse.emf; org.eclipse.ui; | 966,834 |
public static OpenRegionRequest
buildOpenRegionRequest(ServerName server, final List<Pair<HRegionInfo,
List<ServerName>>> regionOpenInfos, Boolean openForReplay) {
OpenRegionRequest.Builder builder = OpenRegionRequest.newBuilder();
for (Pair<HRegionInfo, List<ServerName>> regionOpenInfo: regionOpenInfos) {
builder.addOpenInfo(buildRegionOpenInfo(regionOpenInfo.getFirst(),
regionOpenInfo.getSecond(), openForReplay));
}
if (server != null) {
builder.setServerStartCode(server.getStartcode());
}
// send the master's wall clock time as well, so that the RS can refer to it
builder.setMasterSystemTime(EnvironmentEdgeManager.currentTime());
return builder.build();
} | static OpenRegionRequest function(ServerName server, final List<Pair<HRegionInfo, List<ServerName>>> regionOpenInfos, Boolean openForReplay) { OpenRegionRequest.Builder builder = OpenRegionRequest.newBuilder(); for (Pair<HRegionInfo, List<ServerName>> regionOpenInfo: regionOpenInfos) { builder.addOpenInfo(buildRegionOpenInfo(regionOpenInfo.getFirst(), regionOpenInfo.getSecond(), openForReplay)); } if (server != null) { builder.setServerStartCode(server.getStartcode()); } builder.setMasterSystemTime(EnvironmentEdgeManager.currentTime()); return builder.build(); } | /**
* Create a protocol buffer OpenRegionRequest to open a list of regions
*
* @param server the serverName for the RPC
* @param regionOpenInfos info of a list of regions to open
* @param openForReplay
* @return a protocol buffer OpenRegionRequest
*/ | Create a protocol buffer OpenRegionRequest to open a list of regions | buildOpenRegionRequest | {
"repo_name": "lshmouse/hbase",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java",
"license": "apache-2.0",
"size": 65486
} | [
"java.util.List",
"org.apache.hadoop.hbase.HRegionInfo",
"org.apache.hadoop.hbase.ServerName",
"org.apache.hadoop.hbase.protobuf.generated.AdminProtos",
"org.apache.hadoop.hbase.util.EnvironmentEdgeManager",
"org.apache.hadoop.hbase.util.Pair"
] | import java.util.List; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; | import java.util.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.protobuf.generated.*; import org.apache.hadoop.hbase.util.*; | [
"java.util",
"org.apache.hadoop"
] | java.util; org.apache.hadoop; | 83,629 |
@Override
public void setAmtAcctCr (java.math.BigDecimal AmtAcctCr)
{
set_Value (COLUMNNAME_AmtAcctCr, AmtAcctCr);
} | void function (java.math.BigDecimal AmtAcctCr) { set_Value (COLUMNNAME_AmtAcctCr, AmtAcctCr); } | /** Set Haben.
@param AmtAcctCr
Ausgewiesener Forderungsbetrag
*/ | Set Haben | setAmtAcctCr | {
"repo_name": "klst-com/metasfresh",
"path": "de.metas.acct.base/src/main/java-gen/de/metas/acct/model/X_Fact_Acct_Summary.java",
"license": "gpl-2.0",
"size": 25384
} | [
"java.math.BigDecimal"
] | import java.math.BigDecimal; | import java.math.*; | [
"java.math"
] | java.math; | 1,358,062 |
public KualiDecimal getFinancialDocumentOtherCentAmount() {
return financialDocumentOtherCentAmount;
} | KualiDecimal function() { return financialDocumentOtherCentAmount; } | /**
* Gets the financialDocumentOtherCentAmount attribute.
*
* @return Returns the financialDocumentOtherCentAmount
*/ | Gets the financialDocumentOtherCentAmount attribute | getFinancialDocumentOtherCentAmount | {
"repo_name": "Ariah-Group/Finance",
"path": "af_webapp/src/main/java/org/kuali/kfs/fp/businessobject/CoinDetail.java",
"license": "apache-2.0",
"size": 23019
} | [
"org.kuali.rice.core.api.util.type.KualiDecimal"
] | import org.kuali.rice.core.api.util.type.KualiDecimal; | import org.kuali.rice.core.api.util.type.*; | [
"org.kuali.rice"
] | org.kuali.rice; | 2,849,873 |
private int getType(final File file) {
if (file.isDirectory()) {
return 1;
} else {
return 2;
}
} | int function(final File file) { if (file.isDirectory()) { return 1; } else { return 2; } } | /**
* Convert type to numeric value.
*
* @param file The file
* @return 1 for directories and 2 for files
*/ | Convert type to numeric value | getType | {
"repo_name": "stereokrauts/stereoscope",
"path": "org.apache.commons.io/src/main/java/org/apache/commons/io/comparator/DirectoryFileComparator.java",
"license": "gpl-2.0",
"size": 2914
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 2,019,155 |
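A self-contained comparator sketch built on the same convention (1 for directories, 2 for files), so directories sort ahead of files; it uses only the JDK and is an illustration rather than the commons-io implementation itself.

import java.io.File;
import java.util.Arrays;
import java.util.Comparator;

public class DirectoryFirstComparator implements Comparator<File> {
    @Override
    public int compare(File left, File right) {
        return Integer.compare(type(left), type(right));
    }

    // Mirrors the numeric convention: 1 for directories, 2 for files.
    private static int type(File file) {
        return file.isDirectory() ? 1 : 2;
    }

    public static void main(String[] args) {
        File[] entries = new File(".").listFiles();
        if (entries != null) {
            Arrays.sort(entries, new DirectoryFirstComparator());
            for (File f : entries) {
                System.out.println((f.isDirectory() ? "[dir]  " : "[file] ") + f.getName());
            }
        }
    }
}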
public void reset()
{
Map<String, Object> map = getPersistenceMap();
for (String key : map.keySet()) {
session.removeAttribute(key);
if (log.isDebugEnabled()) {
log.debug("Session attribute [" + key + "] cleared.");
}
}
getPersistenceMap().clear();
log.debug("persistenceMap cleared");
}
| void function() { Map<String, Object> map = getPersistenceMap(); for (String key : map.keySet()) { session.removeAttribute(key); if (log.isDebugEnabled()) { log.debug(STR + key + STR); } } getPersistenceMap().clear(); log.debug(STR); } | /**
* Clear all persistent variables from execContext and HttpSession
*/ | Clear all persistent variables from execContext and HttpSession | reset | {
"repo_name": "mwjmurphy/Axel-Framework",
"path": "axel-web/src/main/java/org/xmlactions/pager/context/PersistenceExecContext.java",
"license": "apache-2.0",
"size": 2407
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 1,680,534 |
public static String getPackageNameToUse(Context context) {
if (sPackageNameToUse != null) return sPackageNameToUse;
PackageManager pm = context.getPackageManager();
// Get default VIEW intent handler.
Intent activityIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.example.com"));
ResolveInfo defaultViewHandlerInfo = pm.resolveActivity(activityIntent, 0);
String defaultViewHandlerPackageName = null;
if (defaultViewHandlerInfo != null) {
defaultViewHandlerPackageName = defaultViewHandlerInfo.activityInfo.packageName;
}
// Get all apps that can handle VIEW intents.
List<ResolveInfo> resolvedActivityList = pm.queryIntentActivities(activityIntent, 0);
List<String> packagesSupportingCustomTabs = new ArrayList<>();
for (ResolveInfo info : resolvedActivityList) {
Intent serviceIntent = new Intent();
serviceIntent.setAction(CustomTabsService.ACTION_CUSTOM_TABS_CONNECTION);
serviceIntent.setPackage(info.activityInfo.packageName);
if (pm.resolveService(serviceIntent, 0) != null) {
packagesSupportingCustomTabs.add(info.activityInfo.packageName);
}
}
// Now packagesSupportingCustomTabs contains all apps that can handle both VIEW intents
// and service calls.
if (packagesSupportingCustomTabs.isEmpty()) {
sPackageNameToUse = null;
} else if (packagesSupportingCustomTabs.size() == 1) {
sPackageNameToUse = packagesSupportingCustomTabs.get(0);
} else if (!TextUtils.isEmpty(defaultViewHandlerPackageName)
&& !hasSpecializedHandlerIntents(context, activityIntent)
&& packagesSupportingCustomTabs.contains(defaultViewHandlerPackageName)) {
sPackageNameToUse = defaultViewHandlerPackageName;
} else if (packagesSupportingCustomTabs.contains(STABLE_PACKAGE)) {
sPackageNameToUse = STABLE_PACKAGE;
} else if (packagesSupportingCustomTabs.contains(BETA_PACKAGE)) {
sPackageNameToUse = BETA_PACKAGE;
} else if (packagesSupportingCustomTabs.contains(DEV_PACKAGE)) {
sPackageNameToUse = DEV_PACKAGE;
} else if (packagesSupportingCustomTabs.contains(LOCAL_PACKAGE)) {
sPackageNameToUse = LOCAL_PACKAGE;
}
return sPackageNameToUse;
} | static String function(Context context) { if (sPackageNameToUse != null) return sPackageNameToUse; PackageManager pm = context.getPackageManager(); Intent activityIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("http: ResolveInfo defaultViewHandlerInfo = pm.resolveActivity(activityIntent, 0); String defaultViewHandlerPackageName = null; if (defaultViewHandlerInfo != null) { defaultViewHandlerPackageName = defaultViewHandlerInfo.activityInfo.packageName; } List<ResolveInfo> resolvedActivityList = pm.queryIntentActivities(activityIntent, 0); List<String> packagesSupportingCustomTabs = new ArrayList<>(); for (ResolveInfo info : resolvedActivityList) { Intent serviceIntent = new Intent(); serviceIntent.setAction(CustomTabsService.ACTION_CUSTOM_TABS_CONNECTION); serviceIntent.setPackage(info.activityInfo.packageName); if (pm.resolveService(serviceIntent, 0) != null) { packagesSupportingCustomTabs.add(info.activityInfo.packageName); } } if (packagesSupportingCustomTabs.isEmpty()) { sPackageNameToUse = null; } else if (packagesSupportingCustomTabs.size() == 1) { sPackageNameToUse = packagesSupportingCustomTabs.get(0); } else if (!TextUtils.isEmpty(defaultViewHandlerPackageName) && !hasSpecializedHandlerIntents(context, activityIntent) && packagesSupportingCustomTabs.contains(defaultViewHandlerPackageName)) { sPackageNameToUse = defaultViewHandlerPackageName; } else if (packagesSupportingCustomTabs.contains(STABLE_PACKAGE)) { sPackageNameToUse = STABLE_PACKAGE; } else if (packagesSupportingCustomTabs.contains(BETA_PACKAGE)) { sPackageNameToUse = BETA_PACKAGE; } else if (packagesSupportingCustomTabs.contains(DEV_PACKAGE)) { sPackageNameToUse = DEV_PACKAGE; } else if (packagesSupportingCustomTabs.contains(LOCAL_PACKAGE)) { sPackageNameToUse = LOCAL_PACKAGE; } return sPackageNameToUse; } | /**
* Goes through all apps that handle VIEW intents and have a warmup service. Picks
* the one chosen by the user if there is one, otherwise makes a best effort to return a
* valid package name.
* <p/>
* This is <strong>not</strong> threadsafe.
*
* @param context {@link Context} to use for accessing {@link PackageManager}.
* @return The package name recommended to use for connecting to custom tabs related components.
*/ | Goes through all apps that handle VIEW intents and have a warmup service. Picks the one chosen by the user if there is one, otherwise makes a best effort to return a valid package name. This is not threadsafe | getPackageNameToUse | {
"repo_name": "SpaceAppsXploration/android-xpreader",
"path": "app/src/main/java/uk/projectchronos/xplorationreader/utils/CustomTabsHelper.java",
"license": "apache-2.0",
"size": 6240
} | [
"android.content.Context",
"android.content.Intent",
"android.content.pm.PackageManager",
"android.content.pm.ResolveInfo",
"android.net.Uri",
"android.support.customtabs.CustomTabsService",
"android.text.TextUtils",
"java.util.ArrayList",
"java.util.List"
] | import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.net.Uri; import android.support.customtabs.CustomTabsService; import android.text.TextUtils; import java.util.ArrayList; import java.util.List; | import android.content.*; import android.content.pm.*; import android.net.*; import android.support.customtabs.*; import android.text.*; import java.util.*; | [
"android.content",
"android.net",
"android.support",
"android.text",
"java.util"
] | android.content; android.net; android.support; android.text; java.util; | 141,586 |
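A sketch of how the resolved package name is usually consumed. It assumes the support-library CustomTabsIntent is on the classpath and that the CustomTabsHelper class from this record is visible (for example, in the same package); when no Custom Tabs provider is found, the launch falls back to the default handler.

import android.app.Activity;
import android.net.Uri;
import android.support.customtabs.CustomTabsIntent;

public final class CustomTabsLauncher {
    private CustomTabsLauncher() {}

    public static void open(Activity activity, Uri uri) {
        // Assumes CustomTabsHelper (from this record) is accessible here.
        String packageName = CustomTabsHelper.getPackageNameToUse(activity);
        CustomTabsIntent tabsIntent = new CustomTabsIntent.Builder().build();
        if (packageName != null) {
            // Pin the intent to the chosen Custom Tabs provider.
            tabsIntent.intent.setPackage(packageName);
        }
        tabsIntent.launchUrl(activity, uri);
    }
}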
private String getDescriptionFromAttributeConsumingService(String lang) {
List<RoleDescriptor> roles;
AttributeConsumingService acs = null;
EntityDescriptor sp = getSPEntityDescriptor();
if (null == sp) {
log.debug("No relying party, nothing to display");
return null;
}
roles = sp.getRoleDescriptors(SPSSODescriptor.DEFAULT_ELEMENT_NAME);
if (!roles.isEmpty()) {
SPSSODescriptor spssod = (SPSSODescriptor) roles.get(0);
acs = spssod.getDefaultAttributeConsumingService();
}
if (acs != null) {
for (ServiceDescription desc : acs.getDescriptions()) {
LocalizedString localDescription = desc.getDescription();
if (log.isDebugEnabled()) {
log.debug("Found name in AttributeConsumingService, language=" + localDescription.getLanguage());
}
if (localDescription.getLanguage().equals(lang)) {
if (log.isDebugEnabled()) {
log.debug("returning name from AttributeConsumingService "
+ desc.getDescription().getLocalString());
}
return localDescription.getLocalString();
}
}
if (log.isDebugEnabled()) {
log.debug("No description in AttributeConsumingService");
}
}
return null;
} | String function(String lang) { List<RoleDescriptor> roles; AttributeConsumingService acs = null; EntityDescriptor sp = getSPEntityDescriptor(); if (null == sp) { log.debug(STR); return null; } roles = sp.getRoleDescriptors(SPSSODescriptor.DEFAULT_ELEMENT_NAME); if (!roles.isEmpty()) { SPSSODescriptor spssod = (SPSSODescriptor) roles.get(0); acs = spssod.getDefaultAttributeConsumingService(); } if (acs != null) { for (ServiceDescription desc : acs.getDescriptions()) { LocalizedString localDescription = desc.getDescription(); if (log.isDebugEnabled()) { log.debug(STR + localDescription.getLanguage()); } if (localDescription.getLanguage().equals(lang)) { if (log.isDebugEnabled()) { log.debug(STR + desc.getDescription().getLocalString()); } return localDescription.getLocalString(); } } if (log.isDebugEnabled()) { log.debug(STR); } } return null; } | /**
 * look for an <AttributeConsumingService> and if it's there look for an appropriate description.
*
* @param lang - which language to look up
* @return null or an appropriate description
 */ | look for an <AttributeConsumingService> and if it's there look for an appropriate description | getDescriptionFromAttributeConsumingService | {
"repo_name": "jagheterfredrik/java-idp",
"path": "src/main/java/edu/internet2/middleware/shibboleth/idp/ui/ServiceDescriptionTag.java",
"license": "apache-2.0",
"size": 6071
} | [
"java.util.List",
"org.opensaml.saml2.metadata.AttributeConsumingService",
"org.opensaml.saml2.metadata.EntityDescriptor",
"org.opensaml.saml2.metadata.LocalizedString",
"org.opensaml.saml2.metadata.RoleDescriptor",
"org.opensaml.saml2.metadata.SPSSODescriptor",
"org.opensaml.saml2.metadata.ServiceDescription"
] | import java.util.List; import org.opensaml.saml2.metadata.AttributeConsumingService; import org.opensaml.saml2.metadata.EntityDescriptor; import org.opensaml.saml2.metadata.LocalizedString; import org.opensaml.saml2.metadata.RoleDescriptor; import org.opensaml.saml2.metadata.SPSSODescriptor; import org.opensaml.saml2.metadata.ServiceDescription; | import java.util.*; import org.opensaml.saml2.metadata.*; | [
"java.util",
"org.opensaml.saml2"
] | java.util; org.opensaml.saml2; | 1,745,432 |
public List<PhysicalPlan> getPlans() {
return myPlans;
} | List<PhysicalPlan> function() { return myPlans; } | /**
* Returns the list of nested plans.
* @return the list of the nested plans
* @see org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PlanPrinter
*/ | Returns the list of nested plans | getPlans | {
"repo_name": "kellyzly/pig",
"path": "src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/POSplit.java",
"license": "apache-2.0",
"size": 10258
} | [
"java.util.List",
"org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan"
] | import java.util.List; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan; | import java.util.*; import org.apache.pig.backend.hadoop.executionengine.*; | [
"java.util",
"org.apache.pig"
] | java.util; org.apache.pig; | 1,981,241 |
public static String getRelativeNameFrom(Item p, ItemGroup g, boolean useDisplayName) {
if (p == null) return null;
if (g == null) return useDisplayName ? p.getFullDisplayName() : p.getFullName();
String separationString = useDisplayName ? " » " : "/";
// first list up all the parents
Map<ItemGroup,Integer> parents = new HashMap<ItemGroup,Integer>();
int depth=0;
while (g!=null) {
parents.put(g, depth++);
if (g instanceof Item)
g = ((Item)g).getParent();
else
g = null;
}
StringBuilder buf = new StringBuilder();
Item i=p;
while (true) {
if (buf.length()>0) buf.insert(0,separationString);
buf.insert(0,useDisplayName ? i.getDisplayName() : i.getName());
ItemGroup gr = i.getParent();
Integer d = parents.get(gr);
if (d!=null) {
for (int j=d; j>0; j--) {
buf.insert(0,separationString);
buf.insert(0,"..");
}
return buf.toString();
}
if (gr instanceof Item)
i = (Item)gr;
else
return null;
}
} | static String function(Item p, ItemGroup g, boolean useDisplayName) { if (p == null) return null; if (g == null) return useDisplayName ? p.getFullDisplayName() : p.getFullName(); String separationString = useDisplayName ? STR : "/"; Map<ItemGroup,Integer> parents = new HashMap<ItemGroup,Integer>(); int depth=0; while (g!=null) { parents.put(g, depth++); if (g instanceof Item) g = ((Item)g).getParent(); else g = null; } StringBuilder buf = new StringBuilder(); Item i=p; while (true) { if (buf.length()>0) buf.insert(0,separationString); buf.insert(0,useDisplayName ? i.getDisplayName() : i.getName()); ItemGroup gr = i.getParent(); Integer d = parents.get(gr); if (d!=null) { for (int j=d; j>0; j--) { buf.insert(0,separationString); buf.insert(0,".."); } return buf.toString(); } if (gr instanceof Item) i = (Item)gr; else return null; } } | /**
* Gets the relative name or display name to the given item from the specified group.
*
* @since 1.515
* @param p the Item we want the relative display name
* @param g the ItemGroup used as point of reference for the item
* @param useDisplayName if true, returns a display name, otherwise returns a name
* @return
* String like "foo » bar"
*/ | Gets the relative name or display name to the given item from the specified group | getRelativeNameFrom | {
"repo_name": "lilyJi/jenkins",
"path": "core/src/main/java/hudson/Functions.java",
"license": "mit",
"size": 74776
} | [
"hudson.model.Item",
"hudson.model.ItemGroup",
"java.util.HashMap",
"java.util.Map"
] | import hudson.model.Item; import hudson.model.ItemGroup; import java.util.HashMap; import java.util.Map; | import hudson.model.*; import java.util.*; | [
"hudson.model",
"java.util"
] | hudson.model; java.util; | 2,678,365 |
OperationStatus insert(O object) throws OBStorageException, OBException,
IllegalAccessException, InstantiationException; | OperationStatus insert(O object) throws OBStorageException, OBException, IllegalAccessException, InstantiationException; | /**
* Inserts the given object into the index.
* @param object
* The object to be added.
* @return {@link net.obsearch.Status#OK} if the object was inserted.
* {@link net.obsearch.Status#EXISTS} if the object existed in the DB.
* The method getId() of {@link net.obsearch.OperationStatus} will return
* the id used by the object.
* @throws OBStorageException
* If something goes wrong with the DB
* @throws OBException
* User generated exception
* @throws IllegalAccessException
* If there is a problem when instantiating objects O
* @throws InstantiationException
* If there is a problem when instantiating objects O
* @since 0.0
*/ | Inserts the given object into the index | insert | {
"repo_name": "amuller/obsearch",
"path": "src/main/java/net/obsearch/Index.java",
"license": "gpl-3.0",
"size": 14522
} | [
"net.obsearch.exception.OBException",
"net.obsearch.exception.OBStorageException"
] | import net.obsearch.exception.OBException; import net.obsearch.exception.OBStorageException; | import net.obsearch.exception.*; | [
"net.obsearch.exception"
] | net.obsearch.exception; | 2,598,320 |
public float getGrayFill() {
if (backgroundColor instanceof GrayColor)
return ((GrayColor)backgroundColor).getGray();
return 0;
} | float function() { if (backgroundColor instanceof GrayColor) return ((GrayColor)backgroundColor).getGray(); return 0; } | /**
* Gets the grayscale.
*
* @return the grayscale color of the background
* or 0 if the background has no grayscale color.
*/ | Gets the grayscale | getGrayFill | {
"repo_name": "bullda/DroidText",
"path": "src/core/com/lowagie/text/Rectangle.java",
"license": "lgpl-3.0",
"size": 21602
} | [
"com.lowagie.text.pdf.GrayColor"
] | import com.lowagie.text.pdf.GrayColor; | import com.lowagie.text.pdf.*; | [
"com.lowagie.text"
] | com.lowagie.text; | 1,315,721 |
List<User> getUserLisOfCorp(int corpId); | List<User> getUserLisOfCorp(int corpId); | /**
* get user list of corporation
*
* @param corpId
* @return
*/ | get user list of corporation | getUserLisOfCorp | {
"repo_name": "thx/RAP",
"path": "src/main/java/com/taobao/rigel/rap/organization/service/OrganizationMgr.java",
"license": "gpl-3.0",
"size": 6479
} | [
"com.taobao.rigel.rap.account.bo.User",
"java.util.List"
] | import com.taobao.rigel.rap.account.bo.User; import java.util.List; | import com.taobao.rigel.rap.account.bo.*; import java.util.*; | [
"com.taobao.rigel",
"java.util"
] | com.taobao.rigel; java.util; | 2,532,565 |
ServiceResponse<List<Product>> getComplexItemEmpty() throws ErrorException, IOException; | ServiceResponse<List<Product>> getComplexItemEmpty() throws ErrorException, IOException; | /**
* Get array of complex type with empty item [{'integer': 1 'string': '2'}, {}, {'integer': 5, 'string': '6'}].
*
* @throws ErrorException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
* @return the List<Product> object wrapped in {@link ServiceResponse} if successful.
*/ | Get array of complex type with empty item [{'integer': 1 'string': '2'}, {}, {'integer': 5, 'string': '6'}] | getComplexItemEmpty | {
"repo_name": "haocs/autorest",
"path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/bodyarray/Arrays.java",
"license": "mit",
"size": 72234
} | [
"com.microsoft.rest.ServiceResponse",
"java.io.IOException",
"java.util.List"
] | import com.microsoft.rest.ServiceResponse; import java.io.IOException; import java.util.List; | import com.microsoft.rest.*; import java.io.*; import java.util.*; | [
"com.microsoft.rest",
"java.io",
"java.util"
] | com.microsoft.rest; java.io; java.util; | 2,501,252 |
void updateSheet() {
SwingUtilities.invokeLater(() -> {
this.setSheet(createSheet());
});
} | void updateSheet() { SwingUtilities.invokeLater(() -> { this.setSheet(createSheet()); }); } | /**
* Refreshes this node's property sheet.
*/ | Refreshes this node's property sheet | updateSheet | {
"repo_name": "sleuthkit/autopsy",
"path": "Core/src/org/sleuthkit/autopsy/datamodel/OsAccounts.java",
"license": "apache-2.0",
"size": 27548
} | [
"javax.swing.SwingUtilities"
] | import javax.swing.SwingUtilities; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 42,870 |
public IndexSchema addField(SchemaField newField, boolean persist) {
return addFields(Collections.singletonList(newField), Collections.EMPTY_MAP, persist );
} | IndexSchema function(SchemaField newField, boolean persist) { return addFields(Collections.singletonList(newField), Collections.EMPTY_MAP, persist ); } | /**
 * Copies this schema, adds the given field to the copy.
* Requires synchronizing on the object returned by
* {@link #getSchemaUpdateLock()}.
*
* @param newField the SchemaField to add
* @param persist to persist the schema or not
* @return a new IndexSchema based on this schema with newField added
* @see #newField(String, String, Map)
 */ | Copies this schema, adds the given field to the copy. Requires synchronizing on the object returned by <code>#getSchemaUpdateLock()</code> | addField | {
"repo_name": "yida-lxw/solr-5.3.1",
"path": "solr/core/src/java/org/apache/solr/schema/IndexSchema.java",
"license": "apache-2.0",
"size": 73638
} | [
"java.util.Collections"
] | import java.util.Collections; | import java.util.*; | [
"java.util"
] | java.util; | 2,885,599 |
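A hedged sketch of the calling pattern the javadoc describes: synchronize on getSchemaUpdateLock(), build the field with newField, then add it to a copy of the schema. The "string" field type name and the empty options map are assumptions, not taken from the Solr source.

import java.util.Collections;

import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;

public final class AddFieldSketch {
    private AddFieldSketch() {}

    public static IndexSchema addStringField(IndexSchema oldSchema, String name) {
        // The schema is copied; addField returns a new IndexSchema with the field included.
        synchronized (oldSchema.getSchemaUpdateLock()) {
            SchemaField field = oldSchema.newField(name, "string", Collections.emptyMap());
            return oldSchema.addField(field, true);
        }
    }
}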
SinkFunction<RowData> createSinkFunction(); | SinkFunction<RowData> createSinkFunction(); | /**
* Creates a {@link SinkFunction} instance.
*/ | Creates a <code>SinkFunction</code> instance | createSinkFunction | {
"repo_name": "greghogan/flink",
"path": "flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/connector/sink/SinkFunctionProvider.java",
"license": "apache-2.0",
"size": 2063
} | [
"org.apache.flink.streaming.api.functions.sink.SinkFunction",
"org.apache.flink.table.data.RowData"
] | import org.apache.flink.streaming.api.functions.sink.SinkFunction; import org.apache.flink.table.data.RowData; | import org.apache.flink.streaming.api.functions.sink.*; import org.apache.flink.table.data.*; | [
"org.apache.flink"
] | org.apache.flink; | 1,137,261 |
@Test
public void testCreateSilentIsReallySilent() throws InterruptedException,
KeeperException, IOException {
Configuration c = TEST_UTIL.getConfiguration();
String aclZnode = "/aclRoot";
String quorumServers = ZKConfig.getZKQuorumServersString(c);
int sessionTimeout = 5 * 1000; // 5 seconds
ZooKeeper zk = new ZooKeeper(quorumServers, sessionTimeout, EmptyWatcher.instance);
zk.addAuthInfo("digest", "hbase:rox".getBytes());
// Assumes the root of the ZooKeeper space is writable as it creates a node
// wherever the cluster home is defined.
ZooKeeperWatcher zk2 = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(),
"testMasterAddressManagerFromZK", null);
// I set this acl after the attempted creation of the cluster home node.
// Add retries in case of retryable zk exceptions.
while (true) {
try {
zk.setACL("/", ZooDefs.Ids.CREATOR_ALL_ACL, -1);
break;
} catch (KeeperException e) {
switch (e.code()) {
case CONNECTIONLOSS:
case SESSIONEXPIRED:
case OPERATIONTIMEOUT:
LOG.warn("Possibly transient ZooKeeper exception: " + e);
Threads.sleep(100);
break;
default:
throw e;
}
}
}
while (true) {
try {
zk.create(aclZnode, null, ZooDefs.Ids.CREATOR_ALL_ACL, CreateMode.PERSISTENT);
break;
} catch (KeeperException e) {
switch (e.code()) {
case CONNECTIONLOSS:
case SESSIONEXPIRED:
case OPERATIONTIMEOUT:
LOG.warn("Possibly transient ZooKeeper exception: " + e);
Threads.sleep(100);
break;
default:
throw e;
}
}
}
zk.close();
ZKUtil.createAndFailSilent(zk2, aclZnode);
} | void function() throws InterruptedException, KeeperException, IOException { Configuration c = TEST_UTIL.getConfiguration(); String aclZnode = STR; String quorumServers = ZKConfig.getZKQuorumServersString(c); int sessionTimeout = 5 * 1000; ZooKeeper zk = new ZooKeeper(quorumServers, sessionTimeout, EmptyWatcher.instance); zk.addAuthInfo(STR, STR.getBytes()); ZooKeeperWatcher zk2 = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(), STR, null); while (true) { try { zk.setACL("/", ZooDefs.Ids.CREATOR_ALL_ACL, -1); break; } catch (KeeperException e) { switch (e.code()) { case CONNECTIONLOSS: case SESSIONEXPIRED: case OPERATIONTIMEOUT: LOG.warn(STR + e); Threads.sleep(100); break; default: throw e; } } } while (true) { try { zk.create(aclZnode, null, ZooDefs.Ids.CREATOR_ALL_ACL, CreateMode.PERSISTENT); break; } catch (KeeperException e) { switch (e.code()) { case CONNECTIONLOSS: case SESSIONEXPIRED: case OPERATIONTIMEOUT: LOG.warn(STR + e); Threads.sleep(100); break; default: throw e; } } } zk.close(); ZKUtil.createAndFailSilent(zk2, aclZnode); } | /**
* A test for HBASE-3238
* @throws IOException A connection attempt to zk failed
* @throws InterruptedException One of the non ZKUtil actions was interrupted
* @throws KeeperException Any of the zookeeper connections had a
* KeeperException
*/ | A test for HBASE-3238 | testCreateSilentIsReallySilent | {
"repo_name": "ddraj/hbase-trunk-mttr",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java",
"license": "apache-2.0",
"size": 18894
} | [
"java.io.IOException",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hbase.util.Threads",
"org.apache.hadoop.hbase.zookeeper.EmptyWatcher",
"org.apache.hadoop.hbase.zookeeper.ZKConfig",
"org.apache.hadoop.hbase.zookeeper.ZKUtil",
"org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher",
"org.apache.zookeeper.CreateMode",
"org.apache.zookeeper.KeeperException",
"org.apache.zookeeper.ZooDefs",
"org.apache.zookeeper.ZooKeeper"
] | import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.zookeeper.EmptyWatcher; import org.apache.hadoop.hbase.zookeeper.ZKConfig; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.ZooKeeper; | import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.util.*; import org.apache.hadoop.hbase.zookeeper.*; import org.apache.zookeeper.*; | [
"java.io",
"org.apache.hadoop",
"org.apache.zookeeper"
] | java.io; org.apache.hadoop; org.apache.zookeeper; | 1,474,000 |
public static boolean getOpenIDConnectSkipeUserConsent() {
return OAuthServerConfiguration.getInstance().getOpenIDConnectSkipeUserConsentConfig();
} | static boolean function() { return OAuthServerConfiguration.getInstance().getOpenIDConnectSkipeUserConsentConfig(); } | /**
 * Returns whether the OpenID Connect user consent should be skipped.
 *
 * @return true if the user consent page should be skipped
 */ | Returns whether the OpenID Connect user consent should be skipped | getOpenIDConnectSkipeUserConsent | {
"repo_name": "IsuraD/identity-inbound-auth-oauth",
"path": "components/org.wso2.carbon.identity.oidc.session/src/main/java/org/wso2/carbon/identity/oidc/session/util/OIDCSessionManagementUtil.java",
"license": "apache-2.0",
"size": 11981
} | [
"org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration"
] | import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration; | import org.wso2.carbon.identity.oauth.config.*; | [
"org.wso2.carbon"
] | org.wso2.carbon; | 99,486 |
public Double getPdop() {
Object object = getValue(KEY_PDOP);
return SdlDataTypeConverter.objectToDouble(object);
}
| Double function() { Object object = getValue(KEY_PDOP); return SdlDataTypeConverter.objectToDouble(object); } | /**
* get the positional dilution of precision
*/ | get the positional dilution of precision | getPdop | {
"repo_name": "smartdevicelink/sdl_android",
"path": "base/src/main/java/com/smartdevicelink/proxy/rpc/GPSData.java",
"license": "bsd-3-clause",
"size": 15815
} | [
"com.smartdevicelink.util.SdlDataTypeConverter"
] | import com.smartdevicelink.util.SdlDataTypeConverter; | import com.smartdevicelink.util.*; | [
"com.smartdevicelink.util"
] | com.smartdevicelink.util; | 444,657 |
public static Key<?> get(Type type, Annotation annotation) {
return new Key<Object>(type, strategyFor(annotation));
} | static Key<?> function(Type type, Annotation annotation) { return new Key<Object>(type, strategyFor(annotation)); } | /**
* Gets a key for an injection type and an annotation.
*/ | Gets a key for an injection type and an annotation | get | {
"repo_name": "utopiazh/google-guice",
"path": "core/src/com/google/inject/Key.java",
"license": "apache-2.0",
"size": 14234
} | [
"java.lang.annotation.Annotation",
"java.lang.reflect.Type"
] | import java.lang.annotation.Annotation; import java.lang.reflect.Type; | import java.lang.annotation.*; import java.lang.reflect.*; | [
"java.lang"
] | java.lang; | 43,072 |
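A small usage sketch pairing the method with Guice's Names.named binding annotation; the "api.url" name is illustrative.

import com.google.inject.Key;
import com.google.inject.name.Names;

public class KeySketch {
    public static void main(String[] args) {
        // A key combines the injected type with a binding annotation instance.
        Key<?> key = Key.get(String.class, Names.named("api.url"));
        System.out.println(key); // prints something like Key[type=java.lang.String, annotation=@Named(api.url)]
    }
}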
public static <K, V> Builder<K, V> builder() {
return new Builder<K, V>();
}
private static class BuilderMultimap<K, V> extends AbstractMultimap<K, V> {
BuilderMultimap() {
super(new LinkedHashMap<K, Collection<V>>());
} | static <K, V> Builder<K, V> function() { return new Builder<K, V>(); } private static class BuilderMultimap<K, V> extends AbstractMultimap<K, V> { BuilderMultimap() { super(new LinkedHashMap<K, Collection<V>>()); } | /**
* Returns a new builder. The generated builder is equivalent to the builder
* created by the {@link Builder} constructor.
*/ | Returns a new builder. The generated builder is equivalent to the builder created by the <code>Builder</code> constructor | builder | {
"repo_name": "hceylan/guava",
"path": "guava-gwt/src-super/com/google/common/collect/super/com/google/common/collect/ImmutableMultimap.java",
"license": "apache-2.0",
"size": 18652
} | [
"java.util.Collection",
"java.util.LinkedHashMap"
] | import java.util.Collection; import java.util.LinkedHashMap; | import java.util.*; | [
"java.util"
] | java.util; | 771,473 |
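A short usage sketch of the builder; the keys and values are illustrative.

import com.google.common.collect.ImmutableMultimap;

public class MultimapBuilderSketch {
    public static void main(String[] args) {
        // Entries are kept in insertion order and frozen by build().
        ImmutableMultimap<String, Integer> scores = ImmutableMultimap.<String, Integer>builder()
                .put("alice", 10)
                .putAll("bob", 7, 9)
                .build();
        System.out.println(scores.get("bob")); // [7, 9]
    }
}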
private int[] selectPreviewFpsRange(Camera camera, float desiredPreviewFps) {
// The camera API uses integers scaled by a factor of 1000 instead of floating-point frame
// rates.
int desiredPreviewFpsScaled = (int) (desiredPreviewFps * 1000.0f);
// The method for selecting the best range is to minimize the sum of the differences between
// the desired value and the upper and lower bounds of the range. This may select a range
// that the desired value is outside of, but this is often preferred. For example, if the
// desired frame rate is 29.97, the range (30, 30) is probably more desirable than the
// range (15, 30).
int[] selectedFpsRange = null;
int minDiff = Integer.MAX_VALUE;
List<int[]> previewFpsRangeList = camera.getParameters().getSupportedPreviewFpsRange();
for (int[] range : previewFpsRangeList) {
int deltaMin = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
int deltaMax = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
int diff = Math.abs(deltaMin) + Math.abs(deltaMax);
if (diff < minDiff) {
selectedFpsRange = range;
minDiff = diff;
}
}
return selectedFpsRange;
} | int[] function(Camera camera, float desiredPreviewFps) { int desiredPreviewFpsScaled = (int) (desiredPreviewFps * 1000.0f); int[] selectedFpsRange = null; int minDiff = Integer.MAX_VALUE; List<int[]> previewFpsRangeList = camera.getParameters().getSupportedPreviewFpsRange(); for (int[] range : previewFpsRangeList) { int deltaMin = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX]; int deltaMax = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]; int diff = Math.abs(deltaMin) + Math.abs(deltaMax); if (diff < minDiff) { selectedFpsRange = range; minDiff = diff; } } return selectedFpsRange; } | /**
* Selects the most suitable preview frames per second range, given the desired frames per
* second.
*
* @param camera the camera to select a frames per second range from
* @param desiredPreviewFps the desired frames per second for the camera preview frames
* @return the selected preview frames per second range
*/ | Selects the most suitable preview frames per second range, given the desired frames per second | selectPreviewFpsRange | {
"repo_name": "kglawrence/book-park",
"path": "app/src/main/java/me/kglawrence/kerilawrence/book_park/ui/camera/CameraSource.java",
"license": "mit",
"size": 48878
} | [
"android.hardware.Camera",
"java.util.List"
] | import android.hardware.Camera; import java.util.List; | import android.hardware.*; import java.util.*; | [
"android.hardware",
"java.util"
] | android.hardware; java.util; | 1,631,213 |
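The selection rule (minimize the summed distance between the desired rate and both bounds of each range) can be exercised without the Android camera API; the sketch below reimplements it over plain int[] ranges, so it is an illustration rather than the CameraSource code itself.

import java.util.Arrays;
import java.util.List;

public class FpsRangeSelector {
    static int[] select(List<int[]> ranges, float desiredFps) {
        int desiredScaled = (int) (desiredFps * 1000.0f); // camera fps ranges are scaled by 1000
        int[] best = null;
        int minDiff = Integer.MAX_VALUE;
        for (int[] range : ranges) {
            int diff = Math.abs(desiredScaled - range[0]) + Math.abs(desiredScaled - range[1]);
            if (diff < minDiff) {
                best = range;
                minDiff = diff;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        List<int[]> supported = Arrays.asList(new int[]{15000, 30000}, new int[]{30000, 30000});
        // 29.97 fps is closer to (30, 30) than to (15, 30): prints [30000, 30000]
        System.out.println(Arrays.toString(select(supported, 29.97f)));
    }
}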
List<String> getTags(); | List<String> getTags(); | /**
* Gets the Champion tags.
*
* @return the {@link List} of tags
*/ | Gets the Champion tags | getTags | {
"repo_name": "drumonii/LeagueTrollBuild",
"path": "backend/src/main/java/com/drumonii/loltrollbuild/api/service/ChampionsApiService.java",
"license": "mit",
"size": 1075
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 265,750 |
public static void computeMeanScore(ClusterSet clusters) throws Exception {
logger.info("------ compute Sum ------");
for (String name : clusters) {
Cluster cluster = clusters.getCluster(name);
cluster.computeMeanScore();
logger.fine("Cluster: " + name);
cluster.debugSpeakerName();
}
} | static void function(ClusterSet clusters) throws Exception { logger.info(STR); for (String name : clusters) { Cluster cluster = clusters.getCluster(name); cluster.computeMeanScore(); logger.fine(STR + name); cluster.debugSpeakerName(); } } | /**
* Compute mean score.
*
* @param clusters the clusters
* @throws Exception the exception
*/ | Compute mean score | computeMeanScore | {
"repo_name": "Adirockzz95/GenderDetect",
"path": "src/src/fr/lium/experimental/spkDiarization/programs/SpeakerIdenificationDecision14.java",
"license": "gpl-3.0",
"size": 46876
} | [
"fr.lium.spkDiarization.libClusteringData.Cluster",
"fr.lium.spkDiarization.libClusteringData.ClusterSet"
] | import fr.lium.spkDiarization.libClusteringData.Cluster; import fr.lium.spkDiarization.libClusteringData.ClusterSet; | import fr.lium.*; | [
"fr.lium"
] | fr.lium; | 2,384,650 |
public Date getCreateDate(); | Date function(); | /**
* Returns the create date of this dossier proc agent.
*
* @return the create date of this dossier proc agent
*/ | Returns the create date of this dossier proc agent | getCreateDate | {
"repo_name": "openegovplatform/OEPv2",
"path": "oep-dossier-portlet/docroot/WEB-INF/service/org/oep/dossiermgt/model/DossierProcAgentModel.java",
"license": "apache-2.0",
"size": 6926
} | [
"java.util.Date"
] | import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 787,409 |
public void setPassword(Principal principal, PasswordCallback callback) throws AccountNotFoundException
{
if (_passwordFile == null)
{
throw new AccountNotFoundException("Unable to locate principal since no password file was specified during initialisation");
}
if (principal == null)
{
throw new IllegalArgumentException("principal must not be null");
}
char[] pwd = lookupPassword(principal.getName());
if (pwd != null)
{
callback.setPassword(pwd);
}
else
{
throw new AccountNotFoundException("No account found for principal " + principal);
}
} | void function(Principal principal, PasswordCallback callback) throws AccountNotFoundException { if (_passwordFile == null) { throw new AccountNotFoundException(STR); } if (principal == null) { throw new IllegalArgumentException(STR); } char[] pwd = lookupPassword(principal.getName()); if (pwd != null) { callback.setPassword(pwd); } else { throw new AccountNotFoundException(STR + principal); } } | /**
* SASL Callback Mechanism - sets the Password in the PasswordCallback based on the value in the PasswordFile
* If you want to change the password for a user, use updatePassword instead.
*
* @param principal The Principal to set the password for
* @param callback The PasswordCallback to call setPassword on
*
 * @throws AccountNotFoundException If the Principal cannot be found in this Database
 */ | SASL Callback Mechanism - sets the Password in the PasswordCallback based on the value in the PasswordFile. If you want to change the password for a user, use updatePassword instead | setPassword | {
"repo_name": "dhanuka84/andes",
"path": "modules/andes-core/broker/src/main/java/org/wso2/andes/server/security/auth/database/Base64MD5PasswordFilePrincipalDatabase.java",
"license": "apache-2.0",
"size": 17846
} | [
"java.security.Principal",
"javax.security.auth.callback.PasswordCallback",
"javax.security.auth.login.AccountNotFoundException"
] | import java.security.Principal; import javax.security.auth.callback.PasswordCallback; import javax.security.auth.login.AccountNotFoundException; | import java.security.*; import javax.security.auth.callback.*; import javax.security.auth.login.*; | [
"java.security",
"javax.security"
] | java.security; javax.security; | 1,793,430 |
final String originalURL = user.optString(UserExt.USER_AVATAR_URL);
if (Symphonys.getBoolean("qiniu.enabled")) {
if (!StringUtils.contains(originalURL, "qnssl.com") && !StringUtils.contains(originalURL, "clouddn.com")) {
user.put(UserExt.USER_AVATAR_URL, DEFAULT_AVATAR_URL);
return;
}
}
user.put(UserExt.USER_AVATAR_URL, StringUtils.substringBeforeLast(originalURL, "?"));
} | final String originalURL = user.optString(UserExt.USER_AVATAR_URL); if (Symphonys.getBoolean(STR)) { if (!StringUtils.contains(originalURL, STR) && !StringUtils.contains(originalURL, STR)) { user.put(UserExt.USER_AVATAR_URL, DEFAULT_AVATAR_URL); return; } } user.put(UserExt.USER_AVATAR_URL, StringUtils.substringBeforeLast(originalURL, "?")); } | /**
* Fills the specified user thumbnail URL.
*
* @param user the specified user
*/ | Fills the specified user thumbnail URL | fillUserAvatarURL | {
"repo_name": "FangStarNet/symphonyx",
"path": "src/main/java/org/b3log/symphony/service/AvatarQueryService.java",
"license": "apache-2.0",
"size": 6391
} | [
"org.apache.commons.lang.StringUtils",
"org.b3log.symphony.model.UserExt",
"org.b3log.symphony.util.Symphonys"
] | import org.apache.commons.lang.StringUtils; import org.b3log.symphony.model.UserExt; import org.b3log.symphony.util.Symphonys; | import org.apache.commons.lang.*; import org.b3log.symphony.model.*; import org.b3log.symphony.util.*; | [
"org.apache.commons",
"org.b3log.symphony"
] | org.apache.commons; org.b3log.symphony; | 2,840,878 |
void addMouseListener(@Nonnull IMouseListener mouseListener); | void addMouseListener(@Nonnull IMouseListener mouseListener); | /**
* Add a listener that will be notified when a mouse event occurs.
* @param mouseListener the listener to add
*/ | Add a listener that will be notified when a mouse event occurs | addMouseListener | {
"repo_name": "bensmith87/ui",
"path": "src/main/java/ben/ui/input/mouse/IMouseHandler.java",
"license": "lgpl-3.0",
"size": 2238
} | [
"javax.annotation.Nonnull"
] | import javax.annotation.Nonnull; | import javax.annotation.*; | [
"javax.annotation"
] | javax.annotation; | 248,066 |
default Instant getNotBefore() {
return this.getClaimAsInstant(OAuth2IntrospectionClaimNames.NOT_BEFORE);
} | default Instant getNotBefore() { return this.getClaimAsInstant(OAuth2IntrospectionClaimNames.NOT_BEFORE); } | /**
* Returns a timestamp {@code (nbf)} indicating when the token is not to be used
* before
* @return a timestamp indicating when the token is not to be used before
*/ | Returns a timestamp (nbf) indicating when the token is not to be used before | getNotBefore | {
"repo_name": "fhanik/spring-security",
"path": "oauth2/oauth2-resource-server/src/main/java/org/springframework/security/oauth2/server/resource/introspection/OAuth2IntrospectionClaimAccessor.java",
"license": "apache-2.0",
"size": 4407
} | [
"java.time.Instant"
] | import java.time.Instant; | import java.time.*; | [
"java.time"
] | java.time; | 1,245,789 |
private static long hash64(@Nullable String value, long seed) {
if (value == null) {
return hash64(null, 0, 0, seed);
}
return hash64(value.getBytes(UTF_8), seed);
} | static long function(@Nullable String value, long seed) { if (value == null) { return hash64(null, 0, 0, seed); } return hash64(value.getBytes(UTF_8), seed); } | /**
* Hash a string to a 64 bit value using the supplied seed.
*
* @param value the string to hash
* @param seed the seed
* @return 64 bit hash value
*/ | Hash a string to a 64 bit value using the supplied seed | hash64 | {
"repo_name": "anomaly/closure-compiler",
"path": "src/com/google/javascript/jscomp/JsMessage.java",
"license": "apache-2.0",
"size": 20636
} | [
"javax.annotation.Nullable"
] | import javax.annotation.Nullable; | import javax.annotation.*; | [
"javax.annotation"
] | javax.annotation; | 2,474,603 |
@ViewDebug.CapturedViewProperty
public int getSelectedItemPosition() {
return mNextSelectedPosition;
} | @ViewDebug.CapturedViewProperty int function() { return mNextSelectedPosition; } | /**
* Return the position of the currently selected item within the adapter's data set.
*
* @return int Position (starting at 0), or {@link #INVALID_POSITION} if there is nothing selected.
*/ | Return the position of the currently selected item within the adapter's data set | getSelectedItemPosition | {
"repo_name": "photo/mobile-android",
"path": "submodules/Android-Feather/src/com/aviary/android/feather/widget/AdapterView.java",
"license": "apache-2.0",
"size": 35727
} | [
"android.view.ViewDebug"
] | import android.view.ViewDebug; | import android.view.*; | [
"android.view"
] | android.view; | 340,958 |
void sidelineBigOverlaps(
Collection<HbckInfo> bigOverlap) throws IOException {
int overlapsToSideline = bigOverlap.size() - maxMerge;
if (overlapsToSideline > maxOverlapsToSideline) {
overlapsToSideline = maxOverlapsToSideline;
}
List<HbckInfo> regionsToSideline =
RegionSplitCalculator.findBigRanges(bigOverlap, overlapsToSideline);
FileSystem fs = FileSystem.get(conf);
for (HbckInfo regionToSideline: regionsToSideline) {
try {
LOG.info("Closing region: " + regionToSideline);
closeRegion(regionToSideline);
} catch (IOException ioe) {
LOG.warn("Was unable to close region " + regionToSideline
+ ". Just continuing... ", ioe);
} catch (InterruptedException e) {
LOG.warn("Was unable to close region " + regionToSideline
+ ". Just continuing... ", e);
}
try {
LOG.info("Offlining region: " + regionToSideline);
offline(regionToSideline.getRegionName());
} catch (IOException ioe) {
LOG.warn("Unable to offline region from master: " + regionToSideline
+ ". Just continuing... ", ioe);
}
LOG.info("Before sideline big overlapped region: " + regionToSideline.toString());
Path sidelineRegionDir = sidelineRegionDir(fs, TO_BE_LOADED, regionToSideline);
if (sidelineRegionDir != null) {
sidelinedRegions.put(sidelineRegionDir, regionToSideline);
LOG.info("After sidelined big overlapped region: "
+ regionToSideline.getRegionNameAsString()
+ " to " + sidelineRegionDir.toString());
fixes++;
}
}
}
} | void sidelineBigOverlaps( Collection<HbckInfo> bigOverlap) throws IOException { int overlapsToSideline = bigOverlap.size() - maxMerge; if (overlapsToSideline > maxOverlapsToSideline) { overlapsToSideline = maxOverlapsToSideline; } List<HbckInfo> regionsToSideline = RegionSplitCalculator.findBigRanges(bigOverlap, overlapsToSideline); FileSystem fs = FileSystem.get(conf); for (HbckInfo regionToSideline: regionsToSideline) { try { LOG.info(STR + regionToSideline); closeRegion(regionToSideline); } catch (IOException ioe) { LOG.warn(STR + regionToSideline + STR, ioe); } catch (InterruptedException e) { LOG.warn(STR + regionToSideline + STR, e); } try { LOG.info(STR + regionToSideline); offline(regionToSideline.getRegionName()); } catch (IOException ioe) { LOG.warn(STR + regionToSideline + STR, ioe); } LOG.info(STR + regionToSideline.toString()); Path sidelineRegionDir = sidelineRegionDir(fs, TO_BE_LOADED, regionToSideline); if (sidelineRegionDir != null) { sidelinedRegions.put(sidelineRegionDir, regionToSideline); LOG.info(STR + regionToSideline.getRegionNameAsString() + STR + sidelineRegionDir.toString()); fixes++; } } } } | /**
* Sideline some regions in a big overlap group so that it
* will have fewer regions, and it is easier to merge them later on.
*
* @param bigOverlap the overlapped group with regions more than maxMerge
* @throws IOException
*/ | Sideline some regions in a big overlap group so that it will have fewer regions, and it is easier to merge them later on | sidelineBigOverlaps | {
"repo_name": "drewpope/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java",
"license": "apache-2.0",
"size": 166061
} | [
"java.io.IOException",
"java.util.Collection",
"java.util.List",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path"
] | import java.io.IOException; import java.util.Collection; import java.util.List; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; | import java.io.*; import java.util.*; import org.apache.hadoop.fs.*; | [
"java.io",
"java.util",
"org.apache.hadoop"
] | java.io; java.util; org.apache.hadoop; | 2,262,167 |
String getString(Charset charset) {
final byte[] entity = baos.toByteArray();
return new String(entity, 0, entity.length, charset);
} | String getString(Charset charset) { final byte[] entity = baos.toByteArray(); return new String(entity, 0, entity.length, charset); } | /**
 * Get string representation of entity
* @param charset to use for new String(byte[], ..., charset)
* @return String for entity
 */ | Get string representation of entity | getString | {
"repo_name": "starksm64/wf-demo",
"path": "sso/src/main/java/client/LoggingFilter.java",
"license": "apache-2.0",
"size": 3438
} | [
"java.nio.charset.Charset"
] | import java.nio.charset.Charset; | import java.nio.charset.*; | [
"java.nio"
] | java.nio; | 93,013 |
//-----------------------------------------------------------------------
public final MetaProperty<ObjectId> objectId() {
return _objectId;
} | final MetaProperty<ObjectId> function() { return _objectId; } | /**
* The meta-property for the {@code objectId} property.
* @return the meta-property, not null
*/ | The meta-property for the objectId property | objectId | {
"repo_name": "jeorme/OG-Platform",
"path": "projects/OG-Master/src/main/java/com/opengamma/master/user/RoleEventHistoryRequest.java",
"license": "apache-2.0",
"size": 8579
} | [
"com.opengamma.id.ObjectId",
"org.joda.beans.MetaProperty"
] | import com.opengamma.id.ObjectId; import org.joda.beans.MetaProperty; | import com.opengamma.id.*; import org.joda.beans.*; | [
"com.opengamma.id",
"org.joda.beans"
] | com.opengamma.id; org.joda.beans; | 2,886,542 |
public static Comparator<MonetaryAmount> sortNumber(){
return NUMBER_COMPARATOR;
} | static Comparator<MonetaryAmount> function(){ return NUMBER_COMPARATOR; } | /**
 * Get a comparator for sorting amounts by number value ascending.
* @return the Comparator to sort by number in ascending way, not null.
 */ | Get a comparator for sorting amounts by number value ascending | sortNumber | {
"repo_name": "JavaMoney/jsr354-ri-bp",
"path": "src/main/java/org/javamoney/moneta/function/MonetaryFunctions.java",
"license": "apache-2.0",
"size": 7202
} | [
"java.util.Comparator",
"javax.money.MonetaryAmount"
] | import java.util.Comparator; import javax.money.MonetaryAmount; | import java.util.*; import javax.money.*; | [
"java.util",
"javax.money"
] | java.util; javax.money; | 1,439,071 |
public static void info(final String tag, final String message) {
if (Logger.isInfoLogsEnabled()) {
Log.i(tag, message);
}
}
/**
* Send a {@link Log#INFO} log message.
*
* @param tag Used to identify the source of a log message. It usually identifies the class or activity where
* the
* log call occurs.
* @param format the format string {@link String#format(String, Object...)} | static void function(final String tag, final String message) { if (Logger.isInfoLogsEnabled()) { Log.i(tag, message); } } /** * Send a {@link Log#INFO} log message. * * @param tag Used to identify the source of a log message. It usually identifies the class or activity where * the * log call occurs. * @param format the format string {@link String#format(String, Object...)} | /**
* Send a {@link Log#INFO} log message.
*
* @param tag Used to identify the source of a log message. It usually identifies the class or activity where
* the
* log call occurs.
* @param message The message to be logged
*/ | Send a <code>Log#INFO</code> log message | info | {
"repo_name": "remelpugh/android-shared",
"path": "Android Shared/src/main/java/com/dabay6/libraries/androidshared/logging/Logger.java",
"license": "mit",
"size": 12177
} | [
"android.util.Log"
] | import android.util.Log; | import android.util.*; | [
"android.util"
] | android.util; | 953,142 |
public void setGreen(final int green) throws BioAssayRuntimeException {
testIndex();
if (this.greens == null)
throw new BioAssayRuntimeException(BioAssayRuntimeException.NULL_POINTER,
"this field doesn't exist in this bioAssay");
this.greens[this.index] = green;
} | void function(final int green) throws BioAssayRuntimeException { testIndex(); if (this.greens == null) throw new BioAssayRuntimeException(BioAssayRuntimeException.NULL_POINTER, STR); this.greens[this.index] = green; } | /**
* Set the green value.
* @param green The green data to set
 * @throws BioAssayRuntimeException if greens is null or if greens size is
* wrong
*/ | Set the green value | setGreen | {
"repo_name": "GenomicParisCentre/nividic",
"path": "src/main/java/fr/ens/transcriptome/nividic/om/impl/SpotImpl.java",
"license": "lgpl-2.1",
"size": 22875
} | [
"fr.ens.transcriptome.nividic.om.BioAssayRuntimeException"
] | import fr.ens.transcriptome.nividic.om.BioAssayRuntimeException; | import fr.ens.transcriptome.nividic.om.*; | [
"fr.ens.transcriptome"
] | fr.ens.transcriptome; | 2,356,753 |
try {
LaunchEnvironment env = new LaunchEnvironment(args);
(new ControlTopology(env)).setup();
} catch (Exception e) {
System.exit(handleLaunchException(e));
}
} | try { LaunchEnvironment env = new LaunchEnvironment(args); (new ControlTopology(env)).setup(); } catch (Exception e) { System.exit(handleLaunchException(e)); } } | /**
* Topology uploader.
*/ | Topology uploader | main | {
"repo_name": "jonvestal/open-kilda",
"path": "src-java/server42/server42-control-storm-topology/src/main/java/org/openkilda/server42/control/topology/ControlTopology.java",
"license": "apache-2.0",
"size": 5248
} | [
"org.openkilda.wfm.LaunchEnvironment"
] | import org.openkilda.wfm.LaunchEnvironment; | import org.openkilda.wfm.*; | [
"org.openkilda.wfm"
] | org.openkilda.wfm; | 1,715,405 |
public void setNominalColor (Color nominalColor) { this.nominalColor = nominalColor; } | public void setNominalColor (Color nominalColor) { this.nominalColor = nominalColor; } | /** Returns the default Color that is assigned to this link for visualization
* @return see above
*/ | Returns the default Color that is assigned to this link for visualization | getNominalColor | {
"repo_name": "girtel/Net2Plan",
"path": "Net2Plan-Core/src/main/java/com/net2plan/interfaces/networkDesign/Link.java",
"license": "bsd-2-clause",
"size": 70510
} | [
"java.awt.Color"
] | import java.awt.Color; | import java.awt.*; | [
"java.awt"
] | java.awt; | 2,678,226 |
public Path getClasspath() {
return classpath;
} | Path function() { return classpath; } | /**
* Gets the classpath.
*
* @return Returns a Path
*/ | Gets the classpath | getClasspath | {
"repo_name": "komalsukhani/debian-groovy2",
"path": "subprojects/groovy-ant/src/main/java/org/codehaus/groovy/ant/Groovy.java",
"license": "apache-2.0",
"size": 22870
} | [
"org.apache.tools.ant.types.Path"
] | import org.apache.tools.ant.types.Path; | import org.apache.tools.ant.types.*; | [
"org.apache.tools"
] | org.apache.tools; | 2,562,712 |
private void updateNPCMovement(PacketBuilder packet, NPC npc) {
if (npc.getSprites().getSecondarySprite() == -1) {
if (npc.getSprites().getPrimarySprite() == -1) {
if (npc.getUpdateFlags().isUpdateRequired()) {
packet.putBits(1, 1);
packet.putBits(2, 0);
} else {
packet.putBits(1, 0);
}
} else {
packet.putBits(1, 1);
packet.putBits(2, 1);
packet.putBits(3, npc.getSprites().getPrimarySprite());
packet.putBits(1, npc.getUpdateFlags().isUpdateRequired() ? 1 : 0);
}
} else {
packet.putBits(1, 1);
packet.putBits(2, 2);
packet.putBits(3, npc.getSprites().getPrimarySprite());
packet.putBits(3, npc.getSprites().getSecondarySprite());
packet.putBits(1, npc.getUpdateFlags().isUpdateRequired() ? 1 : 0);
}
}
| void function(PacketBuilder packet, NPC npc) { if (npc.getSprites().getSecondarySprite() == -1) { if (npc.getSprites().getPrimarySprite() == -1) { if (npc.getUpdateFlags().isUpdateRequired()) { packet.putBits(1, 1); packet.putBits(2, 0); } else { packet.putBits(1, 0); } } else { packet.putBits(1, 1); packet.putBits(2, 1); packet.putBits(3, npc.getSprites().getPrimarySprite()); packet.putBits(1, npc.getUpdateFlags().isUpdateRequired() ? 1 : 0); } } else { packet.putBits(1, 1); packet.putBits(2, 2); packet.putBits(3, npc.getSprites().getPrimarySprite()); packet.putBits(3, npc.getSprites().getSecondarySprite()); packet.putBits(1, npc.getUpdateFlags().isUpdateRequired() ? 1 : 0); } } | /**
* Update an NPC's movement.
*
* @param packet
* The main packet.
* @param npc
* The npc.
*/ | Update an NPC's movement | updateNPCMovement | {
"repo_name": "kewle003/hyperion",
"path": "src/org/rs2server/rs2/task/impl/NPCUpdateTask.java",
"license": "mit",
"size": 8594
} | [
"org.rs2server.rs2.net.PacketBuilder"
] | import org.rs2server.rs2.net.PacketBuilder; | import org.rs2server.rs2.net.*; | [
"org.rs2server.rs2"
] | org.rs2server.rs2; | 118,266 |