method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
sequence | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
sequence | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
---|---|---|---|---|---|---|---|---|---|---|---|
private void processRuntimeDeps(JavaTargetAttributes.Builder attributes) {
List<TransitiveInfoCollection> runtimeDepInfo = getRuntimeDeps(ruleContext);
checkRuntimeDeps(ruleContext, runtimeDepInfo);
JavaCompilationArgs args = JavaCompilationArgs.builder()
.addTransitiveTargets(runtimeDepInfo, true, ClasspathType.RUNTIME_ONLY)
.build();
attributes.addRuntimeClassPathEntries(args.getRuntimeJars());
attributes.addInstrumentationMetadataEntries(args.getInstrumentationMetadata());
} | void function(JavaTargetAttributes.Builder attributes) { List<TransitiveInfoCollection> runtimeDepInfo = getRuntimeDeps(ruleContext); checkRuntimeDeps(ruleContext, runtimeDepInfo); JavaCompilationArgs args = JavaCompilationArgs.builder() .addTransitiveTargets(runtimeDepInfo, true, ClasspathType.RUNTIME_ONLY) .build(); attributes.addRuntimeClassPathEntries(args.getRuntimeJars()); attributes.addInstrumentationMetadataEntries(args.getInstrumentationMetadata()); } | /**
* Processes the transitive runtime_deps of this target.
*/ | Processes the transitive runtime_deps of this target | processRuntimeDeps | {
"repo_name": "whuwxl/bazel",
"path": "src/main/java/com/google/devtools/build/lib/rules/java/JavaCommon.java",
"license": "apache-2.0",
"size": 33851
} | [
"com.google.common.collect.ImmutableList",
"com.google.devtools.build.lib.analysis.TransitiveInfoCollection",
"com.google.devtools.build.lib.rules.java.JavaCompilationArgs",
"java.util.List"
] | import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.analysis.TransitiveInfoCollection; import com.google.devtools.build.lib.rules.java.JavaCompilationArgs; import java.util.List; | import com.google.common.collect.*; import com.google.devtools.build.lib.analysis.*; import com.google.devtools.build.lib.rules.java.*; import java.util.*; | [
"com.google.common",
"com.google.devtools",
"java.util"
] | com.google.common; com.google.devtools; java.util; | 2,548,858 |
public void setPermissionEntityCache(SimpleCache<Serializable, Object> permissionEntityCache)
{
this.permissionEntityCache = new EntityLookupCache<Long, PermissionEntity, PermissionEntity>(
permissionEntityCache,
CACHE_REGION_PERMISSION,
permissionEntityDaoCallback);
}
public AbstractAclCrudDAOImpl()
{
this.aclEntityDaoCallback = new AclEntityCallbackDAO();
this.aclEntityCache = new EntityLookupCache<Long, AclEntity, Serializable>(aclEntityDaoCallback);
this.authorityEntityDaoCallback = new AuthorityEntityCallbackDAO();
this.authorityEntityCache = new EntityLookupCache<Long, AuthorityEntity, String>(authorityEntityDaoCallback);
this.permissionEntityDaoCallback = new PermissionEntityCallbackDAO();
this.permissionEntityCache = new EntityLookupCache<Long, PermissionEntity, PermissionEntity>(permissionEntityDaoCallback);
}
//
// Access Control List (ACL)
//
| void function(SimpleCache<Serializable, Object> permissionEntityCache) { this.permissionEntityCache = new EntityLookupCache<Long, PermissionEntity, PermissionEntity>( permissionEntityCache, CACHE_REGION_PERMISSION, permissionEntityDaoCallback); } public AbstractAclCrudDAOImpl() { this.aclEntityDaoCallback = new AclEntityCallbackDAO(); this.aclEntityCache = new EntityLookupCache<Long, AclEntity, Serializable>(aclEntityDaoCallback); this.authorityEntityDaoCallback = new AuthorityEntityCallbackDAO(); this.authorityEntityCache = new EntityLookupCache<Long, AuthorityEntity, String>(authorityEntityDaoCallback); this.permissionEntityDaoCallback = new PermissionEntityCallbackDAO(); this.permissionEntityCache = new EntityLookupCache<Long, PermissionEntity, PermissionEntity>(permissionEntityDaoCallback); } | /**
* Set the cache to use for <b>alf_permission</b> lookups (optional).
*
* @param permissionEntityCache the cache of IDs to PermissionEntities
*/ | Set the cache to use for alf_permission lookups (optional) | setPermissionEntityCache | {
"repo_name": "Alfresco/alfresco-repository",
"path": "src/main/java/org/alfresco/repo/domain/permissions/AbstractAclCrudDAOImpl.java",
"license": "lgpl-3.0",
"size": 38060
} | [
"java.io.Serializable",
"org.alfresco.repo.cache.SimpleCache",
"org.alfresco.repo.cache.lookup.EntityLookupCache"
] | import java.io.Serializable; import org.alfresco.repo.cache.SimpleCache; import org.alfresco.repo.cache.lookup.EntityLookupCache; | import java.io.*; import org.alfresco.repo.cache.*; import org.alfresco.repo.cache.lookup.*; | [
"java.io",
"org.alfresco.repo"
] | java.io; org.alfresco.repo; | 2,072,914 |
public ServiceCall getAllWithValuesAsync(String localStringPath, String pathItemStringPath, final ServiceCallback<Void> serviceCallback) throws IllegalArgumentException {
if (serviceCallback == null) {
throw new IllegalArgumentException("ServiceCallback is required for async calls.");
}
if (localStringPath == null) {
serviceCallback.failure(new IllegalArgumentException("Parameter localStringPath is required and cannot be null."));
return null;
}
if (pathItemStringPath == null) {
serviceCallback.failure(new IllegalArgumentException("Parameter pathItemStringPath is required and cannot be null."));
return null;
}
if (this.client.getGlobalStringPath() == null) {
serviceCallback.failure(new IllegalArgumentException("Parameter this.client.getGlobalStringPath() is required and cannot be null."));
return null;
} | ServiceCall function(String localStringPath, String pathItemStringPath, final ServiceCallback<Void> serviceCallback) throws IllegalArgumentException { if (serviceCallback == null) { throw new IllegalArgumentException(STR); } if (localStringPath == null) { serviceCallback.failure(new IllegalArgumentException(STR)); return null; } if (pathItemStringPath == null) { serviceCallback.failure(new IllegalArgumentException(STR)); return null; } if (this.client.getGlobalStringPath() == null) { serviceCallback.failure(new IllegalArgumentException(STR)); return null; } | /**
* send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
*
* @param localStringPath should contain value 'localStringPath'
* @param pathItemStringPath A string value 'pathItemStringPath' that appears in the path
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if callback is null
* @return the {@link Call} object
*/ | send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery' | getAllWithValuesAsync | {
"repo_name": "stankovski/AutoRest",
"path": "AutoRest/Generators/Java/Java.Tests/src/main/java/fixtures/url/PathItemsOperationsImpl.java",
"license": "mit",
"size": 40701
} | [
"com.microsoft.rest.ServiceCall",
"com.microsoft.rest.ServiceCallback"
] | import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback; | import com.microsoft.rest.*; | [
"com.microsoft.rest"
] | com.microsoft.rest; | 2,324,892 |
protected void usage() {
PrintStream out = System.out;
out.println("cacheserver start [-J<vmarg>]* [<attName>=<attValue>]* [-dir=<workingdir>] [-classpath=<classpath>] [-disable-default-server] [-rebalance] [-lock-memory] [-server-port=<server-port>] [-server-bind-address=<server-bind-address>] [-critical-heap-percentage=<critical-heap-percentage>] [-eviction-heap-percentage=<eviction-heap-percentage>] [-critical-off-heap-percentage=<critical-off-heap-percentage>] [-eviction-off-heap-percentage=<eviction-off-heap-percentage>]\n" );
out.println("\t" + LocalizedStrings.CacheServerLauncher_STARTS_A_GEMFIRE_CACHESERVER_VM.toLocalizedString() );
out.println("\t" + LocalizedStrings.CacheServerLauncher_VMARG.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_DIR.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_CLASSPATH.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_ATTNAME.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_REBALANCE.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_DISABLE_DEFAULT_SERVER.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_SERVER_PORT.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_SERVER_BIND_ADDRESS.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_CRITICAL_HEAP_PERCENTAGE.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_EVICTION_HEAP_PERCENTAGE.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_CRITICAL_OFF_HEAP_PERCENTAGE.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_EVICTION_OFF_HEAP_PERCENTAGE.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_LOCK_MEMORY.toLocalizedString());
out.println();
out.println( "cacheserver stop [-dir=<workingdir>]" );
out.println("\t" + LocalizedStrings.CacheServerLauncher_STOPS_A_GEMFIRE_CACHESERVER_VM.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_DIR.toLocalizedString());
out.println();
out.println( "cacheserver status [-dir=<workingdir>]" );
out.println( "\t" + LocalizedStrings.CacheServerLauncher_STATUS.toLocalizedString());
out.println("\t" + LocalizedStrings.CacheServerLauncher_DIR.toLocalizedString());
} | void function() { PrintStream out = System.out; out.println(STR ); out.println("\t" + LocalizedStrings.CacheServerLauncher_STARTS_A_GEMFIRE_CACHESERVER_VM.toLocalizedString() ); out.println("\t" + LocalizedStrings.CacheServerLauncher_VMARG.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_DIR.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_CLASSPATH.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_ATTNAME.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_REBALANCE.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_DISABLE_DEFAULT_SERVER.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_SERVER_PORT.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_SERVER_BIND_ADDRESS.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_CRITICAL_HEAP_PERCENTAGE.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_EVICTION_HEAP_PERCENTAGE.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_CRITICAL_OFF_HEAP_PERCENTAGE.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_EVICTION_OFF_HEAP_PERCENTAGE.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_LOCK_MEMORY.toLocalizedString()); out.println(); out.println( STR ); out.println("\t" + LocalizedStrings.CacheServerLauncher_STOPS_A_GEMFIRE_CACHESERVER_VM.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_DIR.toLocalizedString()); out.println(); out.println( STR ); out.println( "\t" + LocalizedStrings.CacheServerLauncher_STATUS.toLocalizedString()); out.println("\t" + LocalizedStrings.CacheServerLauncher_DIR.toLocalizedString()); } | /**
* Prints usage information about this program.
*/ | Prints usage information about this program | usage | {
"repo_name": "robertgeiger/incubator-geode",
"path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/CacheServerLauncher.java",
"license": "apache-2.0",
"size": 49471
} | [
"com.gemstone.gemfire.internal.i18n.LocalizedStrings",
"java.io.PrintStream"
] | import com.gemstone.gemfire.internal.i18n.LocalizedStrings; import java.io.PrintStream; | import com.gemstone.gemfire.internal.i18n.*; import java.io.*; | [
"com.gemstone.gemfire",
"java.io"
] | com.gemstone.gemfire; java.io; | 131,869 |
@Lock
RepoResource saveResourceInTransaction(StoringRepo repo, SaveResourceContext saveContext)
throws IOException, RepoRejectException; | RepoResource saveResourceInTransaction(StoringRepo repo, SaveResourceContext saveContext) throws IOException, RepoRejectException; | /**
* Internal, transactional method to save a resource. Don't use directly.
*
* @see InternalRepositoryService#saveResource(org.artifactory.repo.StoringRepo, org.artifactory.repo.SaveResourceContext)
*/ | Internal, transactional method to save a resource. Don't use directly | saveResourceInTransaction | {
"repo_name": "alancnet/artifactory",
"path": "backend/core/src/main/java/org/artifactory/repo/service/InternalRepositoryService.java",
"license": "apache-2.0",
"size": 9146
} | [
"java.io.IOException",
"org.artifactory.api.repo.exception.RepoRejectException",
"org.artifactory.fs.RepoResource",
"org.artifactory.repo.SaveResourceContext",
"org.artifactory.repo.StoringRepo"
] | import java.io.IOException; import org.artifactory.api.repo.exception.RepoRejectException; import org.artifactory.fs.RepoResource; import org.artifactory.repo.SaveResourceContext; import org.artifactory.repo.StoringRepo; | import java.io.*; import org.artifactory.api.repo.exception.*; import org.artifactory.fs.*; import org.artifactory.repo.*; | [
"java.io",
"org.artifactory.api",
"org.artifactory.fs",
"org.artifactory.repo"
] | java.io; org.artifactory.api; org.artifactory.fs; org.artifactory.repo; | 1,027,710 |
public void setTagclass(String tagClassName)
throws ConfigException, InstantiationException, IllegalAccessException
{
setTagClass(tagClassName);
} | void function(String tagClassName) throws ConfigException, InstantiationException, IllegalAccessException { setTagClass(tagClassName); } | /**
* Sets the tei class
*/ | Sets the tei class | setTagclass | {
"repo_name": "dlitz/resin",
"path": "modules/resin/src/com/caucho/jsp/cfg/TldTag.java",
"license": "gpl-2.0",
"size": 12809
} | [
"com.caucho.config.ConfigException"
] | import com.caucho.config.ConfigException; | import com.caucho.config.*; | [
"com.caucho.config"
] | com.caucho.config; | 126,100 |
@Test(expected = NullPointerException.class)
public void testLocaleConstructorWithNullDateFormat() {
new ParseSqlTime(null, false, Locale.GERMAN);
}
| @Test(expected = NullPointerException.class) void function() { new ParseSqlTime(null, false, Locale.GERMAN); } | /**
* Tests construction (using the Locale constructor) with a null date format (should throw an Exception).
*/ | Tests construction (using the Locale constructor) with a null date format (should throw an Exception) | testLocaleConstructorWithNullDateFormat | {
"repo_name": "jamesbassett/super-csv",
"path": "super-csv/src/test/java/org/supercsv/cellprocessor/ParseSqlTimeTest.java",
"license": "apache-2.0",
"size": 5025
} | [
"java.util.Locale",
"org.junit.Test"
] | import java.util.Locale; import org.junit.Test; | import java.util.*; import org.junit.*; | [
"java.util",
"org.junit"
] | java.util; org.junit; | 2,772,867 |
public void removeGestureStateListener(GestureStateListener listener) {
mGestureStateListeners.removeObserver(listener);
} | void function(GestureStateListener listener) { mGestureStateListeners.removeObserver(listener); } | /**
* Removes a listener that was added to watch for gesture state changes.
* @param listener Listener to remove.
*/ | Removes a listener that was added to watch for gesture state changes | removeGestureStateListener | {
"repo_name": "hefen1/chromium",
"path": "content/public/android/java/src/org/chromium/content/browser/ContentViewCore.java",
"license": "bsd-3-clause",
"size": 125908
} | [
"org.chromium.content_public.browser.GestureStateListener"
] | import org.chromium.content_public.browser.GestureStateListener; | import org.chromium.content_public.browser.*; | [
"org.chromium.content_public"
] | org.chromium.content_public; | 2,099,871 |
@Test
public void testStarter_main() throws Exception {
// _Starter lies in the default package and thus needs to be accessed in
// a quirky way
Class<?> c = Class.forName("_Starter");
Method m = c.getMethod("main", new Class[] { String[].class });
String[] args = new String[] { "foo", "bar", "baz" };
m.invoke(null, new Object[] { args });
Assert.assertArrayEquals(args, last_args);
}
public static String[] last_args; | void function() throws Exception { Class<?> c = Class.forName(STR); Method m = c.getMethod("main", new Class[] { String[].class }); String[] args = new String[] { "foo", "bar", "baz" }; m.invoke(null, new Object[] { args }); Assert.assertArrayEquals(args, last_args); } public static String[] last_args; | /**
* Tests whether the _Starter calls the configured class' main method and
* properly propagates the program arguments.
*
* @throws Exception
*/ | Tests whether the _Starter calls the configured class' main method and properly propagates the program arguments | testStarter_main | {
"repo_name": "21Net/pkg-maven-plugin",
"path": "src/test/java/de/tarent/maven/plugins/pkg/_default/_StarterTest.java",
"license": "gpl-2.0",
"size": 1929
} | [
"java.lang.reflect.Method",
"org.junit.Assert"
] | import java.lang.reflect.Method; import org.junit.Assert; | import java.lang.reflect.*; import org.junit.*; | [
"java.lang",
"org.junit"
] | java.lang; org.junit; | 1,200,946 |
Map<String, String> getNSMap();
int CC_KEY = 1;
int CC_KEYREF = 2;
int CC_UNIQUE = 3; | Map<String, String> getNSMap(); int CC_KEY = 1; int CC_KEYREF = 2; int CC_UNIQUE = 3; | /**
* Return a read-only copy of the namespace map. This is the
* set of prefix to URI mappings that were in scope in the
* schema at the point at which this constraint was declared
*/ | Return a read-only copy of the namespace map. This is the set of prefix to URI mappings that were in scope in the schema at the point at which this constraint was declared | getNSMap | {
"repo_name": "apache/xmlbeans",
"path": "src/main/java/org/apache/xmlbeans/SchemaIdentityConstraint.java",
"license": "apache-2.0",
"size": 3012
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 721,349 |
Certificate getCertificate(String alias); | Certificate getCertificate(String alias); | /**
* Returns a certificate that has the given <code>alias</code>, or
* <code>null</code> if this keyring has no such entry.
*
* @param alias The alias of the certificate to find.
* @return The certificate with the designated <code>alias</code>, or
* <code>null</code> if none found.
*/ | Returns a certificate that has the given <code>alias</code>, or <code>null</code> if this keyring has no such entry | getCertificate | {
"repo_name": "shaotuanchen/sunflower_exp",
"path": "tools/source/gcc-4.2.4/libjava/classpath/gnu/javax/crypto/keyring/IPublicKeyring.java",
"license": "bsd-3-clause",
"size": 3082
} | [
"java.security.cert.Certificate"
] | import java.security.cert.Certificate; | import java.security.cert.*; | [
"java.security"
] | java.security; | 1,508,709 |
protected final void incrementCurrentValue(BigInteger by) {
currentValue = currentValue.add(by);
} | final void function(BigInteger by) { currentValue = currentValue.add(by); } | /**
* Increments the current value by a specified amount.
*
* @param by the amount to increment by
*/ | Increments the current value by a specified amount | incrementCurrentValue | {
"repo_name": "rmswimkktt/checkstyle",
"path": "src/main/java/com/puppycrawl/tools/checkstyle/checks/metrics/AbstractComplexityCheck.java",
"license": "lgpl-2.1",
"size": 5546
} | [
"java.math.BigInteger"
] | import java.math.BigInteger; | import java.math.*; | [
"java.math"
] | java.math; | 41,127 |
View getViewById(String viewId);
| View getViewById(String viewId); | /**
* Returns the View entry identified by the given id
*
* @param viewId - unique id for view
* @return View instance associated with the id
*/ | Returns the View entry identified by the given id | getViewById | {
"repo_name": "kuali/kc-rice",
"path": "rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/service/DataDictionaryService.java",
"license": "apache-2.0",
"size": 19553
} | [
"org.kuali.rice.krad.uif.view.View"
] | import org.kuali.rice.krad.uif.view.View; | import org.kuali.rice.krad.uif.view.*; | [
"org.kuali.rice"
] | org.kuali.rice; | 498,545 |
public static VisibilityLabelsResponse addLabels(Configuration conf, final String[] labels)
throws Throwable {
// TODO: Make it so caller passes in a Connection rather than have us do this expensive
// setup each time. This class only used in test and shell at moment though.
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table table = connection.getTable(LABELS_TABLE_NAME)) {
Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable =
new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() {
ServerRpcController controller = new ServerRpcController();
BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback =
new BlockingRpcCallback<VisibilityLabelsResponse>(); | static VisibilityLabelsResponse function(Configuration conf, final String[] labels) throws Throwable { try (Connection connection = ConnectionFactory.createConnection(conf)) { try (Table table = connection.getTable(LABELS_TABLE_NAME)) { Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable = new Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse>() { ServerRpcController controller = new ServerRpcController(); BlockingRpcCallback<VisibilityLabelsResponse> rpcCallback = new BlockingRpcCallback<VisibilityLabelsResponse>(); | /**
* Utility method for adding labels to the system.
*
* @param conf
* @param labels
* @return VisibilityLabelsResponse
* @throws Throwable
*/ | Utility method for adding labels to the system | addLabels | {
"repo_name": "Guavus/hbase",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java",
"license": "apache-2.0",
"size": 12126
} | [
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hbase.client.Connection",
"org.apache.hadoop.hbase.client.ConnectionFactory",
"org.apache.hadoop.hbase.client.Table",
"org.apache.hadoop.hbase.client.coprocessor.Batch",
"org.apache.hadoop.hbase.ipc.BlockingRpcCallback",
"org.apache.hadoop.hbase.ipc.ServerRpcController",
"org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos"
] | import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; import org.apache.hadoop.hbase.ipc.ServerRpcController; import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos; | import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.client.coprocessor.*; import org.apache.hadoop.hbase.ipc.*; import org.apache.hadoop.hbase.protobuf.generated.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 2,114,672 |
public final MetaProperty<CacheManager> cacheManager() {
return _cacheManager;
} | final MetaProperty<CacheManager> function() { return _cacheManager; } | /**
* The meta-property for the {@code cacheManager} property.
* @return the meta-property, not null
*/ | The meta-property for the cacheManager property | cacheManager | {
"repo_name": "McLeodMoores/starling",
"path": "projects/component/src/main/java/com/opengamma/component/factory/master/EHCachingPositionMasterComponentFactory.java",
"license": "apache-2.0",
"size": 14085
} | [
"net.sf.ehcache.CacheManager",
"org.joda.beans.MetaProperty"
] | import net.sf.ehcache.CacheManager; import org.joda.beans.MetaProperty; | import net.sf.ehcache.*; import org.joda.beans.*; | [
"net.sf.ehcache",
"org.joda.beans"
] | net.sf.ehcache; org.joda.beans; | 266,568 |
public PulseListBuilder withNextPulses(Collection<Long> pulses) {
pulses.stream().filter(Objects::nonNull).forEach(this::withNextPulse);
return this;
} | PulseListBuilder function(Collection<Long> pulses) { pulses.stream().filter(Objects::nonNull).forEach(this::withNextPulse); return this; } | /**
* Add a collection of non-null pulses with the specified durations in tstates.
* @param pulses a collection of pulses in 3,500,000 Hz tstates size
* @return this builder instance
* @throws IllegalArgumentException if pulse is null or not greater than or equal to 0
*/ | Add a collection of non-null pulses with the specified durations in tstates | withNextPulses | {
"repo_name": "fmeunier/wav2pzx",
"path": "src/main/java/xyz/meunier/wav2pzx/pulselist/PulseListBuilder.java",
"license": "bsd-2-clause",
"size": 4865
} | [
"java.util.Collection",
"java.util.Objects"
] | import java.util.Collection; import java.util.Objects; | import java.util.*; | [
"java.util"
] | java.util; | 1,128,885 |
public Object[] getPeerCertificateChain(boolean force)
throws IOException; | Object[] function(boolean force) throws IOException; | /**
* The client certificate chain (if any).
* @param force If <code>true</code>, then re-negotiate the
* connection if necessary.
*/ | The client certificate chain (if any) | getPeerCertificateChain | {
"repo_name": "whitingjr/JbossWeb_7_2_0",
"path": "src/main/java/org/apache/tomcat/util/net/SSLSupport.java",
"license": "apache-2.0",
"size": 3419
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 625,507 |
public GetUniverseGraphicsGraphicIdOk getUniverseGraphicsGraphicId(Integer graphicId, String datasource, String userAgent, String xUserAgent) throws ApiException {
ApiResponse<GetUniverseGraphicsGraphicIdOk> resp = getUniverseGraphicsGraphicIdWithHttpInfo(graphicId, datasource, userAgent, xUserAgent);
return resp.getData();
} | GetUniverseGraphicsGraphicIdOk function(Integer graphicId, String datasource, String userAgent, String xUserAgent) throws ApiException { ApiResponse<GetUniverseGraphicsGraphicIdOk> resp = getUniverseGraphicsGraphicIdWithHttpInfo(graphicId, datasource, userAgent, xUserAgent); return resp.getData(); } | /**
* Get graphic information
* Get information on a graphic --- Alternate route: `/v1/universe/graphics/{graphic_id}/` Alternate route: `/legacy/universe/graphics/{graphic_id}/` Alternate route: `/dev/universe/graphics/{graphic_id}/` --- This route expires daily at 11:05
* @param graphicId graphic_id integer (required)
* @param datasource The server name you would like data from (optional, default to tranquility)
* @param userAgent Client identifier, takes precedence over headers (optional)
* @param xUserAgent Client identifier, takes precedence over User-Agent (optional)
* @return GetUniverseGraphicsGraphicIdOk
* @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
*/ | Get graphic information Get information on a graphic --- Alternate route: `/v1/universe/graphics/{graphic_id}/` Alternate route: `/legacy/universe/graphics/{graphic_id}/` Alternate route: `/dev/universe/graphics/{graphic_id}/` --- This route expires daily at 11:05 | getUniverseGraphicsGraphicId | {
"repo_name": "Tmin10/EVE-Security-Service",
"path": "server-api/src/main/java/ru/tmin10/EVESecurityService/serverApi/api/UniverseApi.java",
"license": "gpl-3.0",
"size": 215688
} | [
"ru.tmin10.EVESecurityService"
] | import ru.tmin10.EVESecurityService; | import ru.tmin10.*; | [
"ru.tmin10"
] | ru.tmin10; | 1,873,883 |
private FileInfo getShardFileInfo(File file) {
FileInfo info = FileInfo.fromFile(file);
if (info == null) {
return null; // file with incorrect name/extension
}
File expectedDirectory = getSubdirectory(info.resourceId);
boolean isCorrect = expectedDirectory.equals(file.getParentFile());
return isCorrect ? info : null;
}
private static enum FileType {
CONTENT(CONTENT_FILE_EXTENSION),
TEMP(TEMP_FILE_EXTENSION);
public final String extension;
FileType(String extension) {
this.extension = extension;
} | FileInfo function(File file) { FileInfo info = FileInfo.fromFile(file); if (info == null) { return null; } File expectedDirectory = getSubdirectory(info.resourceId); boolean isCorrect = expectedDirectory.equals(file.getParentFile()); return isCorrect ? info : null; } private static enum FileType { CONTENT(CONTENT_FILE_EXTENSION), TEMP(TEMP_FILE_EXTENSION); public final String extension; FileType(String extension) { this.extension = extension; } | /**
* Checks that the file is placed in the correct shard according to its
* filename (and hence the represented key). If it's correct its FileInfo is returned.
* @param file the file to check
* @return the corresponding FileInfo object if shard is correct, null otherwise
*/ | Checks that the file is placed in the correct shard according to its filename (and hence the represented key). If it's correct its FileInfo is returned | getShardFileInfo | {
"repo_name": "desmond1121/fresco",
"path": "imagepipeline-base/src/main/java/com/facebook/cache/disk/DefaultDiskStorage.java",
"license": "bsd-3-clause",
"size": 23508
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 1,969,262 |
public static String getCorpusId(final Corpus corpus, final String accountId) {
Validator.notNull(corpus, "corpus cannot be null");
if (corpus.getId() != null) {
validate(CORPUS_ID_REGEX, corpus.getId(),
"Provide a valid corpus.id (format is " + '"' + "/corpora/{account_id}/{corpus} +" + '"' + ")");
return corpus.getId();
} else {
Validator.notNull(corpus.getName(), "corpus.name cannot be null");
return "/corpora/" + accountId + "/" + corpus.getName();
}
} | static String function(final Corpus corpus, final String accountId) { Validator.notNull(corpus, STR); if (corpus.getId() != null) { validate(CORPUS_ID_REGEX, corpus.getId(), STR + 'STR/corpora/{account_id}/{corpus} +" + 'STR)"); return corpus.getId(); } else { Validator.notNull(corpus.getName(), STR); return STR + accountId + "/" + corpus.getName(); } } | /**
* This method validate the id if it has been populated in the corpus object, otherwise it will
* generated it.
*
* @param corpus Corpus the corpus object,
* @param accountId String the account id.
* @return the corpus id
*/ | This method validate the id if it has been populated in the corpus object, otherwise it will generated it | getCorpusId | {
"repo_name": "m2fd/java-sdk",
"path": "src/main/java/com/ibm/watson/developer_cloud/concept_insights/v2/util/IDHelper.java",
"license": "apache-2.0",
"size": 4990
} | [
"com.ibm.watson.developer_cloud.concept_insights.v2.model.Corpus",
"com.ibm.watson.developer_cloud.util.Validator"
] | import com.ibm.watson.developer_cloud.concept_insights.v2.model.Corpus; import com.ibm.watson.developer_cloud.util.Validator; | import com.ibm.watson.developer_cloud.concept_insights.v2.model.*; import com.ibm.watson.developer_cloud.util.*; | [
"com.ibm.watson"
] | com.ibm.watson; | 1,818,564 |
private static RuntimeException getCause(InvocationTargetException e) {
Throwable cause = e.getCause();
if(cause instanceof RuntimeException)
throw (RuntimeException) cause;
else
throw new IllegalStateException(e.getCause());
} | static RuntimeException function(InvocationTargetException e) { Throwable cause = e.getCause(); if(cause instanceof RuntimeException) throw (RuntimeException) cause; else throw new IllegalStateException(e.getCause()); } | /**
* Get the root cause of the Exception
*
* @param e The Exception
* @return The root cause of the Exception
*/ | Get the root cause of the Exception | getCause | {
"repo_name": "azkaban/azkaban-legacy",
"path": "azkaban-common/src/java/azkaban/common/utils/Utils.java",
"license": "apache-2.0",
"size": 18562
} | [
"java.lang.reflect.InvocationTargetException"
] | import java.lang.reflect.InvocationTargetException; | import java.lang.reflect.*; | [
"java.lang"
] | java.lang; | 384,886 |
@Test
public void testDownloadingLaterCheckpoint() throws Exception {
// Roll edit logs a few times to inflate txid
nn0.getRpcServer().rollEditLog();
nn0.getRpcServer().rollEditLog();
// Make checkpoint
NameNodeAdapter.enterSafeMode(nn0, false);
NameNodeAdapter.saveNamespace(nn0);
NameNodeAdapter.leaveSafeMode(nn0);
long expectedCheckpointTxId = NameNodeAdapter.getNamesystem(nn0)
.getFSImage().getMostRecentCheckpointTxId();
assertEquals(6, expectedCheckpointTxId);
int rc = BootstrapStandby.run(
new String[]{"-force"},
cluster.getConfiguration(1));
assertEquals(0, rc);
// Should have copied over the namespace from the active
FSImageTestUtil.assertNNHasCheckpoints(cluster, 1,
ImmutableList.of((int)expectedCheckpointTxId));
FSImageTestUtil.assertNNFilesMatch(cluster);
// We should now be able to start the standby successfully.
cluster.restartNameNode(1);
} | void function() throws Exception { nn0.getRpcServer().rollEditLog(); nn0.getRpcServer().rollEditLog(); NameNodeAdapter.enterSafeMode(nn0, false); NameNodeAdapter.saveNamespace(nn0); NameNodeAdapter.leaveSafeMode(nn0); long expectedCheckpointTxId = NameNodeAdapter.getNamesystem(nn0) .getFSImage().getMostRecentCheckpointTxId(); assertEquals(6, expectedCheckpointTxId); int rc = BootstrapStandby.run( new String[]{STR}, cluster.getConfiguration(1)); assertEquals(0, rc); FSImageTestUtil.assertNNHasCheckpoints(cluster, 1, ImmutableList.of((int)expectedCheckpointTxId)); FSImageTestUtil.assertNNFilesMatch(cluster); cluster.restartNameNode(1); } | /**
* Test for downloading a checkpoint made at a later checkpoint
* from the active.
*/ | Test for downloading a checkpoint made at a later checkpoint from the active | testDownloadingLaterCheckpoint | {
"repo_name": "tomatoKiller/Hadoop_Source_Learn",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestBootstrapStandby.java",
"license": "apache-2.0",
"size": 7242
} | [
"com.google.common.collect.ImmutableList",
"org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil",
"org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter",
"org.junit.Assert"
] | import com.google.common.collect.ImmutableList; import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil; import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter; import org.junit.Assert; | import com.google.common.collect.*; import org.apache.hadoop.hdfs.server.namenode.*; import org.junit.*; | [
"com.google.common",
"org.apache.hadoop",
"org.junit"
] | com.google.common; org.apache.hadoop; org.junit; | 1,001,555 |
protected List buildSearchResultList(Collection searchResultsCollection, Long actualSize) {
CollectionIncomplete results = new CollectionIncomplete(searchResultsCollection, actualSize);
// sort list if default sort column given
List searchResults = (List) results;
List defaultSortColumns = getDefaultSortColumns();
if (defaultSortColumns.size() > 0) {
Collections.sort(results, new BeanPropertyComparator(defaultSortColumns, true));
}
return searchResults;
}
| List function(Collection searchResultsCollection, Long actualSize) { CollectionIncomplete results = new CollectionIncomplete(searchResultsCollection, actualSize); List searchResults = (List) results; List defaultSortColumns = getDefaultSortColumns(); if (defaultSortColumns.size() > 0) { Collections.sort(results, new BeanPropertyComparator(defaultSortColumns, true)); } return searchResults; } | /**
* build the search result list from the given collection and the number of all qualified search results
*
* @param searchResultsCollection the given search results, which may be a subset of the qualified search results
* @param actualSize the number of all qualified search results
* @return the serach result list with the given results and actual size
*/ | build the search result list from the given collection and the number of all qualified search results | buildSearchResultList | {
"repo_name": "ua-eas/ua-kfs-5.3",
"path": "work/src/org/kuali/kfs/module/ar/businessobject/lookup/CustomerInvoiceWriteoffLookupResultLookupableHelperServiceImpl.java",
"license": "agpl-3.0",
"size": 13759
} | [
"java.util.Collection",
"java.util.Collections",
"java.util.List",
"org.kuali.rice.krad.lookup.CollectionIncomplete",
"org.kuali.rice.krad.util.BeanPropertyComparator"
] | import java.util.Collection; import java.util.Collections; import java.util.List; import org.kuali.rice.krad.lookup.CollectionIncomplete; import org.kuali.rice.krad.util.BeanPropertyComparator; | import java.util.*; import org.kuali.rice.krad.lookup.*; import org.kuali.rice.krad.util.*; | [
"java.util",
"org.kuali.rice"
] | java.util; org.kuali.rice; | 1,247,210 |
List findByExample(Object exampleEntity) throws DataAccessException; | List findByExample(Object exampleEntity) throws DataAccessException; | /**
* Execute a query based on the given example entity object.
* @param exampleEntity an instance of the desired entity,
* serving as example for "query-by-example"
* @return a {@link List} containing 0 or more persistent instances
* @throws org.springframework.dao.DataAccessException in case of Hibernate errors
* @see org.hibernate.criterion.Example#create(Object)
*/ | Execute a query based on the given example entity object | findByExample | {
"repo_name": "cbeams-archive/spring-framework-2.5.x",
"path": "src/org/springframework/orm/hibernate3/HibernateOperations.java",
"license": "apache-2.0",
"size": 44752
} | [
"java.util.List",
"org.springframework.dao.DataAccessException"
] | import java.util.List; import org.springframework.dao.DataAccessException; | import java.util.*; import org.springframework.dao.*; | [
"java.util",
"org.springframework.dao"
] | java.util; org.springframework.dao; | 122,149 |
public static String getServerScheme() {
if (null == serverScheme) {
serverScheme = PropsUtil.getProperty("serverScheme");
if (null == serverScheme) {
throw new IllegalStateException("latke.properties [serverScheme] is empty");
}
}
return serverScheme;
} | static String function() { if (null == serverScheme) { serverScheme = PropsUtil.getProperty(STR); if (null == serverScheme) { throw new IllegalStateException(STR); } } return serverScheme; } | /**
* Gets server scheme.
*
* <p>
* Returns the value of "serverScheme" property in latke.properties.
* </p>
*
* @return server scheme
*/ | Gets server scheme. Returns the value of "serverScheme" property in latke.properties. | getServerScheme | {
"repo_name": "daima/solo-spring",
"path": "src/main/java/org/b3log/solo/Latkes.java",
"license": "apache-2.0",
"size": 22440
} | [
"org.b3log.solo.util.PropsUtil"
] | import org.b3log.solo.util.PropsUtil; | import org.b3log.solo.util.*; | [
"org.b3log.solo"
] | org.b3log.solo; | 1,795,002 |
protected Size2D arrangeRR(Graphics2D g2, Range widthRange,
Range heightRange) {
RectangleEdge position = getPosition();
if (position == RectangleEdge.TOP || position == RectangleEdge.BOTTOM) {
float maxWidth = (float) widthRange.getUpperBound();
g2.setFont(this.font);
this.content = TextUtilities.createTextBlock(this.text, this.font,
this.paint, maxWidth, new G2TextMeasurer(g2));
this.content.setLineAlignment(this.textAlignment);
Size2D contentSize = this.content.calculateDimensions(g2);
if (this.expandToFitSpace) {
return new Size2D(maxWidth, contentSize.getHeight());
}
else {
return contentSize;
}
}
else if (position == RectangleEdge.LEFT || position
== RectangleEdge.RIGHT) {
float maxWidth = (float) heightRange.getUpperBound();
g2.setFont(this.font);
this.content = TextUtilities.createTextBlock(this.text, this.font,
this.paint, maxWidth, new G2TextMeasurer(g2));
this.content.setLineAlignment(this.textAlignment);
Size2D contentSize = this.content.calculateDimensions(g2);
// transpose the dimensions, because the title is rotated
if (this.expandToFitSpace) {
return new Size2D(contentSize.getHeight(), maxWidth);
}
else {
return new Size2D(contentSize.height, contentSize.width);
}
}
else {
throw new RuntimeException("Unrecognised exception.");
}
}
| Size2D function(Graphics2D g2, Range widthRange, Range heightRange) { RectangleEdge position = getPosition(); if (position == RectangleEdge.TOP position == RectangleEdge.BOTTOM) { float maxWidth = (float) widthRange.getUpperBound(); g2.setFont(this.font); this.content = TextUtilities.createTextBlock(this.text, this.font, this.paint, maxWidth, new G2TextMeasurer(g2)); this.content.setLineAlignment(this.textAlignment); Size2D contentSize = this.content.calculateDimensions(g2); if (this.expandToFitSpace) { return new Size2D(maxWidth, contentSize.getHeight()); } else { return contentSize; } } else if (position == RectangleEdge.LEFT position == RectangleEdge.RIGHT) { float maxWidth = (float) heightRange.getUpperBound(); g2.setFont(this.font); this.content = TextUtilities.createTextBlock(this.text, this.font, this.paint, maxWidth, new G2TextMeasurer(g2)); this.content.setLineAlignment(this.textAlignment); Size2D contentSize = this.content.calculateDimensions(g2); if (this.expandToFitSpace) { return new Size2D(contentSize.getHeight(), maxWidth); } else { return new Size2D(contentSize.height, contentSize.width); } } else { throw new RuntimeException(STR); } } | /**
* Returns the content size for the title. This will reflect the fact that
* a text title positioned on the left or right of a chart will be rotated
* 90 degrees.
*
* @param g2 the graphics device.
* @param widthRange the width range.
* @param heightRange the height range.
*
* @return The content size.
*/ | Returns the content size for the title. This will reflect the fact that a text title positioned on the left or right of a chart will be rotated 90 degrees | arrangeRR | {
"repo_name": "SpoonLabs/astor",
"path": "examples/chart_11/source/org/jfree/chart/title/TextTitle.java",
"license": "gpl-2.0",
"size": 28589
} | [
"java.awt.Graphics2D",
"org.jfree.chart.text.G2TextMeasurer",
"org.jfree.chart.text.TextUtilities",
"org.jfree.chart.util.RectangleEdge",
"org.jfree.chart.util.Size2D",
"org.jfree.data.Range"
] | import java.awt.Graphics2D; import org.jfree.chart.text.G2TextMeasurer; import org.jfree.chart.text.TextUtilities; import org.jfree.chart.util.RectangleEdge; import org.jfree.chart.util.Size2D; import org.jfree.data.Range; | import java.awt.*; import org.jfree.chart.text.*; import org.jfree.chart.util.*; import org.jfree.data.*; | [
"java.awt",
"org.jfree.chart",
"org.jfree.data"
] | java.awt; org.jfree.chart; org.jfree.data; | 2,036,022 |
@Test
public void testJavaFields() {
InternalSerializationService ss = getSerializationService();
JavaFields object = new JavaFields();
MapSampleMetadata metadata = MapSampleMetadataResolver.resolve(ss, jetMapMetadataResolver, object, true);
checkFields(
metadata,
field("publicFieldBase", QueryDataType.INT, true),
field("publicField", QueryDataType.INT, true),
hiddenField(KEY, QueryDataType.OBJECT, true)
);
metadata = MapSampleMetadataResolver.resolve(ss, jetMapMetadataResolver, ss.toData(object), true);
checkFields(
metadata,
field("publicFieldBase", QueryDataType.INT, true),
field("publicField", QueryDataType.INT, true),
hiddenField(KEY, QueryDataType.OBJECT, true)
);
} | void function() { InternalSerializationService ss = getSerializationService(); JavaFields object = new JavaFields(); MapSampleMetadata metadata = MapSampleMetadataResolver.resolve(ss, jetMapMetadataResolver, object, true); checkFields( metadata, field(STR, QueryDataType.INT, true), field(STR, QueryDataType.INT, true), hiddenField(KEY, QueryDataType.OBJECT, true) ); metadata = MapSampleMetadataResolver.resolve(ss, jetMapMetadataResolver, ss.toData(object), true); checkFields( metadata, field(STR, QueryDataType.INT, true), field(STR, QueryDataType.INT, true), hiddenField(KEY, QueryDataType.OBJECT, true) ); } | /**
* Test Java fields.
*/ | Test Java fields | testJavaFields | {
"repo_name": "mdogan/hazelcast",
"path": "hazelcast/src/test/java/com/hazelcast/sql/impl/schema/map/sample/MapSampleMetadataResolverTest.java",
"license": "apache-2.0",
"size": 31227
} | [
"com.hazelcast.internal.serialization.InternalSerializationService",
"com.hazelcast.sql.impl.type.QueryDataType"
] | import com.hazelcast.internal.serialization.InternalSerializationService; import com.hazelcast.sql.impl.type.QueryDataType; | import com.hazelcast.internal.serialization.*; import com.hazelcast.sql.impl.type.*; | [
"com.hazelcast.internal",
"com.hazelcast.sql"
] | com.hazelcast.internal; com.hazelcast.sql; | 2,876,601 |
private long updateCheckpoints(boolean recovery)
{
int operatorCount = 0;
UpdateCheckpointsContext ctx = new UpdateCheckpointsContext(clock, recovery, getCheckpointGroups());
for (OperatorMeta logicalOperator : plan.getLogicalPlan().getRootOperators()) {
List<PTOperator> operators = plan.getOperators(logicalOperator);
if (operators != null) {
for (PTOperator operator : operators) {
operatorCount++;
updateRecoveryCheckpoints(operator, ctx, recovery);
}
}
}
// if no physical operators are available, then don't update committedWindowId
if (operatorCount == 0) {
return committedWindowId;
}
purgeCheckpoints();
for (PTOperator oper : ctx.blocked) {
String containerId = oper.getContainer().getExternalId();
if (containerId != null) {
LOG.info("Blocked operator {} container {} time {}ms", oper, oper.getContainer().toIdStateString(), ctx.currentTms - oper.stats.lastWindowIdChangeTms);
this.containerStopRequests.put(containerId, containerId);
}
}
return ctx.committedWindowId.longValue();
} | long function(boolean recovery) { int operatorCount = 0; UpdateCheckpointsContext ctx = new UpdateCheckpointsContext(clock, recovery, getCheckpointGroups()); for (OperatorMeta logicalOperator : plan.getLogicalPlan().getRootOperators()) { List<PTOperator> operators = plan.getOperators(logicalOperator); if (operators != null) { for (PTOperator operator : operators) { operatorCount++; updateRecoveryCheckpoints(operator, ctx, recovery); } } } if (operatorCount == 0) { return committedWindowId; } purgeCheckpoints(); for (PTOperator oper : ctx.blocked) { String containerId = oper.getContainer().getExternalId(); if (containerId != null) { LOG.info(STR, oper, oper.getContainer().toIdStateString(), ctx.currentTms - oper.stats.lastWindowIdChangeTms); this.containerStopRequests.put(containerId, containerId); } } return ctx.committedWindowId.longValue(); } | /**
* Visit all operators to update current checkpoint based on updated downstream state.
* Purge older checkpoints that are no longer needed.
*/ | Visit all operators to update current checkpoint based on updated downstream state. Purge older checkpoints that are no longer needed | updateCheckpoints | {
"repo_name": "mattqzhang/apex-core",
"path": "engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java",
"license": "apache-2.0",
"size": 136954
} | [
"com.datatorrent.stram.plan.logical.LogicalPlan",
"com.datatorrent.stram.plan.physical.PTOperator",
"java.util.List"
] | import com.datatorrent.stram.plan.logical.LogicalPlan; import com.datatorrent.stram.plan.physical.PTOperator; import java.util.List; | import com.datatorrent.stram.plan.logical.*; import com.datatorrent.stram.plan.physical.*; import java.util.*; | [
"com.datatorrent.stram",
"java.util"
] | com.datatorrent.stram; java.util; | 487,632 |
public long lastModified() throws IOException {
long lastModified = getFileForLastModifiedCheck().lastModified();
if (lastModified == 0L) {
throw new FileNotFoundException(getDescription() +
" cannot be resolved in the file system for resolving its last-modified timestamp");
}
return lastModified;
}
| long function() throws IOException { long lastModified = getFileForLastModifiedCheck().lastModified(); if (lastModified == 0L) { throw new FileNotFoundException(getDescription() + STR); } return lastModified; } | /**
* This implementation checks the timestamp of the underlying File,
* if available.
* @see #getFileForLastModifiedCheck()
*/ | This implementation checks the timestamp of the underlying File, if available | lastModified | {
"repo_name": "besom/bbossgroups-mvn",
"path": "bboss_util/src/main/java/org/frameworkset/util/io/AbstractResource.java",
"license": "apache-2.0",
"size": 5295
} | [
"java.io.FileNotFoundException",
"java.io.IOException"
] | import java.io.FileNotFoundException; import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 171,125 |
public Resource buildComponentResource() {
purgeEmptyMetadata();
AbstractResourceImpl res = new AbstractResourceImpl(path, resourceType, resourceSuperType, componentMetadata);
if (sling != null) {
res.setResourceResolver(sling.getRequest().getResourceResolver());
}
return res;
} | Resource function() { purgeEmptyMetadata(); AbstractResourceImpl res = new AbstractResourceImpl(path, resourceType, resourceSuperType, componentMetadata); if (sling != null) { res.setResourceResolver(sling.getRequest().getResourceResolver()); } return res; } | /**
* If your component needs child nodes then override this method, call the
* superclass implementation, and then use addChildren to add additional
* nodes to it.
*
* @return
*/ | If your component needs child nodes then override this method, call the superclass implementation, and then use addChildren to add additional nodes to it | buildComponentResource | {
"repo_name": "bstopp/acs-aem-commons",
"path": "bundle/src/main/java/com/adobe/acs/commons/mcp/form/FieldComponent.java",
"license": "apache-2.0",
"size": 9191
} | [
"org.apache.sling.api.resource.Resource"
] | import org.apache.sling.api.resource.Resource; | import org.apache.sling.api.resource.*; | [
"org.apache.sling"
] | org.apache.sling; | 1,698,115 |
public static String getLatestBundleID(OozieClient oozieClient,
String entityName, EntityType entityType) throws OozieClientException {
List<String> bundleIds = OozieUtil.getBundles(oozieClient, entityName, entityType);
String max = "0";
int maxID = -1;
for (String strID : bundleIds) {
if (maxID < Integer.parseInt(strID.substring(0, strID.indexOf('-')))) {
maxID = Integer.parseInt(strID.substring(0, strID.indexOf('-')));
max = strID;
}
}
return max;
} | static String function(OozieClient oozieClient, String entityName, EntityType entityType) throws OozieClientException { List<String> bundleIds = OozieUtil.getBundles(oozieClient, entityName, entityType); String max = "0"; int maxID = -1; for (String strID : bundleIds) { if (maxID < Integer.parseInt(strID.substring(0, strID.indexOf('-')))) { maxID = Integer.parseInt(strID.substring(0, strID.indexOf('-'))); max = strID; } } return max; } | /**
* Retrieves the latest bundle ID.
*
* @param oozieClient where job is running
* @param entityName name of entity job is related to
* @param entityType type of entity - feed or process expected
* @return latest bundle ID
* @throws OozieClientException
*/ | Retrieves the latest bundle ID | getLatestBundleID | {
"repo_name": "ajayyadav/Apache-Falcon",
"path": "falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/InstanceUtil.java",
"license": "apache-2.0",
"size": 45772
} | [
"java.util.List",
"org.apache.falcon.entity.v0.EntityType",
"org.apache.oozie.client.OozieClient",
"org.apache.oozie.client.OozieClientException"
] | import java.util.List; import org.apache.falcon.entity.v0.EntityType; import org.apache.oozie.client.OozieClient; import org.apache.oozie.client.OozieClientException; | import java.util.*; import org.apache.falcon.entity.v0.*; import org.apache.oozie.client.*; | [
"java.util",
"org.apache.falcon",
"org.apache.oozie"
] | java.util; org.apache.falcon; org.apache.oozie; | 663,937 |
private String encodeImage(String path) {
File imagefile = new File(path);
FileInputStream fis = null;
try {
fis = new FileInputStream(imagefile);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
Bitmap bm = BitmapFactory.decodeStream(fis);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.JPEG, 100, baos);
byte[] b = baos.toByteArray();
String encImage = Base64.encodeToString(b, Base64.DEFAULT);
//Base64.de
return encImage;
}
| String function(String path) { File imagefile = new File(path); FileInputStream fis = null; try { fis = new FileInputStream(imagefile); } catch (FileNotFoundException e) { e.printStackTrace(); } Bitmap bm = BitmapFactory.decodeStream(fis); ByteArrayOutputStream baos = new ByteArrayOutputStream(); bm.compress(Bitmap.CompressFormat.JPEG, 100, baos); byte[] b = baos.toByteArray(); String encImage = Base64.encodeToString(b, Base64.DEFAULT); return encImage; } | /**
* Encode an image to a base 64 String
*
* @param path
* @return
*/ | Encode an image to a base 64 String | encodeImage | {
"repo_name": "PGMacDesign/PGMacUtilities",
"path": "library/src/main/java/com/pgmacdesign/pgmactips/utilities/FileUtilities.java",
"license": "apache-2.0",
"size": 66064
} | [
"android.graphics.Bitmap",
"android.graphics.BitmapFactory",
"android.util.Base64",
"java.io.ByteArrayOutputStream",
"java.io.File",
"java.io.FileInputStream",
"java.io.FileNotFoundException"
] | import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.util.Base64; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; | import android.graphics.*; import android.util.*; import java.io.*; | [
"android.graphics",
"android.util",
"java.io"
] | android.graphics; android.util; java.io; | 1,247,582 |
static TString tensorOf(Shape shape, DataBuffer<String> data) {
return tensorOf(NdArrays.wrap(shape, data));
} | static TString tensorOf(Shape shape, DataBuffer<String> data) { return tensorOf(NdArrays.wrap(shape, data)); } | /**
* Allocates a new tensor with the given shape and data.
*
* <p>The data will be copied from the provided buffer to the tensor after it is allocated. The
* strings are encoded into bytes using the UTF-8 charset.
*
* @param shape shape of the tensor
* @param data buffer of strings to initialize the tensor with
* @return the new tensor
*/ | Allocates a new tensor with the given shape and data. The data will be copied from the provided buffer to the tensor after it is allocated. The strings are encoded into bytes using the UTF-8 charset | tensorOf | {
"repo_name": "tensorflow/java",
"path": "tensorflow-core/tensorflow-core-api/src/main/java/org/tensorflow/types/TString.java",
"license": "apache-2.0",
"size": 9855
} | [
"org.tensorflow.ndarray.NdArrays",
"org.tensorflow.ndarray.Shape",
"org.tensorflow.ndarray.buffer.DataBuffer"
] | import org.tensorflow.ndarray.NdArrays; import org.tensorflow.ndarray.Shape; import org.tensorflow.ndarray.buffer.DataBuffer; | import org.tensorflow.ndarray.*; import org.tensorflow.ndarray.buffer.*; | [
"org.tensorflow.ndarray"
] | org.tensorflow.ndarray; | 629,682 |
protected void prepare() throws ServletException {
// No-op
} | void function() throws ServletException { } | /**
* Prepares a servlet for request execution. This method is called immediately
* prior to the {@link #validate(Query.Method, Path)} method.
* <p>
* The default implementation is a no-op.
*
* @throws ServletException
*/ | Prepares a servlet for request execution. This method is called immediately prior to the <code>#validate(Query.Method, Path)</code> method. The default implementation is a no-op | prepare | {
"repo_name": "andrescabrera/gwt-dojo-toolkit",
"path": "src/org/apache/pivot/web/server/QueryServlet.java",
"license": "apache-2.0",
"size": 20154
} | [
"javax.servlet.ServletException"
] | import javax.servlet.ServletException; | import javax.servlet.*; | [
"javax.servlet"
] | javax.servlet; | 1,940,836 |
public void moveTo(T target, int targetIndex) {
Objects.requireNonNull(target);
// Check that the target node is not an ancestor of this node, because this would create loops in the tree
if (this.isAncestorOf(target)) {
throw new UnsupportedOperationException("the target cannot be a descendant of this node");
}
// Remove from previous parent
Optional<T> oldParent = getParent();
if (oldParent.isPresent()) {
oldParent.get().removeChild((T) this);
}
// Add as child
target.addChild((T) this, targetIndex);
} | void function(T target, int targetIndex) { Objects.requireNonNull(target); if (this.isAncestorOf(target)) { throw new UnsupportedOperationException(STR); } Optional<T> oldParent = getParent(); if (oldParent.isPresent()) { oldParent.get().removeChild((T) this); } target.addChild((T) this, targetIndex); } | /**
* Removes this node from its parent and makes it a child of the specified node
* by adding it to the specified position in the children list.
* In this way the whole subtree based at this node is moved to the given node.
*
* @param target the new parent
* @param targetIndex the position where the children should be inserted
* @throws NullPointerException if target is null
* @throws ArrayIndexOutOfBoundsException if targetIndex is out of bounds
* @throws UnsupportedOperationException if target is an descendant of this node
*/ | Removes this node from its parent and makes it a child of the specified node by adding it to the specified position in the children list. In this way the whole subtree based at this node is moved to the given node | moveTo | {
"repo_name": "bartsch-dev/jabref",
"path": "src/main/java/org/jabref/model/TreeNode.java",
"license": "mit",
"size": 21094
} | [
"java.util.Objects",
"java.util.Optional"
] | import java.util.Objects; import java.util.Optional; | import java.util.*; | [
"java.util"
] | java.util; | 1,294,422 |
List<String> syncServicesTable(); | List<String> syncServicesTable(); | /**
* Synchronize configured services list with the database.
*
* @return a {@link java.util.List} object.
*/ | Synchronize configured services list with the database | syncServicesTable | {
"repo_name": "roskens/opennms-pre-github",
"path": "opennms-services/src/main/java/org/opennms/netmgt/capsd/CapsdDbSyncer.java",
"license": "agpl-3.0",
"size": 6024
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 568,186 |
public UGen getPitchUGen() {
return pitchEnvelope;
} | UGen function() { return pitchEnvelope; } | /**
* Gets the pitch UGen.
*
* @return the pitch UGen.
*/ | Gets the pitch UGen | getPitchUGen | {
"repo_name": "nhochberger/WavFragmentation",
"path": "lib/beads/src/beads_main/net/beadsproject/beads/ugens/GranularSamplePlayer.java",
"license": "mit",
"size": 14509
} | [
"net.beadsproject.beads.core.UGen"
] | import net.beadsproject.beads.core.UGen; | import net.beadsproject.beads.core.*; | [
"net.beadsproject.beads"
] | net.beadsproject.beads; | 76,334 |
List<ColumnModel> validateSchemaSize(List<String> columnIds);
| List<ColumnModel> validateSchemaSize(List<String> columnIds); | /**
* Validate the given schema is under the max size.
*
* @param columnIds
*/ | Validate the given schema is under the max size | validateSchemaSize | {
"repo_name": "Sage-Bionetworks/Synapse-Repository-Services",
"path": "services/repository-managers/src/main/java/org/sagebionetworks/repo/manager/table/ColumnModelManager.java",
"license": "apache-2.0",
"size": 5718
} | [
"java.util.List",
"org.sagebionetworks.repo.model.table.ColumnModel"
] | import java.util.List; import org.sagebionetworks.repo.model.table.ColumnModel; | import java.util.*; import org.sagebionetworks.repo.model.table.*; | [
"java.util",
"org.sagebionetworks.repo"
] | java.util; org.sagebionetworks.repo; | 1,014,360 |
public void setPacketStream(PacketStream packetStream) {
this.packetStream = packetStream;
} | void function(PacketStream packetStream) { this.packetStream = packetStream; } | /**
* Sets the output packet stream responsible for transmitting this packet.
* @param packetStream - new output packet stream.
*/ | Sets the output packet stream responsible for transmitting this packet | setPacketStream | {
"repo_name": "ewized/ProtocolLib",
"path": "ProtocolLib/src/main/java/com/comphenix/protocol/async/AsyncMarker.java",
"license": "gpl-2.0",
"size": 15054
} | [
"com.comphenix.protocol.PacketStream"
] | import com.comphenix.protocol.PacketStream; | import com.comphenix.protocol.*; | [
"com.comphenix.protocol"
] | com.comphenix.protocol; | 2,581,070 |
public static IPath getDstarPath() {
return getDstarPath(null);
}
| static IPath function() { return getDstarPath(null); } | /**
* Returns the d* Diagram file path.
*
* @return the d* diagram file path.
*/ | Returns the d* Diagram file path | getDstarPath | {
"repo_name": "kuriking/testdc2",
"path": "net.dependableos.dcase.diagram/src/net/dependableos/dcase/diagram/part/PatternUtil.java",
"license": "epl-1.0",
"size": 34627
} | [
"org.eclipse.core.runtime.IPath"
] | import org.eclipse.core.runtime.IPath; | import org.eclipse.core.runtime.*; | [
"org.eclipse.core"
] | org.eclipse.core; | 1,141,076 |
public static void startPhysicalWeb(ChromeApplication application) {
PhysicalWebBleClient physicalWebBleClient = PhysicalWebBleClient.getInstance(application);
physicalWebBleClient.subscribe();
clearUrlsAsync(application);
} | static void function(ChromeApplication application) { PhysicalWebBleClient physicalWebBleClient = PhysicalWebBleClient.getInstance(application); physicalWebBleClient.subscribe(); clearUrlsAsync(application); } | /**
* Start the Physical Web feature.
* At the moment, this only enables URL discovery over BLE.
* @param application An instance of {@link ChromeApplication}, used to get the
* appropriate PhysicalWebBleClient implementation.
*/ | Start the Physical Web feature. At the moment, this only enables URL discovery over BLE | startPhysicalWeb | {
"repo_name": "Bysmyyr/chromium-crosswalk",
"path": "chrome/android/java/src/org/chromium/chrome/browser/physicalweb/PhysicalWeb.java",
"license": "bsd-3-clause",
"size": 2372
} | [
"org.chromium.chrome.browser.ChromeApplication"
] | import org.chromium.chrome.browser.ChromeApplication; | import org.chromium.chrome.browser.*; | [
"org.chromium.chrome"
] | org.chromium.chrome; | 1,651,145 |
@Override
protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
super.collectNewChildDescriptors(newChildDescriptors, object);
}
| void function(Collection<Object> newChildDescriptors, Object object) { super.collectNewChildDescriptors(newChildDescriptors, object); } | /**
* This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
* that can be created under this object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This adds <code>org.eclipse.emf.edit.command.CommandParameter</code>s describing the children that can be created under this object. | collectNewChildDescriptors | {
"repo_name": "KAMP-Research/KAMP4APS",
"path": "edu.kit.ipd.sdq.kamp4aps.aps.edit/src/edu/kit/ipd/sdq/kamp4aps/model/aPS/InterfaceRepository/provider/InterfaceItemProvider.java",
"license": "apache-2.0",
"size": 4133
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 2,473,301 |
TypeAndAnnotation getJavaType(QName xmlTypeName); | TypeAndAnnotation getJavaType(QName xmlTypeName); | /**
* Returns the fully-qualified name of the Java type that is bound to the
* specified XML type.
*
* @param xmlTypeName
* must not be null.
* @return
* null if the XML type is not bound to any Java type.
*/ | Returns the fully-qualified name of the Java type that is bound to the specified XML type | getJavaType | {
"repo_name": "FauxFaux/jdk9-jaxws",
"path": "src/jdk.xml.bind/share/classes/com/sun/tools/internal/xjc/api/S2JJAXBModel.java",
"license": "gpl-2.0",
"size": 3894
} | [
"javax.xml.namespace.QName"
] | import javax.xml.namespace.QName; | import javax.xml.namespace.*; | [
"javax.xml"
] | javax.xml; | 2,687,670 |
private Map<String, Node> getTypeTransformations() {
JSDocInfo jsdoc = NodeUtil.getBestJSDocInfo(this.root);
return jsdoc == null ? ImmutableMap.<String, Node>of() : jsdoc.getTypeTransformations();
} | Map<String, Node> function() { JSDocInfo jsdoc = NodeUtil.getBestJSDocInfo(this.root); return jsdoc == null ? ImmutableMap.<String, Node>of() : jsdoc.getTypeTransformations(); } | /**
* Returns a non-null map from TTL variables to their transformations in AST form.
*/ | Returns a non-null map from TTL variables to their transformations in AST form | getTypeTransformations | {
"repo_name": "GerHobbelt/closure-compiler",
"path": "src/com/google/javascript/jscomp/NTIScope.java",
"license": "apache-2.0",
"size": 26104
} | [
"com.google.common.collect.ImmutableMap",
"com.google.javascript.rhino.JSDocInfo",
"com.google.javascript.rhino.Node",
"java.util.Map"
] | import com.google.common.collect.ImmutableMap; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import java.util.Map; | import com.google.common.collect.*; import com.google.javascript.rhino.*; import java.util.*; | [
"com.google.common",
"com.google.javascript",
"java.util"
] | com.google.common; com.google.javascript; java.util; | 2,132,259 |
EReference getTeamMember_Team();
| EReference getTeamMember_Team(); | /**
* Returns the meta object for the reference '{@link model.TeamMember#getTeam <em>Team</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the reference '<em>Team</em>'.
* @see model.TeamMember#getTeam()
* @see #getTeamMember()
* @generated
*/ | Returns the meta object for the reference '<code>model.TeamMember#getTeam Team</code>'. | getTeamMember_Team | {
"repo_name": "reedcourty/denafutsal",
"path": "hu.bme.mit.inf.mdsd.1.model/src/model/ModelPackage.java",
"license": "mit",
"size": 49421
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 1,540,078 |
public boolean awaitStarted(long timeout, TimeUnit timeUnit) throws InterruptedException {
return started.await(timeout, timeUnit);
} | boolean function(long timeout, TimeUnit timeUnit) throws InterruptedException { return started.await(timeout, timeUnit); } | /**
* Blocks until {@link ResultCallback#onStart()} was called or the given timeout occurs. {@link ResultCallback#onStart()} is called when
* the request was processed on the server side and the response is incoming.
* @return {@code true} if started and {@code false} if the waiting time elapsed
* before {@link ResultCallback#onStart()} was called.
*/ | Blocks until <code>ResultCallback#onStart()</code> was called or the given timeout occurs. <code>ResultCallback#onStart()</code> is called when the request was processed on the server side and the response is incoming | awaitStarted | {
"repo_name": "llamahunter/docker-java",
"path": "src/main/java/com/github/dockerjava/core/async/ResultCallbackTemplate.java",
"license": "apache-2.0",
"size": 3857
} | [
"java.util.concurrent.TimeUnit"
] | import java.util.concurrent.TimeUnit; | import java.util.concurrent.*; | [
"java.util"
] | java.util; | 1,091,952 |
@CheckForNull
public ContainerConfig getConfig() {
return config;
} | ContainerConfig function() { return config; } | /**
* Get the image commit configuration
* @see #config
*/ | Get the image commit configuration | getConfig | {
"repo_name": "tejksat/docker-java",
"path": "docker-java-api/src/main/java/com/github/dockerjava/api/command/InspectImageResponse.java",
"license": "apache-2.0",
"size": 7137
} | [
"com.github.dockerjava.api.model.ContainerConfig"
] | import com.github.dockerjava.api.model.ContainerConfig; | import com.github.dockerjava.api.model.*; | [
"com.github.dockerjava"
] | com.github.dockerjava; | 49,029 |
@JsonProperty("contracts")
public void setContracts(Set<Contract> contracts) {
this.contracts = contracts;
} | @JsonProperty(STR) void function(Set<Contract> contracts) { this.contracts = contracts; } | /**
* Contracts
* <p>
* Information from the contract creation phase of the procurement process.
*/ | Contracts Information from the contract creation phase of the procurement process | setContracts | {
"repo_name": "devgateway/ocua",
"path": "persistence-mongodb/src/main/java/org/devgateway/ocds/persistence/mongo/Release.java",
"license": "mit",
"size": 24880
} | [
"com.fasterxml.jackson.annotation.JsonProperty",
"java.util.Set"
] | import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Set; | import com.fasterxml.jackson.annotation.*; import java.util.*; | [
"com.fasterxml.jackson",
"java.util"
] | com.fasterxml.jackson; java.util; | 956,759 |
private void goToBuildDetails(long buildId) {
Intent intent = new Intent(this, BuildDetailsActivity.class);
intent.putExtra(BuildDetailsActivity.EXTRA_BUILD_ID, buildId);
intent.putExtra(BuildDetailsActivity.EXTRA_REPO_SLUG, getPresenter().getRepoSlug());
startActivityForResult(intent, BUILD_DETAILS_REQUEST_CODE);
} | void function(long buildId) { Intent intent = new Intent(this, BuildDetailsActivity.class); intent.putExtra(BuildDetailsActivity.EXTRA_BUILD_ID, buildId); intent.putExtra(BuildDetailsActivity.EXTRA_REPO_SLUG, getPresenter().getRepoSlug()); startActivityForResult(intent, BUILD_DETAILS_REQUEST_CODE); } | /**
* Navigates to the build details
*
* @param buildId Build ID
*/ | Navigates to the build details | goToBuildDetails | {
"repo_name": "dkhmelenko/Varis-Android",
"path": "app/src/main/java/com/khmelenko/lab/varis/repodetails/RepoDetailsActivity.java",
"license": "apache-2.0",
"size": 9231
} | [
"android.content.Intent",
"com.khmelenko.lab.varis.builddetails.BuildDetailsActivity"
] | import android.content.Intent; import com.khmelenko.lab.varis.builddetails.BuildDetailsActivity; | import android.content.*; import com.khmelenko.lab.varis.builddetails.*; | [
"android.content",
"com.khmelenko.lab"
] | android.content; com.khmelenko.lab; | 1,303,135 |
public static double euclideanSimilarity(Vector a, Vector b) {
return 1 / (1 + euclideanDistance(a,b));
} | static double function(Vector a, Vector b) { return 1 / (1 + euclideanDistance(a,b)); } | /**
* Returns the euclidian similiarty between two arrays of values.
*
* @throws IllegalArgumentException when the length of the two vectors are
* not the same.
*/ | Returns the euclidian similiarty between two arrays of values | euclideanSimilarity | {
"repo_name": "fozziethebeat/S-Space",
"path": "src/main/java/edu/ucla/sspace/common/Similarity.java",
"license": "gpl-2.0",
"size": 90779
} | [
"edu.ucla.sspace.vector.Vector"
] | import edu.ucla.sspace.vector.Vector; | import edu.ucla.sspace.vector.*; | [
"edu.ucla.sspace"
] | edu.ucla.sspace; | 215,264 |
public byte[] writeFile(Path filepath, int sizeKB)
throws IOException {
FileSystem fs = cluster.getFileSystem();
// Write a file with the specified amount of data
DataOutputStream os = fs.create(filepath);
byte data[] = new byte[1024 * sizeKB];
new Random().nextBytes(data);
os.write(data);
os.close();
return data;
} | byte[] function(Path filepath, int sizeKB) throws IOException { FileSystem fs = cluster.getFileSystem(); DataOutputStream os = fs.create(filepath); byte data[] = new byte[1024 * sizeKB]; new Random().nextBytes(data); os.write(data); os.close(); return data; } | /**
* Create a file of the given size filled with random data.
* @return File data.
*/ | Create a file of the given size filled with random data | writeFile | {
"repo_name": "ict-carch/hadoop-plus",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/BlockReaderTestUtil.java",
"license": "apache-2.0",
"size": 5306
} | [
"java.io.DataOutputStream",
"java.io.IOException",
"java.util.Random",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path"
] | import java.io.DataOutputStream; import java.io.IOException; import java.util.Random; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; | import java.io.*; import java.util.*; import org.apache.hadoop.fs.*; | [
"java.io",
"java.util",
"org.apache.hadoop"
] | java.io; java.util; org.apache.hadoop; | 2,398,787 |
public void verifyStatusMessage(String expectedStatusMessage) {
assertEquals(expectedStatusMessage, getFsCopyStatus());
} | void function(String expectedStatusMessage) { assertEquals(expectedStatusMessage, getFsCopyStatus()); } | /**
* Verifies that the status message on the copy modal is the {@code expectedStatusMessage}.
*/ | Verifies that the status message on the copy modal is the expectedStatusMessage | verifyStatusMessage | {
"repo_name": "xpdavid/teammates",
"path": "src/test/java/teammates/test/pageobjects/InstructorCopyFsToModal.java",
"license": "gpl-2.0",
"size": 3926
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 682,334 |
protected HttpURLConnection setupUrlConnection(
Connection rConnection,
I rInput)
{
try
{
String sTargetUrl = getTargetUrl(rConnection, rInput);
HttpURLConnection aUrlConnection =
(HttpURLConnection) new URL(sTargetUrl).openConnection();
eRequestMethod.applyTo(aUrlConnection);
applyRequestHeaders(rConnection, aUrlConnection);
String sUserName = rConnection.getUserName();
if (sUserName != null)
{
NetUtil.enableHttpBasicAuth(aUrlConnection,
sUserName,
rConnection.getPassword());
}
return aUrlConnection;
}
catch (Exception e)
{
throw new CommunicationException(e);
}
} | HttpURLConnection function( Connection rConnection, I rInput) { try { String sTargetUrl = getTargetUrl(rConnection, rInput); HttpURLConnection aUrlConnection = (HttpURLConnection) new URL(sTargetUrl).openConnection(); eRequestMethod.applyTo(aUrlConnection); applyRequestHeaders(rConnection, aUrlConnection); String sUserName = rConnection.getUserName(); if (sUserName != null) { NetUtil.enableHttpBasicAuth(aUrlConnection, sUserName, rConnection.getPassword()); } return aUrlConnection; } catch (Exception e) { throw new CommunicationException(e); } } | /***************************************
* Creates and initializes the URL connection used to communicate with
* the HTTP endpoint.
*
* @param rConnection The endpoint connection
* @param rInput The input value for this communication method
*
* @return The URL connection
*
* @throws CommunicationException If the setup fails
*/ | Creates and initializes the URL connection used to communicate with the HTTP endpoint | setupUrlConnection | {
"repo_name": "esoco/esoco-lib",
"path": "src/main/java/de/esoco/lib/comm/HttpEndpoint.java",
"license": "apache-2.0",
"size": 19099
} | [
"de.esoco.lib.net.NetUtil",
"java.net.HttpURLConnection"
] | import de.esoco.lib.net.NetUtil; import java.net.HttpURLConnection; | import de.esoco.lib.net.*; import java.net.*; | [
"de.esoco.lib",
"java.net"
] | de.esoco.lib; java.net; | 2,280,522 |
public List<CathDomain> filterByCathCode(String query); | List<CathDomain> function(String query); | /** Return list of CATH descriptions whose CATH codes (e.g. 1.4.6.10) start with the query.
* This is currently redundant with getDescriptionsByNodeId.
*
* @param query
* @return CATH descriptions
*/ | Return list of CATH descriptions whose CATH codes (e.g. 1.4.6.10) start with the query. This is currently redundant with getDescriptionsByNodeId | filterByCathCode | {
"repo_name": "sbliven/biojava",
"path": "biojava3-structure/src/main/java/org/biojava/bio/structure/cath/CathDatabase.java",
"license": "lgpl-2.1",
"size": 3173
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,877,273 |
public static <V> boolean addDistinctEntry(List<V> sourceList, V entry) {
return (sourceList != null && !sourceList.contains(entry)) ? sourceList.add(entry) : false;
} | static <V> boolean function(List<V> sourceList, V entry) { return (sourceList != null && !sourceList.contains(entry)) ? sourceList.add(entry) : false; } | /**
* add distinct entry to list
*
* @param <V>
* @param sourceList
* @param entry
* @return if entry already exist in sourceList, return false, else add it and return true.
*/ | add distinct entry to list | addDistinctEntry | {
"repo_name": "q197585312/testApp",
"path": "skinlibrary/src/main/java/solid/ren/skinlibrary/utils/SkinListUtils.java",
"license": "apache-2.0",
"size": 6076
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 994,456 |
@Test
public void test_hashCode2() {
Handgun h1 = new Handgun(123456,"Glock");
Handgun h3 = new Handgun(654321,"Glock");
assertTrue(h1.hashCode()!=h3.hashCode());
} | void function() { Handgun h1 = new Handgun(123456,"Glock"); Handgun h3 = new Handgun(654321,"Glock"); assertTrue(h1.hashCode()!=h3.hashCode()); } | /** Test case 2 for .hashCode
@see Handgun#hashCode
*/ | Test case 2 for .hashCode | test_hashCode2 | {
"repo_name": "UCSB-CS56-W14/W14-lab05",
"path": "src/edu/ucsb/cs56/w14/lab05/menaiskander/HandgunTest.java",
"license": "mit",
"size": 2017
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 1,522,359 |
List<NamedObjectModel> getEnvironmentEntries(); | List<NamedObjectModel> getEnvironmentEntries(); | /**
* List of (kie) environment entries to be registered
* @return
*/ | List of (kie) environment entries to be registered | getEnvironmentEntries | {
"repo_name": "etirelli/droolsjbpm-knowledge",
"path": "kie-internal/src/main/java/org/kie/internal/runtime/conf/DeploymentDescriptor.java",
"license": "apache-2.0",
"size": 5362
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,655,910 |
public static void delete(String path) {
delete(Paths.get(path));
} | static void function(String path) { delete(Paths.get(path)); } | /**
* Deletes a directory recursively.
*/ | Deletes a directory recursively | delete | {
"repo_name": "msrocka/Dirs.java",
"path": "src/main/java/dirs/Dirs.java",
"license": "unlicense",
"size": 3336
} | [
"java.nio.file.Paths"
] | import java.nio.file.Paths; | import java.nio.file.*; | [
"java.nio"
] | java.nio; | 2,478,243 |
@Override
public void addHashes(List<HashEntry> hashes) throws TskCoreException {
SleuthkitJNI.addToHashDatabase(hashes, handle);
} | void function(List<HashEntry> hashes) throws TskCoreException { SleuthkitJNI.addToHashDatabase(hashes, handle); } | /**
* Adds a list of hashes to the hash database at once
*
* @param hashes List of hashes
*
* @throws TskCoreException
*/ | Adds a list of hashes to the hash database at once | addHashes | {
"repo_name": "esaunders/autopsy",
"path": "Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java",
"license": "apache-2.0",
"size": 60611
} | [
"java.util.List",
"org.sleuthkit.datamodel.HashEntry",
"org.sleuthkit.datamodel.SleuthkitJNI",
"org.sleuthkit.datamodel.TskCoreException"
] | import java.util.List; import org.sleuthkit.datamodel.HashEntry; import org.sleuthkit.datamodel.SleuthkitJNI; import org.sleuthkit.datamodel.TskCoreException; | import java.util.*; import org.sleuthkit.datamodel.*; | [
"java.util",
"org.sleuthkit.datamodel"
] | java.util; org.sleuthkit.datamodel; | 2,647,018 |
protected void sendSynReply() {
response.setCommitted(true);
// Special headers
MimeHeaders headers = response.getMimeHeaders();
String contentType = response.getContentType();
if (contentType != null) {
headers.setValue("Content-Type").setString(contentType);
}
String contentLanguage = response.getContentLanguage();
if (contentLanguage != null) {
headers.setValue("Content-Language").setString(contentLanguage);
}
long contentLength = response.getContentLengthLong();
if (contentLength >= 0) {
headers.setValue("Content-Length").setLong(contentLength);
}
sendResponseHead();
}
| void function() { response.setCommitted(true); MimeHeaders headers = response.getMimeHeaders(); String contentType = response.getContentType(); if (contentType != null) { headers.setValue(STR).setString(contentType); } String contentLanguage = response.getContentLanguage(); if (contentLanguage != null) { headers.setValue(STR).setString(contentLanguage); } long contentLength = response.getContentLengthLong(); if (contentLength >= 0) { headers.setValue(STR).setLong(contentLength); } sendResponseHead(); } | /**
* When committing the response, we have to validate the set of headers, as
* well as setup the response filters.
*/ | When committing the response, we have to validate the set of headers, as well as setup the response filters | sendSynReply | {
"repo_name": "wenzhucjy/tomcat_source",
"path": "tomcat-8.0.9-sourcecode/java/org/apache/coyote/spdy/SpdyProcessor.java",
"license": "apache-2.0",
"size": 22422
} | [
"org.apache.tomcat.util.http.MimeHeaders"
] | import org.apache.tomcat.util.http.MimeHeaders; | import org.apache.tomcat.util.http.*; | [
"org.apache.tomcat"
] | org.apache.tomcat; | 1,170,333 |
LeaderRetrievalService getResourceManagerLeaderRetriever(); | LeaderRetrievalService getResourceManagerLeaderRetriever(); | /**
* Gets the leader retriever for the cluster's resource manager.
*/ | Gets the leader retriever for the cluster's resource manager | getResourceManagerLeaderRetriever | {
"repo_name": "ueshin/apache-flink",
"path": "flink-runtime/src/main/java/org/apache/flink/runtime/highavailability/HighAvailabilityServices.java",
"license": "apache-2.0",
"size": 7276
} | [
"org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService"
] | import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService; | import org.apache.flink.runtime.leaderretrieval.*; | [
"org.apache.flink"
] | org.apache.flink; | 918,664 |
public void populate (int reserveWord, String value) throws ApplicationFault {
if ( StringUtils.isEmpty(value)) return;
value = value.replace('_', ' ').trim();
if ( DEBUG_ENABLED) QueryLog.l.debug(
"QueryContext> Value=" + value + " ,reserve sequence " + reserveWord);
switch (reserveWord) {
case ReserveQueryWord.RESERVE_docType:
this.docType = value;
break;
case ReserveQueryWord.RESERVE_scroll:
this.scroll = new Integer(value);
if ( this.scroll < 0 ) this.scroll = 0;
break;
case ReserveQueryWord.RESERVE_state:
this.state = new Storable(value);
break;
case ReserveQueryWord.RESERVE_team:
this.team = new Storable(value);
break;
case ReserveQueryWord.RESERVE_createdBefore:
this.createdBefore = Long.parseLong(value);
break;
case ReserveQueryWord.RESERVE_createdAfter:
this.createdAfter = Long.parseLong(value);
break;
case ReserveQueryWord.RESERVE_modifiedAfter:
this.modifiedAfter = Long.parseLong(value);
break;
case ReserveQueryWord.RESERVE_modifiedBefore:
this.modifiedBefore = Long.parseLong(value);
break;
case ReserveQueryWord.RESERVE_areaInKmRadius:
this.areaInKmRadius = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_matchIp:
this.matchIp = value;
break;
case ReserveQueryWord.RESERVE_latlng:
String[] latlng = StringUtils.getStrings(value, ',');
if ( null == geoId) this.geoId = GeoId.convertLatLng(
Float.parseFloat(latlng[0]), Float.parseFloat(latlng[1]));
break;
case ReserveQueryWord.RESERVE_boostMultiphrase:
this.boostMultiPhrase = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_boostTermWeight:
this.boostTermWeight = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_boostDocumentWeight:
this.boostDocumentWeight = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_boostIpProximity:
this.boostIpProximity = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_boostOwner:
this.boostOwner = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_boostFreshness:
this.boostFreshness = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_boostPrecious:
this.boostPrecious = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_boostChoices:
this.boostChoices = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_facetFetchLimit:
this.facetFetchLimit = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_metaFetchLimit:
this.metaFetchLimit = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_documentFetchLimit:
this.documentFetchLimit = Integer.parseInt(value);
if ( this.metaFetchLimit < this.documentFetchLimit)
this.metaFetchLimit = this.documentFetchLimit;
break;
case ReserveQueryWord.RESERVE_teaserSectionLength:
this.teaserSectionLen = Integer.parseInt(value);
break;
case ReserveQueryWord.RESERVE_metaFields:
this.metaFields = StringUtils.getStrings(value, ",");
break;
case ReserveQueryWord.RESERVE_cluster:
if ( null == clusters) clusters = new HashSet<String>();
List<String> lstClusters = StringUtils.fastSplit(value, ',');
for (String cl : lstClusters) {
clusters.add(cl);
}
break;
case ReserveQueryWord.RESERVE_sortOnMeta:
if ( null == sortOnMeta) sortOnMeta = new HashMap<String, String>();
String[] sortValues = StringUtils.getStrings(value, '=');
switch (sortValues.length) {
case 1:
this.sortOnMeta.put(sortValues[0], SORT_ASC);
break;
case 2:
validateSortingStyle(sortValues[1]);
this.sortOnMeta.put(sortValues[0],sortValues[1]);
break;
default:
throw new ApplicationFault("Parsing Failure : Invalid sorting. Ex id=asc");
}
break;
case ReserveQueryWord.RESERVE_touchstones:
this.isTouchStone = true;
break;
case ReserveQueryWord.RESERVE_sortOnField:
if ( null == sortOnFld) sortOnFld = new HashMap<String, String>();
String[] sortFldValues = StringUtils.getStrings(value, '=');
switch (sortFldValues.length) {
case 1:
this.sortOnFld.put(sortFldValues[0], SORT_ASC);
break;
case 2:
validateSortingStyle(sortFldValues[1]);
this.sortOnFld.put(sortFldValues[0],sortFldValues[1]);
break;
default:
throw new ApplicationFault("Parsing Failure : Invalid sorting. Ex empname=asc");
}
break;
}
}
| void function (int reserveWord, String value) throws ApplicationFault { if ( StringUtils.isEmpty(value)) return; value = value.replace('_', ' ').trim(); if ( DEBUG_ENABLED) QueryLog.l.debug( STR + value + STR + reserveWord); switch (reserveWord) { case ReserveQueryWord.RESERVE_docType: this.docType = value; break; case ReserveQueryWord.RESERVE_scroll: this.scroll = new Integer(value); if ( this.scroll < 0 ) this.scroll = 0; break; case ReserveQueryWord.RESERVE_state: this.state = new Storable(value); break; case ReserveQueryWord.RESERVE_team: this.team = new Storable(value); break; case ReserveQueryWord.RESERVE_createdBefore: this.createdBefore = Long.parseLong(value); break; case ReserveQueryWord.RESERVE_createdAfter: this.createdAfter = Long.parseLong(value); break; case ReserveQueryWord.RESERVE_modifiedAfter: this.modifiedAfter = Long.parseLong(value); break; case ReserveQueryWord.RESERVE_modifiedBefore: this.modifiedBefore = Long.parseLong(value); break; case ReserveQueryWord.RESERVE_areaInKmRadius: this.areaInKmRadius = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_matchIp: this.matchIp = value; break; case ReserveQueryWord.RESERVE_latlng: String[] latlng = StringUtils.getStrings(value, ','); if ( null == geoId) this.geoId = GeoId.convertLatLng( Float.parseFloat(latlng[0]), Float.parseFloat(latlng[1])); break; case ReserveQueryWord.RESERVE_boostMultiphrase: this.boostMultiPhrase = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_boostTermWeight: this.boostTermWeight = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_boostDocumentWeight: this.boostDocumentWeight = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_boostIpProximity: this.boostIpProximity = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_boostOwner: this.boostOwner = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_boostFreshness: this.boostFreshness = Integer.parseInt(value); break; case 
ReserveQueryWord.RESERVE_boostPrecious: this.boostPrecious = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_boostChoices: this.boostChoices = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_facetFetchLimit: this.facetFetchLimit = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_metaFetchLimit: this.metaFetchLimit = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_documentFetchLimit: this.documentFetchLimit = Integer.parseInt(value); if ( this.metaFetchLimit < this.documentFetchLimit) this.metaFetchLimit = this.documentFetchLimit; break; case ReserveQueryWord.RESERVE_teaserSectionLength: this.teaserSectionLen = Integer.parseInt(value); break; case ReserveQueryWord.RESERVE_metaFields: this.metaFields = StringUtils.getStrings(value, ","); break; case ReserveQueryWord.RESERVE_cluster: if ( null == clusters) clusters = new HashSet<String>(); List<String> lstClusters = StringUtils.fastSplit(value, ','); for (String cl : lstClusters) { clusters.add(cl); } break; case ReserveQueryWord.RESERVE_sortOnMeta: if ( null == sortOnMeta) sortOnMeta = new HashMap<String, String>(); String[] sortValues = StringUtils.getStrings(value, '='); switch (sortValues.length) { case 1: this.sortOnMeta.put(sortValues[0], SORT_ASC); break; case 2: validateSortingStyle(sortValues[1]); this.sortOnMeta.put(sortValues[0],sortValues[1]); break; default: throw new ApplicationFault(STR); } break; case ReserveQueryWord.RESERVE_touchstones: this.isTouchStone = true; break; case ReserveQueryWord.RESERVE_sortOnField: if ( null == sortOnFld) sortOnFld = new HashMap<String, String>(); String[] sortFldValues = StringUtils.getStrings(value, '='); switch (sortFldValues.length) { case 1: this.sortOnFld.put(sortFldValues[0], SORT_ASC); break; case 2: validateSortingStyle(sortFldValues[1]); this.sortOnFld.put(sortFldValues[0],sortFldValues[1]); break; default: throw new ApplicationFault(STR); } break; } } | /**
* This populates the query Context element from the
* Lucene Style Query String
* @param reserveWord
* @param value
*/ | This populates the query Context element from the Lucene Style Query String | populate | {
"repo_name": "bizosys/hsearch-obsolete",
"path": "src/java/com/bizosys/hsearch/query/QueryContext.java",
"license": "apache-2.0",
"size": 14172
} | [
"com.bizosys.hsearch.filter.Storable",
"com.bizosys.hsearch.util.GeoId",
"com.bizosys.oneline.ApplicationFault",
"com.bizosys.oneline.util.StringUtils",
"java.util.HashMap",
"java.util.HashSet",
"java.util.List"
] | import com.bizosys.hsearch.filter.Storable; import com.bizosys.hsearch.util.GeoId; import com.bizosys.oneline.ApplicationFault; import com.bizosys.oneline.util.StringUtils; import java.util.HashMap; import java.util.HashSet; import java.util.List; | import com.bizosys.hsearch.filter.*; import com.bizosys.hsearch.util.*; import com.bizosys.oneline.*; import com.bizosys.oneline.util.*; import java.util.*; | [
"com.bizosys.hsearch",
"com.bizosys.oneline",
"java.util"
] | com.bizosys.hsearch; com.bizosys.oneline; java.util; | 338,953 |
protected FileSystemConfiguration igfsConfiguration(String gridName) throws IgniteCheckedException {
FileSystemConfiguration cfg = new FileSystemConfiguration();
cfg.setDataCacheName("partitioned");
cfg.setMetaCacheName("replicated");
cfg.setName("igfs");
cfg.setPrefetchBlocks(1);
cfg.setMaxSpaceSize(64 * 1024 * 1024);
cfg.setDefaultMode(mode);
if (mode != PRIMARY)
cfg.setSecondaryFileSystem(new IgniteHadoopIgfsSecondaryFileSystem(secondaryFileSystemUriPath(),
secondaryFileSystemConfigPath(), SECONDARY_FS_USER));
cfg.setIpcEndpointConfiguration(primaryIpcEndpointConfiguration(gridName));
cfg.setManagementPort(-1);
cfg.setBlockSize(512 * 1024); // Together with group blocks mapper will yield 64M per node groups.
return cfg;
} | FileSystemConfiguration function(String gridName) throws IgniteCheckedException { FileSystemConfiguration cfg = new FileSystemConfiguration(); cfg.setDataCacheName(STR); cfg.setMetaCacheName(STR); cfg.setName("igfs"); cfg.setPrefetchBlocks(1); cfg.setMaxSpaceSize(64 * 1024 * 1024); cfg.setDefaultMode(mode); if (mode != PRIMARY) cfg.setSecondaryFileSystem(new IgniteHadoopIgfsSecondaryFileSystem(secondaryFileSystemUriPath(), secondaryFileSystemConfigPath(), SECONDARY_FS_USER)); cfg.setIpcEndpointConfiguration(primaryIpcEndpointConfiguration(gridName)); cfg.setManagementPort(-1); cfg.setBlockSize(512 * 1024); return cfg; } | /**
* Gets IGFS configuration.
*
* @param gridName Grid name.
* @return IGFS configuration.
*/ | Gets IGFS configuration | igfsConfiguration | {
"repo_name": "gargvish/ignite",
"path": "modules/hadoop/src/test/java/org/apache/ignite/igfs/HadoopIgfs20FileSystemAbstractSelfTest.java",
"license": "apache-2.0",
"size": 67942
} | [
"org.apache.ignite.IgniteCheckedException",
"org.apache.ignite.configuration.FileSystemConfiguration",
"org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem"
] | import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem; | import org.apache.ignite.*; import org.apache.ignite.configuration.*; import org.apache.ignite.hadoop.fs.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 314,915 |
public UserDetails getUserDetailsFromToken(String token) {
if (authenticated(token)) {
// Load user
Optional<AuthUser> user = authUserServ.load(Token.getUidFromToken(token));
if (user.filter(AuthUser::enabled).isPresent()) {
List<GrantedAuthority> authorities = new LinkedList<>();
Set<AuthGroup> groups = user.get().getGroups();
if (groups != null && groups.size() > 0) {
groups.forEach(x -> x.getRoles().forEach(y -> authorities.add(new SimpleGrantedAuthority(y.getName().trim()))));
}
return new User(user.get().getUid(), "***", authorities);
}
}
return null;
} | UserDetails function(String token) { if (authenticated(token)) { Optional<AuthUser> user = authUserServ.load(Token.getUidFromToken(token)); if (user.filter(AuthUser::enabled).isPresent()) { List<GrantedAuthority> authorities = new LinkedList<>(); Set<AuthGroup> groups = user.get().getGroups(); if (groups != null && groups.size() > 0) { groups.forEach(x -> x.getRoles().forEach(y -> authorities.add(new SimpleGrantedAuthority(y.getName().trim())))); } return new User(user.get().getUid(), "***", authorities); } } return null; } | /**
* Get {@link org.springframework.security.core.userdetails.UserDetails} from token
*
* @param token token
* @return {@link org.springframework.security.core.userdetails.UserDetails} if token authenticated,otherwise return null
*/ | Get <code>org.springframework.security.core.userdetails.UserDetails</code> from token | getUserDetailsFromToken | {
"repo_name": "rockagen/security-stateless-samples",
"path": "src/main/java/com/rockagen/gnext/service/spring/security/extension/ExTokenAuthentication.java",
"license": "apache-2.0",
"size": 6286
} | [
"com.rockagen.gnext.po.AuthGroup",
"com.rockagen.gnext.po.AuthUser",
"com.rockagen.gnext.tool.Token",
"java.util.LinkedList",
"java.util.List",
"java.util.Optional",
"java.util.Set",
"org.springframework.security.core.GrantedAuthority",
"org.springframework.security.core.authority.SimpleGrantedAuthority",
"org.springframework.security.core.userdetails.User",
"org.springframework.security.core.userdetails.UserDetails"
] | import com.rockagen.gnext.po.AuthGroup; import com.rockagen.gnext.po.AuthUser; import com.rockagen.gnext.tool.Token; import java.util.LinkedList; import java.util.List; import java.util.Optional; import java.util.Set; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.authority.SimpleGrantedAuthority; import org.springframework.security.core.userdetails.User; import org.springframework.security.core.userdetails.UserDetails; | import com.rockagen.gnext.po.*; import com.rockagen.gnext.tool.*; import java.util.*; import org.springframework.security.core.*; import org.springframework.security.core.authority.*; import org.springframework.security.core.userdetails.*; | [
"com.rockagen.gnext",
"java.util",
"org.springframework.security"
] | com.rockagen.gnext; java.util; org.springframework.security; | 1,718,267 |
final Toolbar toolbar = (Toolbar) activity.findViewById(toolbarId);
if (toolbar == null) {
return false;
}
activity.setSupportActionBar(toolbar);
return true;
} | final Toolbar toolbar = (Toolbar) activity.findViewById(toolbarId); if (toolbar == null) { return false; } activity.setSupportActionBar(toolbar); return true; } | /**
* Locates a {@link Toolbar} with a particular view ID in an {@link AppCompatActivity}'s layout
* and if found, uses it as the activity's action bar.
*
* @param activity An activity.
* @param toolbarId The view ID of the {@link Toolbar} to use as {@code activity}'s app bar.
*
* @return {@code true} if {@code activity} has a {@link Toolbar} with {@code toolbarId} as its
* view ID; {@code false} otherwise.
*/ | Locates a <code>Toolbar</code> with a particular view ID in an <code>AppCompatActivity</code>'s layout and if found, uses it as the activity's action bar | setToolbarAppBar | {
"repo_name": "queencodemonkey/Presentation-Loving-Lean-Layouts",
"path": "app/src/main/java/com/randomlytyping/util/AppCompatUtil.java",
"license": "apache-2.0",
"size": 1955
} | [
"android.support.v7.widget.Toolbar"
] | import android.support.v7.widget.Toolbar; | import android.support.v7.widget.*; | [
"android.support"
] | android.support; | 329,458 |
public Timestamp getCreated();
public static final String COLUMNNAME_CreatedBy = "CreatedBy"; | Timestamp function(); public static final String COLUMNNAME_CreatedBy = STR; | /** Get Created.
* Date this record was created
*/ | Get Created. Date this record was created | getCreated | {
"repo_name": "geneos/adempiere",
"path": "base/src/org/compiere/model/I_C_DunningRunLine.java",
"license": "gpl-2.0",
"size": 8511
} | [
"java.sql.Timestamp"
] | import java.sql.Timestamp; | import java.sql.*; | [
"java.sql"
] | java.sql; | 490,860 |
void enterColumnNameTypeList(@NotNull CQLParser.ColumnNameTypeListContext ctx);
void exitColumnNameTypeList(@NotNull CQLParser.ColumnNameTypeListContext ctx); | void enterColumnNameTypeList(@NotNull CQLParser.ColumnNameTypeListContext ctx); void exitColumnNameTypeList(@NotNull CQLParser.ColumnNameTypeListContext ctx); | /**
* Exit a parse tree produced by {@link CQLParser#columnNameTypeList}.
* @param ctx the parse tree
*/ | Exit a parse tree produced by <code>CQLParser#columnNameTypeList</code> | exitColumnNameTypeList | {
"repo_name": "jack6215/StreamCQL",
"path": "cql/src/main/java/com/huawei/streaming/cql/semanticanalyzer/parser/CQLParserListener.java",
"license": "apache-2.0",
"size": 62500
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 2,115,553 |
boolean clientUpdateByExampleSelectiveMethodGenerated(Method method,
Interface interfaze, IntrospectedTable introspectedTable); | boolean clientUpdateByExampleSelectiveMethodGenerated(Method method, Interface interfaze, IntrospectedTable introspectedTable); | /**
* This method is called when the updateByExampleSelective method has been
* generated in the client interface.
*
* @param method
* the generated updateByExampleSelective method
* @param interfaze
* the partially implemented client interface. You can add
* additional imported classes to the interface if
* necessary.
* @param introspectedTable
* The class containing information about the table as
* introspected from the database
* @return true if the method should be generated, false if the generated
* method should be ignored. In the case of multiple plugins, the
* first plugin returning false will disable the calling of further
* plugins.
*/ | This method is called when the updateByExampleSelective method has been generated in the client interface | clientUpdateByExampleSelectiveMethodGenerated | {
"repo_name": "li24361/mybatis-generator-core",
"path": "src/main/java/org/mybatis/generator/api/Plugin.java",
"license": "apache-2.0",
"size": 73078
} | [
"org.mybatis.generator.api.dom.java.Interface",
"org.mybatis.generator.api.dom.java.Method"
] | import org.mybatis.generator.api.dom.java.Interface; import org.mybatis.generator.api.dom.java.Method; | import org.mybatis.generator.api.dom.java.*; | [
"org.mybatis.generator"
] | org.mybatis.generator; | 2,627,528 |
public void updateThrottleTierPermissions(String tierName, String permissionType, String roles) throws
APIManagementException {
apiMgtDAO.updateThrottleTierPermissions(tierName, permissionType, roles, tenantId);
} | void function(String tierName, String permissionType, String roles) throws APIManagementException { apiMgtDAO.updateThrottleTierPermissions(tierName, permissionType, roles, tenantId); } | /**
* Update the Tier Permissions
*
* @param tierName Tier Name
* @param permissionType Permission Type
* @param roles Roles
* @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
*/ | Update the Tier Permissions | updateThrottleTierPermissions | {
"repo_name": "jaadds/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIProviderImpl.java",
"license": "apache-2.0",
"size": 563675
} | [
"org.wso2.carbon.apimgt.api.APIManagementException"
] | import org.wso2.carbon.apimgt.api.APIManagementException; | import org.wso2.carbon.apimgt.api.*; | [
"org.wso2.carbon"
] | org.wso2.carbon; | 1,538,698 |
public void setAllowLeadingWildcard(boolean allowLeadingWildcard) {
AllowLeadingWildcardAttribute attr = getQueryConfigHandler().getAttribute(
AllowLeadingWildcardAttribute.class);
attr.setAllowLeadingWildcard(allowLeadingWildcard);
// uncomment code below when deprecated query parser attributes are removed
// getQueryConfigHandler().set(ConfigurationKeys.ALLOW_LEADING_WILDCARD,
// allowLeadingWildcard);
} | void function(boolean allowLeadingWildcard) { AllowLeadingWildcardAttribute attr = getQueryConfigHandler().getAttribute( AllowLeadingWildcardAttribute.class); attr.setAllowLeadingWildcard(allowLeadingWildcard); } | /**
* Set to <code>true</code> to allow leading wildcard characters.
* <p>
* When set, <code>*</code> or <code>?</code> are allowed as the first
* character of a PrefixQuery and WildcardQuery. Note that this can produce
* very slow queries on big indexes.
* <p>
* Default: false.
*/ | Set to <code>true</code> to allow leading wildcard characters. When set, <code>*</code> or <code>?</code> are allowed as the first character of a PrefixQuery and WildcardQuery. Note that this can produce very slow queries on big indexes. Default: false | setAllowLeadingWildcard | {
"repo_name": "bighaidao/lucene",
"path": "lucene-queryparser/src/main/java/org/apache/lucene/queryParser/standard/StandardQueryParser.java",
"license": "apache-2.0",
"size": 23450
} | [
"org.apache.lucene.queryParser.standard.config.AllowLeadingWildcardAttribute"
] | import org.apache.lucene.queryParser.standard.config.AllowLeadingWildcardAttribute; | import org.apache.lucene.*; | [
"org.apache.lucene"
] | org.apache.lucene; | 984,928 |
public static void compareResultSetWithDelta(
ResultSet resultSet,
double expected,
double delta) throws Exception {
if (!resultSet.next()) {
fail("Query returned 0 rows, expected 1");
}
double actual = resultSet.getDouble(1);
if (resultSet.next()) {
fail("Query returned 2 or more rows, expected 1");
}
if ((actual < (expected - delta)) || (actual > (expected + delta))) {
fail("Query returned " + actual + ", expected " + expected
+ ((delta == 0) ? "" : ("+/-" + delta)));
}
}
//~ Inner Classes ----------------------------------------------------------
private static class SqlTypeChecker implements TypeChecker {
private final SqlTypeName typeName;
SqlTypeChecker(SqlTypeName typeName) {
this.typeName = typeName;
} | static void function( ResultSet resultSet, double expected, double delta) throws Exception { if (!resultSet.next()) { fail(STR); } double actual = resultSet.getDouble(1); if (resultSet.next()) { fail(STR); } if ((actual < (expected - delta)) (actual > (expected + delta))) { fail(STR + actual + STR + expected + ((delta == 0) ? STR+/-" + delta))); } } private static class SqlTypeChecker implements TypeChecker { private final SqlTypeName typeName; SqlTypeChecker(SqlTypeName typeName) { this.typeName = typeName; } | /**
* Compares the first column of a result set against a numeric result,
* within a given tolerance. The result set must return exactly one row.
*
* @param resultSet Result set
* @param expected Expected result
* @param delta Tolerance
*/ | Compares the first column of a result set against a numeric result, within a given tolerance. The result set must return exactly one row | compareResultSetWithDelta | {
"repo_name": "sreev/incubator-calcite",
"path": "core/src/test/java/org/apache/calcite/sql/test/SqlTests.java",
"license": "apache-2.0",
"size": 12554
} | [
"java.sql.ResultSet",
"org.apache.calcite.sql.test.SqlTester",
"org.apache.calcite.sql.type.SqlTypeName",
"org.junit.Assert"
] | import java.sql.ResultSet; import org.apache.calcite.sql.test.SqlTester; import org.apache.calcite.sql.type.SqlTypeName; import org.junit.Assert; | import java.sql.*; import org.apache.calcite.sql.test.*; import org.apache.calcite.sql.type.*; import org.junit.*; | [
"java.sql",
"org.apache.calcite",
"org.junit"
] | java.sql; org.apache.calcite; org.junit; | 1,673,924 |
private boolean[] correctBits(boolean[] rawbits) throws FormatException {
GenericGF gf;
int codewordSize;
if (ddata.getNbLayers() <= 2) {
codewordSize = 6;
gf = GenericGF.AZTEC_DATA_6;
} else if (ddata.getNbLayers() <= 8) {
codewordSize = 8;
gf = GenericGF.AZTEC_DATA_8;
} else if (ddata.getNbLayers() <= 22) {
codewordSize = 10;
gf = GenericGF.AZTEC_DATA_10;
} else {
codewordSize = 12;
gf = GenericGF.AZTEC_DATA_12;
}
int numDataCodewords = ddata.getNbDatablocks();
int numCodewords = rawbits.length / codewordSize;
if (numCodewords < numDataCodewords) {
throw FormatException.getFormatInstance();
}
int offset = rawbits.length % codewordSize;
int[] dataWords = new int[numCodewords];
for (int i = 0; i < numCodewords; i++, offset += codewordSize) {
dataWords[i] = readCode(rawbits, offset, codewordSize);
}
try {
ReedSolomonDecoder rsDecoder = new ReedSolomonDecoder(gf);
rsDecoder.decode(dataWords, numCodewords - numDataCodewords);
} catch (ReedSolomonException ex) {
throw FormatException.getFormatInstance(ex);
}
// Now perform the unstuffing operation.
// First, count how many bits are going to be thrown out as stuffing
int mask = (1 << codewordSize) - 1;
int stuffedBits = 0;
for (int i = 0; i < numDataCodewords; i++) {
int dataWord = dataWords[i];
if (dataWord == 0 || dataWord == mask) {
throw FormatException.getFormatInstance();
} else if (dataWord == 1 || dataWord == mask - 1) {
stuffedBits++;
}
}
// Now, actually unpack the bits and remove the stuffing
boolean[] correctedBits = new boolean[numDataCodewords * codewordSize - stuffedBits];
int index = 0;
for (int i = 0; i < numDataCodewords; i++) {
int dataWord = dataWords[i];
if (dataWord == 1 || dataWord == mask - 1) {
// next codewordSize-1 bits are all zeros or all ones
Arrays.fill(correctedBits, index, index + codewordSize - 1, dataWord > 1);
index += codewordSize - 1;
} else {
for (int bit = codewordSize - 1; bit >= 0; --bit) {
correctedBits[index++] = (dataWord & (1 << bit)) != 0;
}
}
}
return correctedBits;
} | boolean[] function(boolean[] rawbits) throws FormatException { GenericGF gf; int codewordSize; if (ddata.getNbLayers() <= 2) { codewordSize = 6; gf = GenericGF.AZTEC_DATA_6; } else if (ddata.getNbLayers() <= 8) { codewordSize = 8; gf = GenericGF.AZTEC_DATA_8; } else if (ddata.getNbLayers() <= 22) { codewordSize = 10; gf = GenericGF.AZTEC_DATA_10; } else { codewordSize = 12; gf = GenericGF.AZTEC_DATA_12; } int numDataCodewords = ddata.getNbDatablocks(); int numCodewords = rawbits.length / codewordSize; if (numCodewords < numDataCodewords) { throw FormatException.getFormatInstance(); } int offset = rawbits.length % codewordSize; int[] dataWords = new int[numCodewords]; for (int i = 0; i < numCodewords; i++, offset += codewordSize) { dataWords[i] = readCode(rawbits, offset, codewordSize); } try { ReedSolomonDecoder rsDecoder = new ReedSolomonDecoder(gf); rsDecoder.decode(dataWords, numCodewords - numDataCodewords); } catch (ReedSolomonException ex) { throw FormatException.getFormatInstance(ex); } int mask = (1 << codewordSize) - 1; int stuffedBits = 0; for (int i = 0; i < numDataCodewords; i++) { int dataWord = dataWords[i]; if (dataWord == 0 dataWord == mask) { throw FormatException.getFormatInstance(); } else if (dataWord == 1 dataWord == mask - 1) { stuffedBits++; } } boolean[] correctedBits = new boolean[numDataCodewords * codewordSize - stuffedBits]; int index = 0; for (int i = 0; i < numDataCodewords; i++) { int dataWord = dataWords[i]; if (dataWord == 1 dataWord == mask - 1) { Arrays.fill(correctedBits, index, index + codewordSize - 1, dataWord > 1); index += codewordSize - 1; } else { for (int bit = codewordSize - 1; bit >= 0; --bit) { correctedBits[index++] = (dataWord & (1 << bit)) != 0; } } } return correctedBits; } | /**
* <p>Performs RS error correction on an array of bits.</p>
*
* @return the corrected array
* @throws FormatException if the input contains too many errors
*/ | Performs RS error correction on an array of bits | correctBits | {
"repo_name": "RoverPlatform/rover-android",
"path": "sdk/src/main/java/io/rover/shaded/zxing/com/google/zxing/aztec/decoder/Decoder.java",
"license": "apache-2.0",
"size": 12583
} | [
"io.rover.shaded.zxing.com.google.zxing.FormatException",
"io.rover.shaded.zxing.com.google.zxing.common.reedsolomon.GenericGF",
"io.rover.shaded.zxing.com.google.zxing.common.reedsolomon.ReedSolomonDecoder",
"io.rover.shaded.zxing.com.google.zxing.common.reedsolomon.ReedSolomonException",
"java.util.Arrays"
] | import io.rover.shaded.zxing.com.google.zxing.FormatException; import io.rover.shaded.zxing.com.google.zxing.common.reedsolomon.GenericGF; import io.rover.shaded.zxing.com.google.zxing.common.reedsolomon.ReedSolomonDecoder; import io.rover.shaded.zxing.com.google.zxing.common.reedsolomon.ReedSolomonException; import java.util.Arrays; | import io.rover.shaded.zxing.com.google.zxing.*; import io.rover.shaded.zxing.com.google.zxing.common.reedsolomon.*; import java.util.*; | [
"io.rover.shaded",
"java.util"
] | io.rover.shaded; java.util; | 626,949 |
@Override public void exitRetrievalQuery(@NotNull QueryParser.RetrievalQueryContext ctx) { } | @Override public void exitRetrievalQuery(@NotNull QueryParser.RetrievalQueryContext ctx) { } | /**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/ | The default implementation does nothing | enterRetrievalQuery | {
"repo_name": "objectof-group/objectof",
"path": "model/src/main/java/net/objectof/model/query/parser/QueryParserBaseListener.java",
"license": "gpl-3.0",
"size": 6183
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 2,874,148 |
public static Map<String, InputStream> extractJarFromPath(String name)
throws Exception
{
Map<String, InputStream> values = new HashMap<String, InputStream>();
if (name == null) return values;
ClassLoader loader = IOUtil.class.getClassLoader();
if (isJavaWebStart())
loader = Thread.currentThread().getContextClassLoader();
//Get the URLs
URL[] urls = ((URLClassLoader) loader).getURLs();
try {
File f;
String n;
for (URL url : urls) {
n = url.getFile();
f = new File(n);
if (f.getName().contains(name)) {
readJar(values, f);
}
}
} catch (Exception e) {
throw new Exception("Cannot read the requested jar.", e);
}
return values;
}
| static Map<String, InputStream> function(String name) throws Exception { Map<String, InputStream> values = new HashMap<String, InputStream>(); if (name == null) return values; ClassLoader loader = IOUtil.class.getClassLoader(); if (isJavaWebStart()) loader = Thread.currentThread().getContextClassLoader(); URL[] urls = ((URLClassLoader) loader).getURLs(); try { File f; String n; for (URL url : urls) { n = url.getFile(); f = new File(n); if (f.getName().contains(name)) { readJar(values, f); } } } catch (Exception e) { throw new Exception(STR, e); } return values; } | /**
* Extracts the specified jar name from the class path.
*
* @param name Value contained in the jar name.
* @return See above.
*/ | Extracts the specified jar name from the class path | extractJarFromPath | {
"repo_name": "stelfrich/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/util/file/IOUtil.java",
"license": "gpl-2.0",
"size": 12334
} | [
"java.io.File",
"java.io.InputStream",
"java.net.URLClassLoader",
"java.util.HashMap",
"java.util.Map"
] | import java.io.File; import java.io.InputStream; import java.net.URLClassLoader; import java.util.HashMap; import java.util.Map; | import java.io.*; import java.net.*; import java.util.*; | [
"java.io",
"java.net",
"java.util"
] | java.io; java.net; java.util; | 2,485,029 |
public void testJava2DToValue() {
DateAxis axis = new DateAxis();
axis.setRange(50.0, 100.0);
Rectangle2D dataArea = new Rectangle2D.Double(10.0, 50.0, 400.0, 300.0);
double y1 = axis.java2DToValue(75.0, dataArea, RectangleEdge.LEFT);
assertTrue(same(y1, 95.8333333, 1.0));
double y2 = axis.java2DToValue(75.0, dataArea, RectangleEdge.RIGHT);
assertTrue(same(y2, 95.8333333, 1.0));
double x1 = axis.java2DToValue(75.0, dataArea, RectangleEdge.TOP);
assertTrue(same(x1, 58.125, 1.0));
double x2 = axis.java2DToValue(75.0, dataArea, RectangleEdge.BOTTOM);
assertTrue(same(x2, 58.125, 1.0));
axis.setInverted(true);
double y3 = axis.java2DToValue(75.0, dataArea, RectangleEdge.LEFT);
assertTrue(same(y3, 54.1666667, 1.0));
double y4 = axis.java2DToValue(75.0, dataArea, RectangleEdge.RIGHT);
assertTrue(same(y4, 54.1666667, 1.0));
double x3 = axis.java2DToValue(75.0, dataArea, RectangleEdge.TOP);
assertTrue(same(x3, 91.875, 1.0));
double x4 = axis.java2DToValue(75.0, dataArea, RectangleEdge.BOTTOM);
assertTrue(same(x4, 91.875, 1.0));
} | void function() { DateAxis axis = new DateAxis(); axis.setRange(50.0, 100.0); Rectangle2D dataArea = new Rectangle2D.Double(10.0, 50.0, 400.0, 300.0); double y1 = axis.java2DToValue(75.0, dataArea, RectangleEdge.LEFT); assertTrue(same(y1, 95.8333333, 1.0)); double y2 = axis.java2DToValue(75.0, dataArea, RectangleEdge.RIGHT); assertTrue(same(y2, 95.8333333, 1.0)); double x1 = axis.java2DToValue(75.0, dataArea, RectangleEdge.TOP); assertTrue(same(x1, 58.125, 1.0)); double x2 = axis.java2DToValue(75.0, dataArea, RectangleEdge.BOTTOM); assertTrue(same(x2, 58.125, 1.0)); axis.setInverted(true); double y3 = axis.java2DToValue(75.0, dataArea, RectangleEdge.LEFT); assertTrue(same(y3, 54.1666667, 1.0)); double y4 = axis.java2DToValue(75.0, dataArea, RectangleEdge.RIGHT); assertTrue(same(y4, 54.1666667, 1.0)); double x3 = axis.java2DToValue(75.0, dataArea, RectangleEdge.TOP); assertTrue(same(x3, 91.875, 1.0)); double x4 = axis.java2DToValue(75.0, dataArea, RectangleEdge.BOTTOM); assertTrue(same(x4, 91.875, 1.0)); } | /**
* Test the translation of Java2D values to data values.
*/ | Test the translation of Java2D values to data values | testJava2DToValue | {
"repo_name": "integrated/jfreechart",
"path": "tests/org/jfree/chart/axis/junit/DateAxisTests.java",
"license": "lgpl-2.1",
"size": 42323
} | [
"java.awt.geom.Rectangle2D",
"org.jfree.chart.axis.DateAxis",
"org.jfree.ui.RectangleEdge"
] | import java.awt.geom.Rectangle2D; import org.jfree.chart.axis.DateAxis; import org.jfree.ui.RectangleEdge; | import java.awt.geom.*; import org.jfree.chart.axis.*; import org.jfree.ui.*; | [
"java.awt",
"org.jfree.chart",
"org.jfree.ui"
] | java.awt; org.jfree.chart; org.jfree.ui; | 131,803 |
public void updateBackgroundPadding(Rect padding) {
mBackgroundPadding.set(padding);
} | void function(Rect padding) { mBackgroundPadding.set(padding); } | /**
* Notifies the adapter of the background padding so that it can draw things correctly in the
* item decorator.
*/ | Notifies the adapter of the background padding so that it can draw things correctly in the item decorator | updateBackgroundPadding | {
"repo_name": "YAJATapps/FlickLauncher",
"path": "src/com/android/launcher3/allapps/AllAppsGridAdapter.java",
"license": "apache-2.0",
"size": 22616
} | [
"android.graphics.Rect"
] | import android.graphics.Rect; | import android.graphics.*; | [
"android.graphics"
] | android.graphics; | 714,390 |
public Set<Tier> getTiers(int tierType, String username) throws APIManagementException {
Set<Tier> tiers = new TreeSet<Tier>(new TierNameComparator());
String tenantDomain = MultitenantUtils.getTenantDomain(username);
Map<String, Tier> tierMap;
if(!APIUtil.isAdvanceThrottlingEnabled()) {
tierMap = APIUtil.getTiers(tierType, tenantDomain);
tiers.addAll(tierMap.values());
} else {
int tenantIdFromUsername = APIUtil.getTenantId(username);
if (tierType == APIConstants.TIER_API_TYPE) {
tierMap = APIUtil.getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, tenantIdFromUsername);
} else if (tierType == APIConstants.TIER_RESOURCE_TYPE) {
tierMap = APIUtil.getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_API, tenantIdFromUsername);
} else if (tierType == APIConstants.TIER_APPLICATION_TYPE) {
tierMap = APIUtil.getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_APP, tenantIdFromUsername);
} else {
throw new APIManagementException("No such a tier type : " + tierType);
}
tiers.addAll(tierMap.values());
}
return tiers;
} | Set<Tier> function(int tierType, String username) throws APIManagementException { Set<Tier> tiers = new TreeSet<Tier>(new TierNameComparator()); String tenantDomain = MultitenantUtils.getTenantDomain(username); Map<String, Tier> tierMap; if(!APIUtil.isAdvanceThrottlingEnabled()) { tierMap = APIUtil.getTiers(tierType, tenantDomain); tiers.addAll(tierMap.values()); } else { int tenantIdFromUsername = APIUtil.getTenantId(username); if (tierType == APIConstants.TIER_API_TYPE) { tierMap = APIUtil.getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_SUB, tenantIdFromUsername); } else if (tierType == APIConstants.TIER_RESOURCE_TYPE) { tierMap = APIUtil.getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_API, tenantIdFromUsername); } else if (tierType == APIConstants.TIER_APPLICATION_TYPE) { tierMap = APIUtil.getTiersFromPolicies(PolicyConstants.POLICY_LEVEL_APP, tenantIdFromUsername); } else { throw new APIManagementException(STR + tierType); } tiers.addAll(tierMap.values()); } return tiers; } | /**
* Returns a list of pre-defined # {@link org.wso2.carbon.apimgt.api.model.Tier} in the system.
*
* @param tierType type of the tiers (api,resource ot application)
* @param username current logged user
* @return Set<Tier> return list of tier names
* @throws APIManagementException APIManagementException if failed to get the predefined tiers
*/ | Returns a list of pre-defined # <code>org.wso2.carbon.apimgt.api.model.Tier</code> in the system | getTiers | {
"repo_name": "dhanuka84/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/AbstractAPIManager.java",
"license": "apache-2.0",
"size": 72559
} | [
"java.util.Map",
"java.util.Set",
"java.util.TreeSet",
"org.wso2.carbon.apimgt.api.APIManagementException",
"org.wso2.carbon.apimgt.api.model.Tier",
"org.wso2.carbon.apimgt.api.model.policy.PolicyConstants",
"org.wso2.carbon.apimgt.impl.utils.APIUtil",
"org.wso2.carbon.apimgt.impl.utils.TierNameComparator",
"org.wso2.carbon.utils.multitenancy.MultitenantUtils"
] | import java.util.Map; import java.util.Set; import java.util.TreeSet; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.api.model.policy.PolicyConstants; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.apimgt.impl.utils.TierNameComparator; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; | import java.util.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.api.model.*; import org.wso2.carbon.apimgt.api.model.policy.*; import org.wso2.carbon.apimgt.impl.utils.*; import org.wso2.carbon.utils.multitenancy.*; | [
"java.util",
"org.wso2.carbon"
] | java.util; org.wso2.carbon; | 2,398,206 |
StorageAsset getPath(); | StorageAsset getPath(); | /**
* The path where the index is stored.
* @return
*/ | The path where the index is stored | getPath | {
"repo_name": "apache/archiva",
"path": "archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexingContext.java",
"license": "apache-2.0",
"size": 4069
} | [
"org.apache.archiva.repository.storage.StorageAsset"
] | import org.apache.archiva.repository.storage.StorageAsset; | import org.apache.archiva.repository.storage.*; | [
"org.apache.archiva"
] | org.apache.archiva; | 1,289,571 |
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
java.awt.GridBagConstraints gridBagConstraints;
filterPane = new org.sleuthkit.autopsy.geolocation.HidingPane();
statusBar = new javax.swing.JPanel();
reportButton = new javax.swing.JButton();
progressBar = new javax.swing.JProgressBar();
coordLabel = new javax.swing.JLabel();
mapPanel = new org.sleuthkit.autopsy.geolocation.MapPanel();
setLayout(new java.awt.BorderLayout());
add(filterPane, java.awt.BorderLayout.WEST);
statusBar.setLayout(new java.awt.GridBagLayout()); | @SuppressWarnings(STR) void function() { java.awt.GridBagConstraints gridBagConstraints; filterPane = new org.sleuthkit.autopsy.geolocation.HidingPane(); statusBar = new javax.swing.JPanel(); reportButton = new javax.swing.JButton(); progressBar = new javax.swing.JProgressBar(); coordLabel = new javax.swing.JLabel(); mapPanel = new org.sleuthkit.autopsy.geolocation.MapPanel(); setLayout(new java.awt.BorderLayout()); add(filterPane, java.awt.BorderLayout.WEST); statusBar.setLayout(new java.awt.GridBagLayout()); | /**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/ | This method is called from within the constructor to initialize the form. regenerated by the Form Editor | initComponents | {
"repo_name": "esaunders/autopsy",
"path": "Core/src/org/sleuthkit/autopsy/geolocation/GeolocationTopComponent.java",
"license": "apache-2.0",
"size": 21123
} | [
"java.awt.BorderLayout"
] | import java.awt.BorderLayout; | import java.awt.*; | [
"java.awt"
] | java.awt; | 1,137,294 |
@Override
public Cursor query(Uri uri, String[] projection, String selection,
String[] selectionArgs, String sortOrder) {
// Open a read-only database.
SQLiteDatabase db = myOpenHelper.getWritableDatabase();
// Replace these with valid SQL statements if necessary.
String groupBy = null;
String having = null;
SQLiteQueryBuilder queryBuilder = new SQLiteQueryBuilder();
queryBuilder.setTables(MySQLiteOpenHelper.DATABASE_TABLE);
// If this is a row query, limit the result set to the passed in row.
switch (uriMatcher.match(uri)) {
case SINGLE_ROW :
String rowID = uri.getPathSegments().get(1);
queryBuilder.appendWhere(KEY_ID + "=" + rowID);
break;
case SEARCH :
String query = uri.getPathSegments().get(1);
queryBuilder.appendWhere(KEY_SEARCH_COLUMN +
" LIKE \"%" + query + "%\"");
queryBuilder.setProjectionMap(SEARCH_SUGGEST_PROJECTION_MAP);
break;
default: break;
}
Cursor cursor = queryBuilder.query(db, projection, selection,
selectionArgs, groupBy, having, sortOrder);
return cursor;
}
//
//
// [NOTE: This searchable definition belongs (and can be found) in
// res/xml/searchablewithsuggestions.xml]
//
// <?xml version="1.0" encoding="utf-8"?>
// <searchable xmlns:android="http://schemas.android.com/apk/res/android"
// android:label="@string/app_name"
// android:searchSuggestAuthority=
// "com.paad.skeletonsearchabledatabaseprovider"
// android:searchSuggestIntentAction="android.intent.action.VIEW"
// android:searchSuggestIntentData=
// "content://com.paad.skeletonsearchabledatabaseprovider/elements">
// </searchable> | Cursor function(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { SQLiteDatabase db = myOpenHelper.getWritableDatabase(); String groupBy = null; String having = null; SQLiteQueryBuilder queryBuilder = new SQLiteQueryBuilder(); queryBuilder.setTables(MySQLiteOpenHelper.DATABASE_TABLE); switch (uriMatcher.match(uri)) { case SINGLE_ROW : String rowID = uri.getPathSegments().get(1); queryBuilder.appendWhere(KEY_ID + "=" + rowID); break; case SEARCH : String query = uri.getPathSegments().get(1); queryBuilder.appendWhere(KEY_SEARCH_COLUMN + STR%STR%\""); queryBuilder.setProjectionMap(SEARCH_SUGGEST_PROJECTION_MAP); break; default: break; } Cursor cursor = queryBuilder.query(db, projection, selection, selectionArgs, groupBy, having, sortOrder); return cursor; } | /**
* Listing 8-33: Returning search suggestions for a query
*/ | Listing 8-33: Returning search suggestions for a query | query | {
"repo_name": "Izek/pa4ad",
"path": "PA4AP/Code Snippets/Chapter 8/PA4AD_Ch08_DatabaseSkeleton/src/com/paad/DatabaseSkeleton/MySearchSuggestionsContentProvider.java",
"license": "gpl-3.0",
"size": 10366
} | [
"android.database.Cursor",
"android.database.sqlite.SQLiteDatabase",
"android.database.sqlite.SQLiteQueryBuilder",
"android.net.Uri"
] | import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteQueryBuilder; import android.net.Uri; | import android.database.*; import android.database.sqlite.*; import android.net.*; | [
"android.database",
"android.net"
] | android.database; android.net; | 1,853,268 |
@Test
public void testPreviousStandardDateMonthA() {
MyDateAxis axis = new MyDateAxis("Month");
Month nov2006 = new Month(11, 2006);
Month dec2006 = new Month(12, 2006);
// five dates to check...
Date d0 = new Date(nov2006.getFirstMillisecond());
Date d1 = new Date(nov2006.getFirstMillisecond() + 500L);
Date d2 = new Date(nov2006.getMiddleMillisecond());
Date d3 = new Date(nov2006.getMiddleMillisecond() + 500L);
Date d4 = new Date(nov2006.getLastMillisecond());
Date end = new Date(dec2006.getLastMillisecond());
DateTickUnit unit = new DateTickUnit(DateTickUnit.MONTH, 1);
axis.setTickUnit(unit);
// START: check d0 and d1
axis.setTickMarkPosition(DateTickMarkPosition.START);
axis.setRange(d0, end);
Date psd = axis.previousStandardDate(d0, unit);
Date nsd = unit.addToDate(psd);
assertTrue(psd.getTime() < d0.getTime());
assertTrue(nsd.getTime() >= d0.getTime());
axis.setRange(d1, end);
psd = axis.previousStandardDate(d1, unit);
nsd = unit.addToDate(psd);
assertTrue(psd.getTime() < d1.getTime());
assertTrue(nsd.getTime() >= d1.getTime());
// MIDDLE: check d1, d2 and d3
axis.setTickMarkPosition(DateTickMarkPosition.MIDDLE);
axis.setRange(d1, end);
psd = axis.previousStandardDate(d1, unit);
nsd = unit.addToDate(psd);
assertTrue(psd.getTime() < d1.getTime());
assertTrue(nsd.getTime() >= d1.getTime());
axis.setRange(d2, end);
psd = axis.previousStandardDate(d2, unit);
nsd = unit.addToDate(psd);
assertTrue(psd.getTime() < d2.getTime());
assertTrue(nsd.getTime() >= d2.getTime());
axis.setRange(d3, end);
psd = axis.previousStandardDate(d3, unit);
nsd = unit.addToDate(psd);
assertTrue(psd.getTime() < d3.getTime());
assertTrue(nsd.getTime() >= d3.getTime());
// END: check d3 and d4
axis.setTickMarkPosition(DateTickMarkPosition.END);
axis.setRange(d3, end);
psd = axis.previousStandardDate(d3, unit);
nsd = unit.addToDate(psd);
assertTrue(psd.getTime() < d3.getTime());
assertTrue(nsd.getTime() >= d3.getTime());
axis.setRange(d4, end);
psd = axis.previousStandardDate(d4, unit);
nsd = unit.addToDate(psd);
assertTrue(psd.getTime() < d4.getTime());
assertTrue(nsd.getTime() >= d4.getTime());
} | void function() { MyDateAxis axis = new MyDateAxis("Month"); Month nov2006 = new Month(11, 2006); Month dec2006 = new Month(12, 2006); Date d0 = new Date(nov2006.getFirstMillisecond()); Date d1 = new Date(nov2006.getFirstMillisecond() + 500L); Date d2 = new Date(nov2006.getMiddleMillisecond()); Date d3 = new Date(nov2006.getMiddleMillisecond() + 500L); Date d4 = new Date(nov2006.getLastMillisecond()); Date end = new Date(dec2006.getLastMillisecond()); DateTickUnit unit = new DateTickUnit(DateTickUnit.MONTH, 1); axis.setTickUnit(unit); axis.setTickMarkPosition(DateTickMarkPosition.START); axis.setRange(d0, end); Date psd = axis.previousStandardDate(d0, unit); Date nsd = unit.addToDate(psd); assertTrue(psd.getTime() < d0.getTime()); assertTrue(nsd.getTime() >= d0.getTime()); axis.setRange(d1, end); psd = axis.previousStandardDate(d1, unit); nsd = unit.addToDate(psd); assertTrue(psd.getTime() < d1.getTime()); assertTrue(nsd.getTime() >= d1.getTime()); axis.setTickMarkPosition(DateTickMarkPosition.MIDDLE); axis.setRange(d1, end); psd = axis.previousStandardDate(d1, unit); nsd = unit.addToDate(psd); assertTrue(psd.getTime() < d1.getTime()); assertTrue(nsd.getTime() >= d1.getTime()); axis.setRange(d2, end); psd = axis.previousStandardDate(d2, unit); nsd = unit.addToDate(psd); assertTrue(psd.getTime() < d2.getTime()); assertTrue(nsd.getTime() >= d2.getTime()); axis.setRange(d3, end); psd = axis.previousStandardDate(d3, unit); nsd = unit.addToDate(psd); assertTrue(psd.getTime() < d3.getTime()); assertTrue(nsd.getTime() >= d3.getTime()); axis.setTickMarkPosition(DateTickMarkPosition.END); axis.setRange(d3, end); psd = axis.previousStandardDate(d3, unit); nsd = unit.addToDate(psd); assertTrue(psd.getTime() < d3.getTime()); assertTrue(nsd.getTime() >= d3.getTime()); axis.setRange(d4, end); psd = axis.previousStandardDate(d4, unit); nsd = unit.addToDate(psd); assertTrue(psd.getTime() < d4.getTime()); assertTrue(nsd.getTime() >= d4.getTime()); } | /**
* A basic check for the testPreviousStandardDate() method when the
* tick unit is 1 month.
*/ | A basic check for the testPreviousStandardDate() method when the tick unit is 1 month | testPreviousStandardDateMonthA | {
"repo_name": "Mr-Steve/LTSpice_Library_Manager",
"path": "libs/jfreechart-1.0.16/tests/org/jfree/chart/axis/DateAxisTest.java",
"license": "gpl-2.0",
"size": 42457
} | [
"java.util.Date",
"org.jfree.data.time.Month",
"org.junit.Assert"
] | import java.util.Date; import org.jfree.data.time.Month; import org.junit.Assert; | import java.util.*; import org.jfree.data.time.*; import org.junit.*; | [
"java.util",
"org.jfree.data",
"org.junit"
] | java.util; org.jfree.data; org.junit; | 1,316,965 |
@Test
public void testNoBufferingOrBlockingOfSequence() throws Throwable {
final CountDownLatch finished = new CountDownLatch(1);
final int COUNT = 30;
final CountDownLatch timeHasPassed = new CountDownLatch(COUNT);
final AtomicBoolean running = new AtomicBoolean(true);
final AtomicInteger count = new AtomicInteger(0);
final Observable<Integer> obs = Observable.create(new Observable.OnSubscribe<Integer>() { | void function() throws Throwable { final CountDownLatch finished = new CountDownLatch(1); final int COUNT = 30; final CountDownLatch timeHasPassed = new CountDownLatch(COUNT); final AtomicBoolean running = new AtomicBoolean(true); final AtomicInteger count = new AtomicInteger(0); final Observable<Integer> obs = Observable.create(new Observable.OnSubscribe<Integer>() { | /**
* Confirm that no buffering or blocking of the Observable onNext calls occurs and it just grabs the next emitted value.
* <p/>
* This results in output such as => a: 1 b: 2 c: 89
*
* @throws Throwable
*/ | Confirm that no buffering or blocking of the Observable onNext calls occurs and it just grabs the next emitted value. This results in output such as => a: 1 b: 2 c: 89 | testNoBufferingOrBlockingOfSequence | {
"repo_name": "akarnokd/RxJavaFlow",
"path": "src/test/java/rx/internal/operators/BlockingOperatorNextTest.java",
"license": "apache-2.0",
"size": 10621
} | [
"java.util.concurrent.CountDownLatch",
"java.util.concurrent.atomic.AtomicBoolean",
"java.util.concurrent.atomic.AtomicInteger"
] | import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; | import java.util.concurrent.*; import java.util.concurrent.atomic.*; | [
"java.util"
] | java.util; | 141,611 |
private SplitFileInserterSegment[] splitIntoSegments(int segmentSize, int crossCheckBlocks, int segCount, int deductBlocksFromSegments, Bucket[] origDataBlocks, Executor executor, ObjectContainer container, ClientContext context, boolean persistent, BaseClientPutter putter, byte cryptoAlgorithm, byte[] splitfileCryptoKey) {
int dataBlocks = origDataBlocks.length;
ArrayList<SplitFileInserterSegment> segs = new ArrayList<SplitFileInserterSegment>();
CompatibilityMode cmode = ctx.getCompatibilityMode();
// First split the data up
if(segCount == 1) {
// Single segment
SplitFileInserterSegment onlySeg = new SplitFileInserterSegment(this, persistent, realTimeFlag, putter, splitfileAlgorithm, crossCheckBlocks, FECCodec.getCheckBlocks(splitfileAlgorithm, origDataBlocks.length + crossCheckBlocks, cmode), origDataBlocks, ctx, getCHKOnly, 0, cryptoAlgorithm, splitfileCryptoKey, container);
segs.add(onlySeg);
} else {
int j = 0;
int segNo = 0;
int data = segmentSize;
int check = FECCodec.getCheckBlocks(splitfileAlgorithm, data + crossCheckBlocks, cmode);
for(int i=segmentSize;;) {
if(i > dataBlocks) i = dataBlocks;
if(data > (i-j)) {
// Last segment.
assert(segNo == segCount-1);
data = i-j;
check = FECCodec.getCheckBlocks(splitfileAlgorithm, data + crossCheckBlocks, cmode);
}
Bucket[] seg = new Bucket[i-j];
System.arraycopy(origDataBlocks, j, seg, 0, data);
j = i;
for(int x=0;x<seg.length;x++)
if(seg[x] == null) throw new NullPointerException("In splitIntoSegs: "+x+" is null of "+seg.length+" of "+segNo);
SplitFileInserterSegment s = new SplitFileInserterSegment(this, persistent, realTimeFlag, putter, splitfileAlgorithm, crossCheckBlocks, check, seg, ctx, getCHKOnly, segNo, cryptoAlgorithm, splitfileCryptoKey, container);
segs.add(s);
if(deductBlocksFromSegments != 0)
if(logMINOR) Logger.minor(this, "INSERTING: Segment "+segNo+" of "+segCount+" : "+data+" data blocks "+check+" check blocks");
segNo++;
if(i == dataBlocks) break;
// Deduct one block from each later segment, rather than having a really short last segment.
if(segCount - segNo == deductBlocksFromSegments) {
data--;
// Don't change check.
}
i += data;
}
assert(segNo == segCount);
}
if(persistent)
container.activate(parent, 1);
parent.notifyClients(container, context);
return segs.toArray(new SplitFileInserterSegment[segs.size()]);
} | SplitFileInserterSegment[] function(int segmentSize, int crossCheckBlocks, int segCount, int deductBlocksFromSegments, Bucket[] origDataBlocks, Executor executor, ObjectContainer container, ClientContext context, boolean persistent, BaseClientPutter putter, byte cryptoAlgorithm, byte[] splitfileCryptoKey) { int dataBlocks = origDataBlocks.length; ArrayList<SplitFileInserterSegment> segs = new ArrayList<SplitFileInserterSegment>(); CompatibilityMode cmode = ctx.getCompatibilityMode(); if(segCount == 1) { SplitFileInserterSegment onlySeg = new SplitFileInserterSegment(this, persistent, realTimeFlag, putter, splitfileAlgorithm, crossCheckBlocks, FECCodec.getCheckBlocks(splitfileAlgorithm, origDataBlocks.length + crossCheckBlocks, cmode), origDataBlocks, ctx, getCHKOnly, 0, cryptoAlgorithm, splitfileCryptoKey, container); segs.add(onlySeg); } else { int j = 0; int segNo = 0; int data = segmentSize; int check = FECCodec.getCheckBlocks(splitfileAlgorithm, data + crossCheckBlocks, cmode); for(int i=segmentSize;;) { if(i > dataBlocks) i = dataBlocks; if(data > (i-j)) { assert(segNo == segCount-1); data = i-j; check = FECCodec.getCheckBlocks(splitfileAlgorithm, data + crossCheckBlocks, cmode); } Bucket[] seg = new Bucket[i-j]; System.arraycopy(origDataBlocks, j, seg, 0, data); j = i; for(int x=0;x<seg.length;x++) if(seg[x] == null) throw new NullPointerException(STR+x+STR+seg.length+STR+segNo); SplitFileInserterSegment s = new SplitFileInserterSegment(this, persistent, realTimeFlag, putter, splitfileAlgorithm, crossCheckBlocks, check, seg, ctx, getCHKOnly, segNo, cryptoAlgorithm, splitfileCryptoKey, container); segs.add(s); if(deductBlocksFromSegments != 0) if(logMINOR) Logger.minor(this, STR+segNo+STR+segCount+STR+data+STR+check+STR); segNo++; if(i == dataBlocks) break; if(segCount - segNo == deductBlocksFromSegments) { data--; } i += data; } assert(segNo == segCount); } if(persistent) container.activate(parent, 1); parent.notifyClients(container, context); return 
segs.toArray(new SplitFileInserterSegment[segs.size()]); } | /**
* Group the blocks into segments.
* @param deductBlocksFromSegments
*/ | Group the blocks into segments | splitIntoSegments | {
"repo_name": "NiteshBharadwaj/android-staging",
"path": "src/freenet/client/async/SplitFileInserter.java",
"license": "gpl-2.0",
"size": 29582
} | [
"com.db4o.ObjectContainer",
"java.util.ArrayList"
] | import com.db4o.ObjectContainer; import java.util.ArrayList; | import com.db4o.*; import java.util.*; | [
"com.db4o",
"java.util"
] | com.db4o; java.util; | 1,646,732 |
public void unlock() throws IOException {
if (this.lock == null)
return;
this.lock.release();
lock.channel().close();
lock = null;
} | void function() throws IOException { if (this.lock == null) return; this.lock.release(); lock.channel().close(); lock = null; } | /**
* Unlock storage.
*
* @throws IOException
*/ | Unlock storage | unlock | {
"repo_name": "songweijia/fffs",
"path": "sources/hadoop-2.4.1-src/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java",
"license": "apache-2.0",
"size": 35481
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,333,903 |
Set<INamespaceDefinition> getNamespaceSet(); | Set<INamespaceDefinition> getNamespaceSet(); | /**
* Gets the set of namespaces.
*
* @return A set of namespace definitions.
*/ | Gets the set of namespaces | getNamespaceSet | {
"repo_name": "adufilie/flex-falcon",
"path": "compiler/src/org/apache/flex/compiler/definitions/IQualifiers.java",
"license": "apache-2.0",
"size": 1903
} | [
"java.util.Set"
] | import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 134,553 |
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
response.setContentType("text/html;charset=UTF-8");
String operacao = request.getParameter("operacao");
try {
switch (operacao){
case "CriarNovo":
this.criarNovo(request, response);
break;
case "Cadastrar":
this.cadastrar(request, response);
break;
case "Listar":
this.listar(request, response);
break;
case "Excluir":
this.excluir(request, response);
break;
case "Editar":
this.editar(request, response);
break;
}
} catch (Exception ex){
ex.printStackTrace();
}
} | void function(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType(STR); String operacao = request.getParameter(STR); try { switch (operacao){ case STR: this.criarNovo(request, response); break; case STR: this.cadastrar(request, response); break; case STR: this.listar(request, response); break; case STR: this.excluir(request, response); break; case STR: this.editar(request, response); break; } } catch (Exception ex){ ex.printStackTrace(); } } | /**
* Processes requests for both HTTP <code>GET</code> and <code>POST</code>
* methods.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/ | Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods | processRequest | {
"repo_name": "Ronneesley/redesocial",
"path": "codigo/RedeSocialWeb/src/java/br/com/redesocial/controle/CategoriaControle.java",
"license": "mit",
"size": 7258
} | [
"java.io.IOException",
"javax.servlet.ServletException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse"
] | import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; | import java.io.*; import javax.servlet.*; import javax.servlet.http.*; | [
"java.io",
"javax.servlet"
] | java.io; javax.servlet; | 1,905,766 |
@Test
public void testChangeParentFromNullToNull() {
final TreeNode<T> newNode = newNode(newData());
final boolean expectedResult = false;
final boolean actualResult = newNode.changeParent(null);
Assert.assertEquals("changeParent returned wrong result", expectedResult, actualResult);
final TreeNode<T> expected = null;
final TreeNode<T> actual = newNode.getParent();
Assert.assertEquals("new parent is not null", expected, actual);
} | void function() { final TreeNode<T> newNode = newNode(newData()); final boolean expectedResult = false; final boolean actualResult = newNode.changeParent(null); Assert.assertEquals(STR, expectedResult, actualResult); final TreeNode<T> expected = null; final TreeNode<T> actual = newNode.getParent(); Assert.assertEquals(STR, expected, actual); } | /**
* Tests {@link TreeNode#changeParent(TreeNode)} from null to null (so from no parent to no parent).
* This should have no effect.
*/ | Tests <code>TreeNode#changeParent(TreeNode)</code> from null to null (so from no parent to no parent). This should have no effect | testChangeParentFromNullToNull | {
"repo_name": "cosmocode/cosmocode-commons",
"path": "src/test/java/de/cosmocode/collections/tree/TreeNodeTest.java",
"license": "apache-2.0",
"size": 39298
} | [
"junit.framework.Assert"
] | import junit.framework.Assert; | import junit.framework.*; | [
"junit.framework"
] | junit.framework; | 787,240 |
void deleteActions(Date endDate); | void deleteActions(Date endDate); | /**
* Purges all actions up to the specified end date.
*
* @param endDate The end date
*/ | Purges all actions up to the specified end date | deleteActions | {
"repo_name": "mattyb149/nifi",
"path": "nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/NiFiServiceFacade.java",
"license": "apache-2.0",
"size": 87066
} | [
"java.util.Date"
] | import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 2,231,596 |
public List<TocItem> getChildren() {
return this.children;
}
| List<TocItem> function() { return this.children; } | /**
* Gets the children.
* @return the children
*/ | Gets the children | getChildren | {
"repo_name": "usgin/usgin-geoportal",
"path": "src/com/esri/gpt/control/search/browse/TocItem.java",
"license": "apache-2.0",
"size": 10976
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,113,568 |
public TemplatizedType createTemplatizedType(
ObjectType baseType, JSType... templatizedTypes) {
return createTemplatizedType(
baseType, ImmutableList.copyOf(templatizedTypes));
} | TemplatizedType function( ObjectType baseType, JSType... templatizedTypes) { return createTemplatizedType( baseType, ImmutableList.copyOf(templatizedTypes)); } | /**
* Creates a templatized instance of the specified type. Only ObjectTypes
* can currently be templatized; extend the logic in this function when
* more types can be templatized.
* @param baseType the type to be templatized.
* @param templatizedTypes a list of the template JSTypes. Will be matched by
* list order to the template keys on the base type.
*/ | Creates a templatized instance of the specified type. Only ObjectTypes can currently be templatized; extend the logic in this function when more types can be templatized | createTemplatizedType | {
"repo_name": "Medium/closure-compiler",
"path": "src/com/google/javascript/rhino/jstype/JSTypeRegistry.java",
"license": "apache-2.0",
"size": 66964
} | [
"com.google.common.collect.ImmutableList"
] | import com.google.common.collect.ImmutableList; | import com.google.common.collect.*; | [
"com.google.common"
] | com.google.common; | 2,360,313 |
public static void setPartyLeader(String playerName, Party party) {
String leaderName = party.getLeader();
for (Player member : party.getOnlineMembers()) {
String memberName = member.getName();
if (memberName.equalsIgnoreCase(playerName)) {
member.sendMessage(LocaleLoader.getString("Party.Owner.Player"));
}
else if (memberName.equalsIgnoreCase(leaderName)) {
member.sendMessage(LocaleLoader.getString("Party.Owner.NotLeader"));
}
else {
member.sendMessage(LocaleLoader.getString("Party.Owner.New", playerName));
}
}
party.setLeader(playerName);
} | static void function(String playerName, Party party) { String leaderName = party.getLeader(); for (Player member : party.getOnlineMembers()) { String memberName = member.getName(); if (memberName.equalsIgnoreCase(playerName)) { member.sendMessage(LocaleLoader.getString(STR)); } else if (memberName.equalsIgnoreCase(leaderName)) { member.sendMessage(LocaleLoader.getString(STR)); } else { member.sendMessage(LocaleLoader.getString(STR, playerName)); } } party.setLeader(playerName); } | /**
* Set the leader of a party.
*
* @param playerName The name of the player to set as leader
* @param party The party
*/ | Set the leader of a party | setPartyLeader | {
"repo_name": "losu/SoftM-mcMMO",
"path": "src/main/java/com/gmail/nossr50/party/PartyManager.java",
"license": "agpl-3.0",
"size": 22374
} | [
"com.gmail.nossr50.datatypes.party.Party",
"com.gmail.nossr50.locale.LocaleLoader",
"org.bukkit.entity.Player"
] | import com.gmail.nossr50.datatypes.party.Party; import com.gmail.nossr50.locale.LocaleLoader; import org.bukkit.entity.Player; | import com.gmail.nossr50.datatypes.party.*; import com.gmail.nossr50.locale.*; import org.bukkit.entity.*; | [
"com.gmail.nossr50",
"org.bukkit.entity"
] | com.gmail.nossr50; org.bukkit.entity; | 426,143 |
public int join() throws IOException, InterruptedException {
return start().join();
} | int function() throws IOException, InterruptedException { return start().join(); } | /**
* Starts the process and waits for its completion.
*/ | Starts the process and waits for its completion | join | {
"repo_name": "stefanbrausch/hudson-main",
"path": "core/src/main/java/hudson/Launcher.java",
"license": "mit",
"size": 33125
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,273,346 |
public boolean canProvidePower(IBlockState state)
{
return true;
} | boolean function(IBlockState state) { return true; } | /**
* Can this block provide power. Only wire currently seems to have this change based on its state.
*/ | Can this block provide power. Only wire currently seems to have this change based on its state | canProvidePower | {
"repo_name": "TheGreatAndPowerfulWeegee/wipunknown",
"path": "build/tmp/recompileMc/sources/net/minecraft/block/BlockRedstoneDiode.java",
"license": "gpl-3.0",
"size": 12000
} | [
"net.minecraft.block.state.IBlockState"
] | import net.minecraft.block.state.IBlockState; | import net.minecraft.block.state.*; | [
"net.minecraft.block"
] | net.minecraft.block; | 1,050,823 |
protected static ModelPortType createModelPort(String username, String password, String passwordType) {
String endpoint = ENDPOINT;
if (System.getProperty("midpoint.endpoint") != null) {
endpoint = System.getProperty("midpoint.endpoint");
}
LOGGER.info("Creating model client endpoint: {} , username={}, password={}",
new Object[] {endpoint, username, password});
ModelService modelService = new ModelService();
ModelPortType modelPort = modelService.getModelPort();
BindingProvider bp = (BindingProvider)modelPort;
Map<String, Object> requestContext = bp.getRequestContext();
requestContext.put(BindingProvider.ENDPOINT_ADDRESS_PROPERTY, endpoint);
org.apache.cxf.endpoint.Client client = ClientProxy.getClient(modelPort);
org.apache.cxf.endpoint.Endpoint cxfEndpoint = client.getEndpoint();
Map<String, Object> outProps = new HashMap<String, Object>();
if (username != null) {
outProps.put(WSHandlerConstants.ACTION, WSHandlerConstants.USERNAME_TOKEN);
outProps.put(WSHandlerConstants.USER, username);
outProps.put(WSHandlerConstants.PASSWORD_TYPE, passwordType);
ClientPasswordHandler.setPassword(password);
outProps.put(WSHandlerConstants.PW_CALLBACK_CLASS, ClientPasswordHandler.class.getName());
WSS4JOutInterceptor wssOut = new WSS4JOutInterceptor(outProps);
cxfEndpoint.getOutInterceptors().add(wssOut);
}
cxfEndpoint.getInInterceptors().add(new LoggingInInterceptor());
cxfEndpoint.getOutInterceptors().add(new LoggingOutInterceptor());
return modelPort;
}
| static ModelPortType function(String username, String password, String passwordType) { String endpoint = ENDPOINT; if (System.getProperty(STR) != null) { endpoint = System.getProperty(STR); } LOGGER.info(STR, new Object[] {endpoint, username, password}); ModelService modelService = new ModelService(); ModelPortType modelPort = modelService.getModelPort(); BindingProvider bp = (BindingProvider)modelPort; Map<String, Object> requestContext = bp.getRequestContext(); requestContext.put(BindingProvider.ENDPOINT_ADDRESS_PROPERTY, endpoint); org.apache.cxf.endpoint.Client client = ClientProxy.getClient(modelPort); org.apache.cxf.endpoint.Endpoint cxfEndpoint = client.getEndpoint(); Map<String, Object> outProps = new HashMap<String, Object>(); if (username != null) { outProps.put(WSHandlerConstants.ACTION, WSHandlerConstants.USERNAME_TOKEN); outProps.put(WSHandlerConstants.USER, username); outProps.put(WSHandlerConstants.PASSWORD_TYPE, passwordType); ClientPasswordHandler.setPassword(password); outProps.put(WSHandlerConstants.PW_CALLBACK_CLASS, ClientPasswordHandler.class.getName()); WSS4JOutInterceptor wssOut = new WSS4JOutInterceptor(outProps); cxfEndpoint.getOutInterceptors().add(wssOut); } cxfEndpoint.getInInterceptors().add(new LoggingInInterceptor()); cxfEndpoint.getOutInterceptors().add(new LoggingOutInterceptor()); return modelPort; } | /**
* Creates webservice client connecting to midpoint
* */ | Creates webservice client connecting to midpoint | createModelPort | {
"repo_name": "Pardus-Engerek/engerek",
"path": "testing/wstest/src/test/java/com/evolveum/midpoint/testing/wstest/AbstractWebserviceTest.java",
"license": "apache-2.0",
"size": 34670
} | [
"com.evolveum.midpoint.xml.ns._public.model.model_3.ModelPortType",
"com.evolveum.midpoint.xml.ns._public.model.model_3.ModelService",
"java.util.HashMap",
"java.util.Map",
"javax.xml.ws.BindingProvider",
"org.apache.cxf.frontend.ClientProxy",
"org.apache.cxf.interceptor.LoggingInInterceptor",
"org.apache.cxf.interceptor.LoggingOutInterceptor",
"org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor",
"org.apache.wss4j.dom.handler.WSHandlerConstants"
] | import com.evolveum.midpoint.xml.ns._public.model.model_3.ModelPortType; import com.evolveum.midpoint.xml.ns._public.model.model_3.ModelService; import java.util.HashMap; import java.util.Map; import javax.xml.ws.BindingProvider; import org.apache.cxf.frontend.ClientProxy; import org.apache.cxf.interceptor.LoggingInInterceptor; import org.apache.cxf.interceptor.LoggingOutInterceptor; import org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor; import org.apache.wss4j.dom.handler.WSHandlerConstants; | import com.evolveum.midpoint.xml.ns._public.model.model_3.*; import java.util.*; import javax.xml.ws.*; import org.apache.cxf.frontend.*; import org.apache.cxf.interceptor.*; import org.apache.cxf.ws.security.wss4j.*; import org.apache.wss4j.dom.handler.*; | [
"com.evolveum.midpoint",
"java.util",
"javax.xml",
"org.apache.cxf",
"org.apache.wss4j"
] | com.evolveum.midpoint; java.util; javax.xml; org.apache.cxf; org.apache.wss4j; | 2,033,526 |
public final DataTypeDescriptor getAggregator(DataTypeDescriptor inputType,
StringBuilder aggregatorClass, boolean isAvg)
throws StandardException
{
aggregatorClass.append( ClassName.CountAggregator);
// GemStone changes BEGIN
//lets make room for distinct aggregate column value transportation.
// Also, If all tables are replicated, no special processing on Data Node
CompilerContext cc = (CompilerContext)
ContextService.getContext(CompilerContext.CONTEXT_ID);
if( !cc.createQueryInfo() && !cc.allTablesAreReplicatedOnRemote() && distinct) {
TypeId cti = TypeId.getUserDefinedTypeId(
com.pivotal.gemfirexd.internal.engine.sql.compile.types.DVDSet.class.getName(),
DataTypeDescriptor.getBuiltInDataTypeDescriptor(java.sql.Types.INTEGER, false),
false);
return new DataTypeDescriptor(cti, false);
}
// #42682 okay we have count(*), higher up in SelectNode will decide for Region.size().
if(inputType == null && !distinct) {
regionSizeConvertible = true;
}
// GemStone changes END
return DataTypeDescriptor.getBuiltInDataTypeDescriptor(java.sql.Types.INTEGER, false);
} | final DataTypeDescriptor function(DataTypeDescriptor inputType, StringBuilder aggregatorClass, boolean isAvg) throws StandardException { aggregatorClass.append( ClassName.CountAggregator); CompilerContext cc = (CompilerContext) ContextService.getContext(CompilerContext.CONTEXT_ID); if( !cc.createQueryInfo() && !cc.allTablesAreReplicatedOnRemote() && distinct) { TypeId cti = TypeId.getUserDefinedTypeId( com.pivotal.gemfirexd.internal.engine.sql.compile.types.DVDSet.class.getName(), DataTypeDescriptor.getBuiltInDataTypeDescriptor(java.sql.Types.INTEGER, false), false); return new DataTypeDescriptor(cti, false); } if(inputType == null && !distinct) { regionSizeConvertible = true; } return DataTypeDescriptor.getBuiltInDataTypeDescriptor(java.sql.Types.INTEGER, false); } | /**
* Determines the result datatype. We can run
* count() on anything, and it always returns a
* INTEGER (java.lang.Integer).
*
* @param inputType the input type, either a user type or a java.lang object
*
* @return the output Class (null if cannot operate on
* value expression of this type.
*/ | Determines the result datatype. We can run count() on anything, and it always returns a INTEGER (java.lang.Integer) | getAggregator | {
"repo_name": "papicella/snappy-store",
"path": "gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/impl/sql/compile/CountAggregateDefinition.java",
"license": "apache-2.0",
"size": 4532
} | [
"com.pivotal.gemfirexd.internal.iapi.error.StandardException",
"com.pivotal.gemfirexd.internal.iapi.reference.ClassName",
"com.pivotal.gemfirexd.internal.iapi.services.context.ContextService",
"com.pivotal.gemfirexd.internal.iapi.sql.compile.CompilerContext",
"com.pivotal.gemfirexd.internal.iapi.types.DataTypeDescriptor",
"com.pivotal.gemfirexd.internal.iapi.types.TypeId"
] | import com.pivotal.gemfirexd.internal.iapi.error.StandardException; import com.pivotal.gemfirexd.internal.iapi.reference.ClassName; import com.pivotal.gemfirexd.internal.iapi.services.context.ContextService; import com.pivotal.gemfirexd.internal.iapi.sql.compile.CompilerContext; import com.pivotal.gemfirexd.internal.iapi.types.DataTypeDescriptor; import com.pivotal.gemfirexd.internal.iapi.types.TypeId; | import com.pivotal.gemfirexd.internal.iapi.error.*; import com.pivotal.gemfirexd.internal.iapi.reference.*; import com.pivotal.gemfirexd.internal.iapi.services.context.*; import com.pivotal.gemfirexd.internal.iapi.sql.compile.*; import com.pivotal.gemfirexd.internal.iapi.types.*; | [
"com.pivotal.gemfirexd"
] | com.pivotal.gemfirexd; | 1,251,319 |
public Geometry withGeometryObjectExtension(final List<AbstractObject> geometryObjectExtension) {
this.setGeometryObjectExtension(geometryObjectExtension);
return this;
} | Geometry function(final List<AbstractObject> geometryObjectExtension) { this.setGeometryObjectExtension(geometryObjectExtension); return this; } | /**
* fluent setter
* @see #setGeometryObjectExtension(List<AbstractObject>)
*
* @param geometryObjectExtension
* required parameter
*/ | fluent setter | withGeometryObjectExtension | {
"repo_name": "micromata/javaapiforkml",
"path": "src/main/java/de/micromata/opengis/kml/v_2_2_0/Geometry.java",
"license": "bsd-3-clause",
"size": 9214
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 607,356 |
Long getCharOctLenAct() {
switch (type) {
case Types.CHAR :
case Types.LONGVARCHAR :
case Types.VARCHAR :
return ValuePool.getLong(2L * Integer.MAX_VALUE);
case Types.CLOB :
return ValuePool.getLong(Long.MAX_VALUE);
default :
return null;
}
} | Long getCharOctLenAct() { switch (type) { case Types.CHAR : case Types.LONGVARCHAR : case Types.VARCHAR : return ValuePool.getLong(2L * Integer.MAX_VALUE); case Types.CLOB : return ValuePool.getLong(Long.MAX_VALUE); default : return null; } } | /**
* Retrieves the maximum Long.MAX_VALUE bounded length, in bytes, for
* character types. <p>
*
* @return the maximum Long.MAX_VALUE bounded length, in
* bytes, for character types
*/ | Retrieves the maximum Long.MAX_VALUE bounded length, in bytes, for character types. | getCharOctLenAct | {
"repo_name": "ckaestne/LEADT",
"path": "workspace/hsqldb/src/org/hsqldb/DITypeInfo.java",
"license": "gpl-3.0",
"size": 36588
} | [
"org.hsqldb.store.ValuePool"
] | import org.hsqldb.store.ValuePool; | import org.hsqldb.store.*; | [
"org.hsqldb.store"
] | org.hsqldb.store; | 1,710,363 |
public static String doc2String(Document doc) {
try {
StringWriter sw = new StringWriter();
TransformerFactory tf = TransformerFactory.newInstance();
Transformer transformer = tf.newTransformer();
transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
transformer.setOutputProperty(OutputKeys.METHOD, "xml");
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
transformer.transform(new DOMSource(doc), new StreamResult(sw));
return sw.toString();
} catch (Exception ex) {
throw new RuntimeException("Error converting to String", ex);
}
}
| static String function(Document doc) { try { StringWriter sw = new StringWriter(); TransformerFactory tf = TransformerFactory.newInstance(); Transformer transformer = tf.newTransformer(); transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no"); transformer.setOutputProperty(OutputKeys.METHOD, "xml"); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8"); transformer.transform(new DOMSource(doc), new StreamResult(sw)); return sw.toString(); } catch (Exception ex) { throw new RuntimeException(STR, ex); } } | /**
* Method to convert document to string.
*
* @param doc
* @return document content as string
*/ | Method to convert document to string | doc2String | {
"repo_name": "treejames/GeoprocessingAppstore",
"path": "src/de/tudresden/gis/manage/xml/XmlHelpMethods.java",
"license": "apache-2.0",
"size": 10595
} | [
"java.io.StringWriter",
"javax.xml.transform.OutputKeys",
"javax.xml.transform.Transformer",
"javax.xml.transform.TransformerFactory",
"javax.xml.transform.dom.DOMSource",
"javax.xml.transform.stream.StreamResult",
"org.w3c.dom.Document"
] | import java.io.StringWriter; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.w3c.dom.Document; | import java.io.*; import javax.xml.transform.*; import javax.xml.transform.dom.*; import javax.xml.transform.stream.*; import org.w3c.dom.*; | [
"java.io",
"javax.xml",
"org.w3c.dom"
] | java.io; javax.xml; org.w3c.dom; | 1,830,565 |
/**
 * Adds a newly uploaded derivate file to an existing mets.xml document.
 * <p>
 * The file is appended to the matching {@code mets:fileGrp} of the file section and the
 * structural maps are updated via {@code updateOnImageFile} / {@code updateOnCustomFile},
 * depending on whether the file belongs to the MASTER file group.
 *
 * @param mets the mets document to modify in place
 * @param file the newly added file; its owner-relative path (leading '/' stripped) is used
 *             as the FLocat href
 * @return the modified {@code mets} document, or {@code null} if the file is already listed
 *         in the file section or any error occurred while updating
 */
private static Document updateOnFileAdd(Document mets, MCRPath file) {
    try {
        // URI-encode the derivate-relative path (substring(1) drops the leading '/')
        String relPath = MCRXMLFunctions.encodeURIPath(file.getOwnerRelativePath().substring(1), true);
        // If the file is already referenced by an FLocat href, there is nothing to do.
        // NOTE(review): relPath is URI-encoded, but a literal apostrophe surviving encoding
        // would break this single-quoted XPath predicate — confirm encodeURIPath escapes it.
        String fileExistPathString = "mets:mets/mets:fileSec/mets:fileGrp/mets:file/mets:FLocat[@xlink:href='"
            + relPath + "']";
        XPathExpression<Element> xpath = XPathFactory.instance().compile(fileExistPathString, Filters.element(),
            null, MCRConstants.METS_NAMESPACE, MCRConstants.XLINK_NAMESPACE);
        if (xpath.evaluate(mets).size() > 0) {
            // Already present: report and signal "no update" to the caller via null.
            String msgTemplate = "The File : '%s' already exists in mets.xml";
            LOGGER.warn(String.format(Locale.ROOT, msgTemplate, relPath));
            return null;
        } else {
            // NOTE(review): informational message, logged at warn level like the branch above.
            String msgTemplate = "The File : '%s' does not exists in mets.xml";
            LOGGER.warn(String.format(Locale.ROOT, msgTemplate, relPath));
        }
        // Add the file to the file section: id is "<use>_<basename>", e.g. "master_0001".
        String contentType = MCRContentTypes.probeContentType(file);
        LOGGER.warn("Content Type is : {}", contentType);
        String fileGrpUSE = getFileGroupUse(file);
        String fileId = new MessageFormat("{0}_{1}", Locale.ROOT)
            .format(new Object[] { fileGrpUSE.toLowerCase(Locale.ROOT), getFileBase(relPath) });
        File fileAsMetsFile = new File(fileId, contentType);
        FLocat fLocat = new FLocat(LOCTYPE.URL, relPath);
        fileAsMetsFile.setFLocat(fLocat);
        Element fileSec = getFileGroup(mets, fileGrpUSE);
        fileSec.addContent(fileAsMetsFile.asElement());
        // Keep the structural maps in sync with the new file entry.
        if (fileGrpUSE.equals(MCRMetsModelHelper.MASTER_USE)) {
            updateOnImageFile(mets, fileId, relPath);
        } else {
            updateOnCustomFile(mets, fileId, relPath);
        }
    } catch (Exception ex) {
        // Best-effort update: any failure is logged and reported to the caller as null,
        // leaving the stored mets.xml untouched.
        LOGGER.error("Error occured while adding file {} to the existing mets file", file, ex);
        return null;
    }
    return mets;
}
* Alters the mets file
*
* @param mets
* the unmodified source
* @param file
* the file to add
 * @return the modified mets or null if an exception occurs
*/ | Alters the mets file | updateOnFileAdd | {
"repo_name": "MyCoRe-Org/mycore",
"path": "mycore-mets/src/main/java/org/mycore/mets/tools/MCRMetsSave.java",
"license": "gpl-3.0",
"size": 40920
} | [
"java.text.MessageFormat",
"java.util.Locale",
"org.jdom2.Document",
"org.jdom2.Element",
"org.jdom2.filter.Filters",
"org.jdom2.xpath.XPathExpression",
"org.jdom2.xpath.XPathFactory",
"org.mycore.common.MCRConstants",
"org.mycore.common.xml.MCRXMLFunctions",
"org.mycore.datamodel.niofs.MCRContentTypes",
"org.mycore.datamodel.niofs.MCRPath",
"org.mycore.mets.model.MCRMetsModelHelper",
"org.mycore.mets.model.files.FLocat",
"org.mycore.mets.model.files.File"
] | import java.text.MessageFormat; import java.util.Locale; import org.jdom2.Document; import org.jdom2.Element; import org.jdom2.filter.Filters; import org.jdom2.xpath.XPathExpression; import org.jdom2.xpath.XPathFactory; import org.mycore.common.MCRConstants; import org.mycore.common.xml.MCRXMLFunctions; import org.mycore.datamodel.niofs.MCRContentTypes; import org.mycore.datamodel.niofs.MCRPath; import org.mycore.mets.model.MCRMetsModelHelper; import org.mycore.mets.model.files.FLocat; import org.mycore.mets.model.files.File; | import java.text.*; import java.util.*; import org.jdom2.*; import org.jdom2.filter.*; import org.jdom2.xpath.*; import org.mycore.common.*; import org.mycore.common.xml.*; import org.mycore.datamodel.niofs.*; import org.mycore.mets.model.*; import org.mycore.mets.model.files.*; | [
"java.text",
"java.util",
"org.jdom2",
"org.jdom2.filter",
"org.jdom2.xpath",
"org.mycore.common",
"org.mycore.datamodel",
"org.mycore.mets"
] | java.text; java.util; org.jdom2; org.jdom2.filter; org.jdom2.xpath; org.mycore.common; org.mycore.datamodel; org.mycore.mets; | 1,776,352 |
/**
 * Gives access to the Simon pattern this rule matches against.
 *
 * @return the rule's Simon pattern
 */
public SimonPattern getPattern() {
    return this.pattern;
}
| SimonPattern function() { return pattern; } | /**
* Returns the Simon pattern of this rule.
*
* @return Simon pattern of this rule
*/ | Returns the Simon pattern of this rule | getPattern | {
"repo_name": "virgo47/javasimon",
"path": "core/src/main/java/org/javasimon/callback/FilterRule.java",
"license": "bsd-3-clause",
"size": 7861
} | [
"org.javasimon.SimonPattern"
] | import org.javasimon.SimonPattern; | import org.javasimon.*; | [
"org.javasimon"
] | org.javasimon; | 1,354,121 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.