method (string) | clean_method (string) | doc (string) | comment (string) | method_name (string) | extra (dict) | imports (sequence) | imports_info (string) | cluster_imports_info (string) | libraries (sequence) | libraries_info (string) | id (int64)
---|---|---|---|---|---|---|---|---|---|---|---|
@SuppressWarnings("unchecked")
public JavaAnalysisResult analyzeExpression(final String expr,
final BoundIdentifiers availableIdentifiers) throws RecognitionException {
final JavaParser parser = parse( expr );
parser.conditionalOrExpression();
JavaAnalysisResult result = new JavaAnalysisResult();
result.setAnalyzedExpr(expr);
result.setIdentifiers(new HashSet<String>( parser.getIdentifiers() ) );
return analyze( result,
availableIdentifiers );
} | @SuppressWarnings(STR) JavaAnalysisResult function(final String expr, final BoundIdentifiers availableIdentifiers) throws RecognitionException { final JavaParser parser = parse( expr ); parser.conditionalOrExpression(); JavaAnalysisResult result = new JavaAnalysisResult(); result.setAnalyzedExpr(expr); result.setIdentifiers(new HashSet<String>( parser.getIdentifiers() ) ); return analyze( result, availableIdentifiers ); } | /**
* Analyze an expression.
*
* @param expr
* The expression to analyze.
* @param availableIdentifiers
* Total set of declarations available.
*
* @return The <code>Set</code> of declarations used by the expression.
* @throws RecognitionException
* If an error occurs in the parser.
*/ | Analyze an expression | analyzeExpression | {
"repo_name": "romartin/drools",
"path": "drools-compiler/src/main/java/org/drools/compiler/rule/builder/dialect/java/JavaExprAnalyzer.java",
"license": "apache-2.0",
"size": 6626
} | [
"java.util.HashSet",
"org.antlr.runtime.RecognitionException",
"org.drools.compiler.compiler.BoundIdentifiers",
"org.drools.compiler.rule.builder.dialect.java.parser.JavaParser"
] | import java.util.HashSet; import org.antlr.runtime.RecognitionException; import org.drools.compiler.compiler.BoundIdentifiers; import org.drools.compiler.rule.builder.dialect.java.parser.JavaParser; | import java.util.*; import org.antlr.runtime.*; import org.drools.compiler.compiler.*; import org.drools.compiler.rule.builder.dialect.java.parser.*; | [
"java.util",
"org.antlr.runtime",
"org.drools.compiler"
] | java.util; org.antlr.runtime; org.drools.compiler; | 2,830,925 |
@Test
public void testSetMaxCodeLength() {
final String value = "jumped";
final DoubleMetaphone doubleMetaphone = new DoubleMetaphone();
// Sanity check of default settings
assertEquals("Default Max Code Length", 4, doubleMetaphone.getMaxCodeLen());
assertEquals("Default Primary", "JMPT", doubleMetaphone.doubleMetaphone(value, false));
assertEquals("Default Alternate", "AMPT", doubleMetaphone.doubleMetaphone(value, true));
// Check setting Max Code Length
doubleMetaphone.setMaxCodeLen(3);
assertEquals("Set Max Code Length", 3, doubleMetaphone.getMaxCodeLen());
assertEquals("Max=3 Primary", "JMP", doubleMetaphone.doubleMetaphone(value, false));
assertEquals("Max=3 Alternate", "AMP", doubleMetaphone.doubleMetaphone(value, true));
} | void function() { final String value = STR; final DoubleMetaphone doubleMetaphone = new DoubleMetaphone(); assertEquals(STR, 4, doubleMetaphone.getMaxCodeLen()); assertEquals(STR, "JMPT", doubleMetaphone.doubleMetaphone(value, false)); assertEquals(STR, "AMPT", doubleMetaphone.doubleMetaphone(value, true)); doubleMetaphone.setMaxCodeLen(3); assertEquals(STR, 3, doubleMetaphone.getMaxCodeLen()); assertEquals(STR, "JMP", doubleMetaphone.doubleMetaphone(value, false)); assertEquals(STR, "AMP", doubleMetaphone.doubleMetaphone(value, true)); } | /**
* Test setting maximum length
*/ | Test setting maximum length | testSetMaxCodeLength | {
"repo_name": "adrie4mac/commons-codec",
"path": "src/test/java/org/apache/commons/codec/language/DoubleMetaphoneTest.java",
"license": "apache-2.0",
"size": 50780
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 1,505,949 |
public ByteArrayBuffer getResponses(ByteArrayBuffer rawRequests) throws Exception
{
return _connector.getResponses(rawRequests,false);
} | ByteArrayBuffer function(ByteArrayBuffer rawRequests) throws Exception { return _connector.getResponses(rawRequests,false); } | /** Get raw HTTP responses from raw HTTP requests.
* Multiple requests and responses may be handled, but only if
* persistent connections conditions apply.
* @param rawRequests String of raw HTTP requests
* @return String of raw HTTP responses
* @throws Exception
*/ | Get raw HTTP responses from raw HTTP requests. Multiple requests and responses may be handled, but only if persistent connections conditions apply | getResponses | {
"repo_name": "whiteley/jetty8",
"path": "test-jetty-servlet/src/main/java/org/eclipse/jetty/testing/ServletTester.java",
"license": "apache-2.0",
"size": 12786
} | [
"org.eclipse.jetty.io.ByteArrayBuffer"
] | import org.eclipse.jetty.io.ByteArrayBuffer; | import org.eclipse.jetty.io.*; | [
"org.eclipse.jetty"
] | org.eclipse.jetty; | 1,696,952 |
private void flushMyWriter()
{
if (m_writer != null)
{
try
{
m_writer.flush();
}
catch(IOException ioe)
{
}
}
} | void function() { if (m_writer != null) { try { m_writer.flush(); } catch(IOException ioe) { } } } | /**
* This method is only used internally when flushing the writer from the
* various fire...() trace events. Due to the writer being wrapped with
* SerializerTraceWriter it may cause the flush of these trace events:
* EVENTTYPE_OUTPUT_PSEUDO_CHARACTERS
* EVENTTYPE_OUTPUT_CHARACTERS
* which trace the output written to the output stream.
*
*/ | This method is only used internally when flushing the writer from the various fire...() trace events. Due to the writer being wrapped with SerializerTraceWriter it may cause the flush of these trace events: EVENTTYPE_OUTPUT_PSEUDO_CHARACTERS EVENTTYPE_OUTPUT_CHARACTERS which trace the output written to the output stream | flushMyWriter | {
"repo_name": "itgeeker/jdk",
"path": "src/com/sun/org/apache/xml/internal/serializer/SerializerBase.java",
"license": "apache-2.0",
"size": 44104
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,654,039 |
//@VisibleForTesting
void skipToNextPage(ExtractorInput input) throws IOException, InterruptedException {
if (!skipToNextPage(input, endPosition)) {
// Not found until eof.
throw new EOFException();
}
} | if (!skipToNextPage(input, endPosition)) { throw new EOFException(); } } | /**
* Skips to the next page.
*
* @param input The {@code ExtractorInput} to skip to the next page.
* @throws IOException thrown if peeking/reading from the input fails.
* @throws InterruptedException thrown if interrupted while peeking/reading from the input.
* @throws EOFException if the next page can't be found before the end of the input.
*/ | Skips to the next page | skipToNextPage | {
"repo_name": "Blaez/ZiosGram",
"path": "TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/ogg/DefaultOggSeeker.java",
"license": "gpl-2.0",
"size": 12223
} | [
"java.io.EOFException"
] | import java.io.EOFException; | import java.io.*; | [
"java.io"
] | java.io; | 514,023 |
protected void serialize(ArrayMap<String, Object> bundle) {
} | void function(ArrayMap<String, Object> bundle) { } | /**
* Used to serialize data for scheduled Jobs
*
* @param bundle The Bundle that will hold the serialized data
*/ | Used to serialize data for scheduled Jobs | serialize | {
"repo_name": "lifechurch/nuclei-android",
"path": "nuclei-android/src/main/java/nuclei/task/Task.java",
"license": "apache-2.0",
"size": 8218
} | [
"android.support.v4.util.ArrayMap"
] | import android.support.v4.util.ArrayMap; | import android.support.v4.util.*; | [
"android.support"
] | android.support; | 1,940,338 |
Map<Integer, Color> getChannelsColorMap()
{
Map<Integer, Color> m = new HashMap<Integer, Color>(getMaxC());
for (int i = 0; i < getMaxC(); i++)
m.put(i, getChannelColor(i));
return m;
}
| Map<Integer, Color> getChannelsColorMap() { Map<Integer, Color> m = new HashMap<Integer, Color>(getMaxC()); for (int i = 0; i < getMaxC(); i++) m.put(i, getChannelColor(i)); return m; } | /**
* Returns a collection of pairs (channel's index, channel's color).
*
* @return See above.
*/ | Returns a collection of pairs (channel's index, channel's color) | getChannelsColorMap | {
"repo_name": "stelfrich/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/agents/imviewer/view/ImViewerModel.java",
"license": "gpl-2.0",
"size": 80456
} | [
"java.awt.Color",
"java.util.HashMap",
"java.util.Map"
] | import java.awt.Color; import java.util.HashMap; import java.util.Map; | import java.awt.*; import java.util.*; | [
"java.awt",
"java.util"
] | java.awt; java.util; | 592,047 |
// APIs to read primitive data from a ByteBuffer using Unsafe way
public static short toShort(ByteBuffer buf, int offset) {
if (littleEndian) {
return Short.reverseBytes(getAsShort(buf, offset));
}
return getAsShort(buf, offset);
} | static short function(ByteBuffer buf, int offset) { if (littleEndian) { return Short.reverseBytes(getAsShort(buf, offset)); } return getAsShort(buf, offset); } | /**
* Reads a short value at the given buffer's offset considering it was written in big-endian
* format.
*
* @param buf
* @param offset
* @return short value at offset
*/ | Reads a short value at the given buffer's offset considering it was written in big-endian format | toShort | {
"repo_name": "lshmouse/hbase",
"path": "hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java",
"license": "apache-2.0",
"size": 10960
} | [
"java.nio.ByteBuffer"
] | import java.nio.ByteBuffer; | import java.nio.*; | [
"java.nio"
] | java.nio; | 2,338,443 |
public void setLastBuildDate(final Date lastBuildDate) {
this.lastBuildDate = lastBuildDate;
} | void function(final Date lastBuildDate) { this.lastBuildDate = lastBuildDate; } | /**
* Sets the last build date with the specified last build date.
*
* @param lastBuildDate the specified last build date
*/ | Sets the last build date with the specified last build date | setLastBuildDate | {
"repo_name": "AndiHappy/solo",
"path": "src/main/java/org/b3log/solo/model/feed/rss/Channel.java",
"license": "apache-2.0",
"size": 7827
} | [
"java.util.Date"
] | import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 2,319,460 |
public void unregisterPropertyOnType(String propertyName, JSType type) {
// TODO(bashir): typesIndexedByProperty should also be updated!
Map<String, ObjectType> typeSet =
eachRefTypeIndexedByProperty.get(propertyName);
if (typeSet != null) {
typeSet.remove(type.toObjectType().getReferenceName());
}
} | void function(String propertyName, JSType type) { Map<String, ObjectType> typeSet = eachRefTypeIndexedByProperty.get(propertyName); if (typeSet != null) { typeSet.remove(type.toObjectType().getReferenceName()); } } | /**
* Removes the index's reference to a property on the given type (if it is
* currently registered). If the property is not registered on the type yet,
* this method will not change internal state.
*
* @param propertyName the name of the property to unregister
* @param type the type to unregister the property on.
*/ | Removes the index's reference to a property on the given type (if it is currently registered). If the property is not registered on the type yet, this method will not change internal state | unregisterPropertyOnType | {
"repo_name": "vobruba-martin/closure-compiler",
"path": "src/com/google/javascript/rhino/jstype/JSTypeRegistry.java",
"license": "apache-2.0",
"size": 98599
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 2,736,646 |
static Process startPythonProcess(
PythonEnvironment pythonEnv, List<String> commands, boolean redirectToPipe)
throws IOException {
ProcessBuilder pythonProcessBuilder = new ProcessBuilder();
Map<String, String> env = pythonProcessBuilder.environment();
if (pythonEnv.pythonPath != null) {
String defaultPythonPath = env.get("PYTHONPATH");
if (Strings.isNullOrEmpty(defaultPythonPath)) {
env.put("PYTHONPATH", pythonEnv.pythonPath);
} else {
env.put(
"PYTHONPATH",
String.join(File.pathSeparator, pythonEnv.pythonPath, defaultPythonPath));
}
}
if (pythonEnv.archivesDirectory != null) {
pythonProcessBuilder.directory(new File(pythonEnv.archivesDirectory));
}
pythonEnv.systemEnv.forEach(env::put);
commands.add(0, pythonEnv.pythonExec);
pythonProcessBuilder.command(commands);
// redirect the stderr to stdout
pythonProcessBuilder.redirectErrorStream(true);
if (redirectToPipe) {
pythonProcessBuilder.redirectOutput(ProcessBuilder.Redirect.PIPE);
} else {
// set the child process the output same as the parent process.
pythonProcessBuilder.redirectOutput(ProcessBuilder.Redirect.INHERIT);
}
LOG.info(
"Starting Python process with environment variables: {{}}, command: {}",
env.entrySet().stream()
.map(e -> e.getKey() + "=" + e.getValue())
.collect(Collectors.joining(", ")),
String.join(" ", commands));
Process process = pythonProcessBuilder.start();
if (!process.isAlive()) {
throw new RuntimeException("Failed to start Python process. ");
}
return process;
} | static Process startPythonProcess( PythonEnvironment pythonEnv, List<String> commands, boolean redirectToPipe) throws IOException { ProcessBuilder pythonProcessBuilder = new ProcessBuilder(); Map<String, String> env = pythonProcessBuilder.environment(); if (pythonEnv.pythonPath != null) { String defaultPythonPath = env.get(STR); if (Strings.isNullOrEmpty(defaultPythonPath)) { env.put(STR, pythonEnv.pythonPath); } else { env.put( STR, String.join(File.pathSeparator, pythonEnv.pythonPath, defaultPythonPath)); } } if (pythonEnv.archivesDirectory != null) { pythonProcessBuilder.directory(new File(pythonEnv.archivesDirectory)); } pythonEnv.systemEnv.forEach(env::put); commands.add(0, pythonEnv.pythonExec); pythonProcessBuilder.command(commands); pythonProcessBuilder.redirectErrorStream(true); if (redirectToPipe) { pythonProcessBuilder.redirectOutput(ProcessBuilder.Redirect.PIPE); } else { pythonProcessBuilder.redirectOutput(ProcessBuilder.Redirect.INHERIT); } LOG.info( STR, env.entrySet().stream() .map(e -> e.getKey() + "=" + e.getValue()) .collect(Collectors.joining(STR)), String.join(" ", commands)); Process process = pythonProcessBuilder.start(); if (!process.isAlive()) { throw new RuntimeException(STR); } return process; } | /**
* Starts python process.
*
* @param pythonEnv the python Environment which will be in a process.
* @param commands the commands that python process will execute.
* @return the process represent the python process.
* @throws IOException Thrown if an error occurred when python process start.
*/ | Starts python process | startPythonProcess | {
"repo_name": "StephanEwen/incubator-flink",
"path": "flink-python/src/main/java/org/apache/flink/client/python/PythonEnvUtils.java",
"license": "apache-2.0",
"size": 22570
} | [
"java.io.File",
"java.io.IOException",
"java.util.List",
"java.util.Map",
"java.util.stream.Collectors",
"org.apache.flink.shaded.guava30.com.google.common.base.Strings"
] | import java.io.File; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.apache.flink.shaded.guava30.com.google.common.base.Strings; | import java.io.*; import java.util.*; import java.util.stream.*; import org.apache.flink.shaded.guava30.com.google.common.base.*; | [
"java.io",
"java.util",
"org.apache.flink"
] | java.io; java.util; org.apache.flink; | 1,701,777 |
public RequestConfig getRequestConfig() {
return requestConfig;
} | RequestConfig function() { return requestConfig; } | /**
* get RequestConfig, which can set socketTimeout, connectTimeout
* and so on by request
* @return RequestConfig
*/ | get RequestConfig, which can set socketTimeout, connectTimeout and so on by request | getRequestConfig | {
"repo_name": "robin13/elasticsearch",
"path": "client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java",
"license": "apache-2.0",
"size": 11003
} | [
"org.apache.http.client.config.RequestConfig"
] | import org.apache.http.client.config.RequestConfig; | import org.apache.http.client.config.*; | [
"org.apache.http"
] | org.apache.http; | 997,598 |
List<JSONObject> getByArticleId(final String articleId)
throws RepositoryException;
| List<JSONObject> getByArticleId(final String articleId) throws RepositoryException; | /**
* Gets tags of an article specified by the article id.
*
* @param articleId the specified article id
* @return a list of tags of the specified article, returns an empty list
* if not found
* @throws RepositoryException repository exception
*/ | Gets tags of an article specified by the article id | getByArticleId | {
"repo_name": "zhourongyu/b3log-solo",
"path": "core/src/main/java/org/b3log/solo/repository/TagRepository.java",
"license": "apache-2.0",
"size": 2059
} | [
"java.util.List",
"org.b3log.latke.repository.RepositoryException",
"org.json.JSONObject"
] | import java.util.List; import org.b3log.latke.repository.RepositoryException; import org.json.JSONObject; | import java.util.*; import org.b3log.latke.repository.*; import org.json.*; | [
"java.util",
"org.b3log.latke",
"org.json"
] | java.util; org.b3log.latke; org.json; | 2,515,353 |
Call<ResponseBody> getMultiplePagesFailureAsync(final ServiceCallback<ProductResult> serviceCallback); | Call<ResponseBody> getMultiplePagesFailureAsync(final ServiceCallback<ProductResult> serviceCallback); | /**
* A paging operation that receives a 400 on the second call
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @return the {@link Call} object
*/ | A paging operation that receives a 400 on the second call | getMultiplePagesFailureAsync | {
"repo_name": "BretJohnson/autorest",
"path": "AutoRest/Generators/Java/Azure.Java.Tests/src/main/java/fixtures/paging/Paging.java",
"license": "mit",
"size": 14369
} | [
"com.microsoft.rest.ServiceCallback",
"com.squareup.okhttp.ResponseBody"
] | import com.microsoft.rest.ServiceCallback; import com.squareup.okhttp.ResponseBody; | import com.microsoft.rest.*; import com.squareup.okhttp.*; | [
"com.microsoft.rest",
"com.squareup.okhttp"
] | com.microsoft.rest; com.squareup.okhttp; | 1,157,142 |
@Test
public void testUpdate() throws IOException {
TabularColumnReader columnReader = new TabularColumnFileReader(dataFile, delimiter);
DataColumn[] dataColumns = columnReader.readInDataColumns(true);
long expected = 10;
long actual = dataColumns.length;
Assert.assertEquals(expected, actual);
MetadataReader metadataReader = new MetadataFileReader(metadataFile);
Metadata metadata = metadataReader.read();
dataColumns = DataColumns.update(dataColumns, metadata);
expected = 11;
actual = dataColumns.length;
Assert.assertEquals(expected, actual);
} | void function() throws IOException { TabularColumnReader columnReader = new TabularColumnFileReader(dataFile, delimiter); DataColumn[] dataColumns = columnReader.readInDataColumns(true); long expected = 10; long actual = dataColumns.length; Assert.assertEquals(expected, actual); MetadataReader metadataReader = new MetadataFileReader(metadataFile); Metadata metadata = metadataReader.read(); dataColumns = DataColumns.update(dataColumns, metadata); expected = 11; actual = dataColumns.length; Assert.assertEquals(expected, actual); } | /**
* Test of update method, of class DataColumns.
*
* @throws IOException
*/ | Test of update method, of class DataColumns | testUpdate | {
"repo_name": "bd2kccd/data-reader",
"path": "src/test/java/edu/pitt/dbmi/data/reader/DataColumnsTest.java",
"license": "lgpl-3.0",
"size": 2469
} | [
"edu.pitt.dbmi.data.reader.metadata.Metadata",
"edu.pitt.dbmi.data.reader.metadata.MetadataFileReader",
"edu.pitt.dbmi.data.reader.metadata.MetadataReader",
"edu.pitt.dbmi.data.reader.tabular.TabularColumnFileReader",
"edu.pitt.dbmi.data.reader.tabular.TabularColumnReader",
"java.io.IOException",
"org.junit.Assert"
] | import edu.pitt.dbmi.data.reader.metadata.Metadata; import edu.pitt.dbmi.data.reader.metadata.MetadataFileReader; import edu.pitt.dbmi.data.reader.metadata.MetadataReader; import edu.pitt.dbmi.data.reader.tabular.TabularColumnFileReader; import edu.pitt.dbmi.data.reader.tabular.TabularColumnReader; import java.io.IOException; import org.junit.Assert; | import edu.pitt.dbmi.data.reader.metadata.*; import edu.pitt.dbmi.data.reader.tabular.*; import java.io.*; import org.junit.*; | [
"edu.pitt.dbmi",
"java.io",
"org.junit"
] | edu.pitt.dbmi; java.io; org.junit; | 74,477 |
ReferenceQueue<Object> getQueue()
{
return mRefQueue;
} | ReferenceQueue<Object> getQueue() { return mRefQueue; } | /**
* Get the underlying queue we track references with.
*
* @return The queue.
*/ | Get the underlying queue we track references with | getQueue | {
"repo_name": "artclarke/humble-video",
"path": "humble-video-noarch/src/main/java/io/humble/ferry/JNIMemoryManager.java",
"license": "agpl-3.0",
"size": 53832
} | [
"java.lang.ref.ReferenceQueue"
] | import java.lang.ref.ReferenceQueue; | import java.lang.ref.*; | [
"java.lang"
] | java.lang; | 1,832,053 |
private static void loadFluidDefaults(BiMap<Fluid, Integer> localFluidIDs, Set<String> defaultNames)
{
// If there's an empty set of default names, use the defaults as defined locally
if (defaultNames.isEmpty()) {
defaultNames.addAll(defaultFluidName.values());
}
BiMap<String, Fluid> localFluids = HashBiMap.create(fluids);
for (String defaultName : defaultNames)
{
Fluid fluid = masterFluidReference.get(defaultName);
if (fluid == null) {
String derivedName = defaultName.split(":",2)[1];
String localDefault = defaultFluidName.get(derivedName);
if (localDefault == null) {
FMLLog.getLogger().log(Level.ERROR, "The fluid {} (specified as {}) is missing from this instance - it will be removed", derivedName, defaultName);
continue;
}
fluid = masterFluidReference.get(localDefault);
FMLLog.getLogger().log(Level.ERROR, "The fluid {} specified as default is not present - it will be reverted to default {}", defaultName, localDefault);
}
FMLLog.getLogger().log(Level.DEBUG, "The fluid {} has been selected as the default fluid for {}", defaultName, fluid.getName());
Fluid oldFluid = localFluids.put(fluid.getName(), fluid);
Integer id = localFluidIDs.remove(oldFluid);
localFluidIDs.put(fluid, id);
}
BiMap<Integer, String> localFluidNames = HashBiMap.create();
for (Entry<Fluid, Integer> e : localFluidIDs.entrySet()) {
localFluidNames.put(e.getValue(), e.getKey().getName());
}
fluidIDs = localFluidIDs;
fluids = localFluids;
fluidNames = localFluidNames;
fluidBlocks = null;
for (FluidDelegate fd : delegates.values())
{
fd.rebind();
}
} | static void function(BiMap<Fluid, Integer> localFluidIDs, Set<String> defaultNames) { if (defaultNames.isEmpty()) { defaultNames.addAll(defaultFluidName.values()); } BiMap<String, Fluid> localFluids = HashBiMap.create(fluids); for (String defaultName : defaultNames) { Fluid fluid = masterFluidReference.get(defaultName); if (fluid == null) { String derivedName = defaultName.split(":",2)[1]; String localDefault = defaultFluidName.get(derivedName); if (localDefault == null) { FMLLog.getLogger().log(Level.ERROR, STR, derivedName, defaultName); continue; } fluid = masterFluidReference.get(localDefault); FMLLog.getLogger().log(Level.ERROR, STR, defaultName, localDefault); } FMLLog.getLogger().log(Level.DEBUG, STR, defaultName, fluid.getName()); Fluid oldFluid = localFluids.put(fluid.getName(), fluid); Integer id = localFluidIDs.remove(oldFluid); localFluidIDs.put(fluid, id); } BiMap<Integer, String> localFluidNames = HashBiMap.create(); for (Entry<Fluid, Integer> e : localFluidIDs.entrySet()) { localFluidNames.put(e.getValue(), e.getKey().getName()); } fluidIDs = localFluidIDs; fluids = localFluids; fluidNames = localFluidNames; fluidBlocks = null; for (FluidDelegate fd : delegates.values()) { fd.rebind(); } } | /**
* Called by forge to load default fluid IDs from the world or from server -> client for syncing
* DO NOT call this and expect useful behaviour.
* @param localFluidIDs
* @param defaultNames
*/ | Called by forge to load default fluid IDs from the world or from server -> client for syncing DO NOT call this and expect useful behaviour | loadFluidDefaults | {
"repo_name": "boredherobrine13/morefuelsmod-1.10",
"path": "build/tmp/recompileMc/sources/net/minecraftforge/fluids/FluidRegistry.java",
"license": "lgpl-2.1",
"size": 16118
} | [
"com.google.common.collect.BiMap",
"com.google.common.collect.HashBiMap",
"java.util.Map",
"java.util.Set",
"net.minecraftforge.fml.common.FMLLog",
"org.apache.logging.log4j.Level"
] | import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; import java.util.Map; import java.util.Set; import net.minecraftforge.fml.common.FMLLog; import org.apache.logging.log4j.Level; | import com.google.common.collect.*; import java.util.*; import net.minecraftforge.fml.common.*; import org.apache.logging.log4j.*; | [
"com.google.common",
"java.util",
"net.minecraftforge.fml",
"org.apache.logging"
] | com.google.common; java.util; net.minecraftforge.fml; org.apache.logging; | 58,972 |
@Override
public void notifyChanged(Notification notification) {
updateChildren(notification);
switch (notification.getFeatureID(Operation.class)) {
case DsPackage.OPERATION__DISABLE_STREAMING:
case DsPackage.OPERATION__RETURN_REQUEST_STATUS:
case DsPackage.OPERATION__NAME:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(),
false, true));
return;
case DsPackage.OPERATION__MIXED:
case DsPackage.OPERATION__CALL_QUERY:
case DsPackage.OPERATION__CALL_QUERY_GROUP:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(),
true, false));
return;
}
super.notifyChanged(notification);
} | void function(Notification notification) { updateChildren(notification); switch (notification.getFeatureID(Operation.class)) { case DsPackage.OPERATION__DISABLE_STREAMING: case DsPackage.OPERATION__RETURN_REQUEST_STATUS: case DsPackage.OPERATION__NAME: fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true)); return; case DsPackage.OPERATION__MIXED: case DsPackage.OPERATION__CALL_QUERY: case DsPackage.OPERATION__CALL_QUERY_GROUP: fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false)); return; } super.notifyChanged(notification); } | /**
* This handles model notifications by calling {@link #updateChildren} to
* update any cached
* children and by creating a viewer notification, which it passes to
* {@link #fireNotifyChanged}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
*
* @generated
*/ | This handles model notifications by calling <code>#updateChildren</code> to update any cached children and by creating a viewer notification, which it passes to <code>#fireNotifyChanged</code>. | notifyChanged | {
"repo_name": "splinter/developer-studio",
"path": "data-services/org.wso2.developerstudio.eclipse.ds.edit/src/org/wso2/developerstudio/eclipse/ds/provider/OperationItemProvider.java",
"license": "apache-2.0",
"size": 13749
} | [
"org.eclipse.emf.common.notify.Notification",
"org.eclipse.emf.edit.provider.ViewerNotification",
"org.wso2.developerstudio.eclipse.ds.DsPackage",
"org.wso2.developerstudio.eclipse.ds.Operation"
] | import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.edit.provider.ViewerNotification; import org.wso2.developerstudio.eclipse.ds.DsPackage; import org.wso2.developerstudio.eclipse.ds.Operation; | import org.eclipse.emf.common.notify.*; import org.eclipse.emf.edit.provider.*; import org.wso2.developerstudio.eclipse.ds.*; | [
"org.eclipse.emf",
"org.wso2.developerstudio"
] | org.eclipse.emf; org.wso2.developerstudio; | 708,228 |
public Observable<ServiceResponse<VirtualNetworkInner>> updateTagsWithServiceResponseAsync(String resourceGroupName, String virtualNetworkName) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (virtualNetworkName == null) {
throw new IllegalArgumentException("Parameter virtualNetworkName is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
} | Observable<ServiceResponse<VirtualNetworkInner>> function(String resourceGroupName, String virtualNetworkName) { if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (virtualNetworkName == null) { throw new IllegalArgumentException(STR); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } | /**
* Updates a virtual network tags.
*
* @param resourceGroupName The name of the resource group.
* @param virtualNetworkName The name of the virtual network.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the VirtualNetworkInner object
*/ | Updates a virtual network tags | updateTagsWithServiceResponseAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2020_05_01/src/main/java/com/microsoft/azure/management/network/v2020_05_01/implementation/VirtualNetworksInner.java",
"license": "mit",
"size": 90099
} | [
"com.microsoft.rest.ServiceResponse"
] | import com.microsoft.rest.ServiceResponse; | import com.microsoft.rest.*; | [
"com.microsoft.rest"
] | com.microsoft.rest; | 2,866,685 |
Optional<PiPipeconfId> ofDevice(DeviceId deviceId); | Optional<PiPipeconfId> ofDevice(DeviceId deviceId); | /**
* Returns the pipeconf identifier currently associated with the given
* device identifier, if present. If not present, it means no pipeconf has
* been associated with that device so far.
*
* @param deviceId device identifier
* @return an optional pipeconf identifier
*/ | Returns the pipeconf identifier currently associated with the given device identifier, if present. If not present, it means no pipeconf has been associated with that device so far | ofDevice | {
"repo_name": "kuujo/onos",
"path": "core/api/src/main/java/org/onosproject/net/pi/service/PiPipeconfService.java",
"license": "apache-2.0",
"size": 4217
} | [
"java.util.Optional",
"org.onosproject.net.DeviceId",
"org.onosproject.net.pi.model.PiPipeconfId"
] | import java.util.Optional; import org.onosproject.net.DeviceId; import org.onosproject.net.pi.model.PiPipeconfId; | import java.util.*; import org.onosproject.net.*; import org.onosproject.net.pi.model.*; | [
"java.util",
"org.onosproject.net"
] | java.util; org.onosproject.net; | 2,725,216 |
public HttpPut createContentPutMethod(final String path, final Map<String, List<String>> params,
final FedoraContent content ) {
String contentPath = path;
if ( content != null && content.getChecksum() != null ) {
contentPath += "?checksum=" + content.getChecksum();
}
final HttpPut put = createPutMethod( contentPath, params );
// content stream
if ( content != null ) {
put.setEntity( new InputStreamEntity(content.getContent()) );
}
// filename
if ( content != null && content.getFilename() != null ) {
put.setHeader("Content-Disposition", "attachment; filename=\"" + content.getFilename() + "\"" );
}
// content type
if ( content != null && content.getContentType() != null ) {
put.setHeader("Content-Type", content.getContentType() );
}
return put;
} | HttpPut function(final String path, final Map<String, List<String>> params, final FedoraContent content ) { String contentPath = path; if ( content != null && content.getChecksum() != null ) { contentPath += STR + content.getChecksum(); } final HttpPut put = createPutMethod( contentPath, params ); if ( content != null ) { put.setEntity( new InputStreamEntity(content.getContent()) ); } if ( content != null && content.getFilename() != null ) { put.setHeader(STR, STRSTR\STRContent-Type", content.getContentType() ); } return put; } | /**
* Create a request to create/update content.
* @param path The datastream path.
* @param params Mapping of parameters for the PUT request
* @param content Content parameters.
* @return PUT method
**/ | Create a request to create/update content | createContentPutMethod | {
"repo_name": "fcrepo4-labs/fcrepo4-client",
"path": "fcrepo-client-impl/src/main/java/org/fcrepo/client/utils/HttpHelper.java",
"license": "apache-2.0",
"size": 17056
} | [
"java.util.List",
"java.util.Map",
"org.apache.http.client.methods.HttpPut",
"org.apache.http.entity.InputStreamEntity",
"org.fcrepo.client.FedoraContent"
] | import java.util.List; import java.util.Map; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.InputStreamEntity; import org.fcrepo.client.FedoraContent; | import java.util.*; import org.apache.http.client.methods.*; import org.apache.http.entity.*; import org.fcrepo.client.*; | [
"java.util",
"org.apache.http",
"org.fcrepo.client"
] | java.util; org.apache.http; org.fcrepo.client; | 1,253,105 |
public LearningActivityTry fetchByact_u_Last(long actId, long userId,
OrderByComparator orderByComparator) throws SystemException {
int count = countByact_u(actId, userId);
List<LearningActivityTry> list = findByact_u(actId, userId, count - 1,
count, orderByComparator);
if (!list.isEmpty()) {
return list.get(0);
}
return null;
} | LearningActivityTry function(long actId, long userId, OrderByComparator orderByComparator) throws SystemException { int count = countByact_u(actId, userId); List<LearningActivityTry> list = findByact_u(actId, userId, count - 1, count, orderByComparator); if (!list.isEmpty()) { return list.get(0); } return null; } | /**
* Returns the last learning activity try in the ordered set where actId = ? and userId = ?.
*
* @param actId the act ID
* @param userId the user ID
* @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
* @return the last matching learning activity try, or <code>null</code> if a matching learning activity try could not be found
* @throws SystemException if a system exception occurred
*/ | Returns the last learning activity try in the ordered set where actId = ? and userId = ? | fetchByact_u_Last | {
"repo_name": "TelefonicaED/liferaylms-portlet",
"path": "docroot/WEB-INF/src/com/liferay/lms/service/persistence/LearningActivityTryPersistenceImpl.java",
"license": "agpl-3.0",
"size": 155464
} | [
"com.liferay.lms.model.LearningActivityTry",
"com.liferay.portal.kernel.exception.SystemException",
"com.liferay.portal.kernel.util.OrderByComparator",
"java.util.List"
] | import com.liferay.lms.model.LearningActivityTry; import com.liferay.portal.kernel.exception.SystemException; import com.liferay.portal.kernel.util.OrderByComparator; import java.util.List; | import com.liferay.lms.model.*; import com.liferay.portal.kernel.exception.*; import com.liferay.portal.kernel.util.*; import java.util.*; | [
"com.liferay.lms",
"com.liferay.portal",
"java.util"
] | com.liferay.lms; com.liferay.portal; java.util; | 544,690 |
public void skippedEntity(String arg0) throws SAXException
{
} | void function(String arg0) throws SAXException { } | /**
* Does nothing.
* @see org.xml.sax.ContentHandler#skippedEntity(String)
*/ | Does nothing | skippedEntity | {
"repo_name": "rokn/Count_Words_2015",
"path": "testing/openjdk2/jaxp/src/com/sun/org/apache/xml/internal/serializer/ToHTMLSAXHandler.java",
"license": "mit",
"size": 22450
} | [
"org.xml.sax.SAXException"
] | import org.xml.sax.SAXException; | import org.xml.sax.*; | [
"org.xml.sax"
] | org.xml.sax; | 2,968 |
protected void onCrafting(ItemStack par1ItemStack)
{
par1ItemStack.onCrafting(this.thePlayer.worldObj, this.thePlayer, this.field_75228_b);
if (!this.thePlayer.worldObj.isRemote)
{
int i = this.field_75228_b;
float f = ParticleMergerRecipes.compacting().func_151398_b(par1ItemStack);
int j;
if (f == 0.0F)
{
i = 0;
}
else if (f < 1.0F)
{
j = MathHelper.floor_float((float)i * f);
if (j < MathHelper.ceiling_float_int((float)i * f) && (float)Math.random() < (float)i * f - (float)j)
{
++j;
}
i = j;
}
while (i > 0)
{
j = EntityXPOrb.getXPSplit(i);
i -= j;
this.thePlayer.worldObj.spawnEntityInWorld(new EntityXPOrb(this.thePlayer.worldObj, this.thePlayer.posX, this.thePlayer.posY + 0.5D, this.thePlayer.posZ + 0.5D, j));
}
}
this.field_75228_b = 0;
}
| void function(ItemStack par1ItemStack) { par1ItemStack.onCrafting(this.thePlayer.worldObj, this.thePlayer, this.field_75228_b); if (!this.thePlayer.worldObj.isRemote) { int i = this.field_75228_b; float f = ParticleMergerRecipes.compacting().func_151398_b(par1ItemStack); int j; if (f == 0.0F) { i = 0; } else if (f < 1.0F) { j = MathHelper.floor_float((float)i * f); if (j < MathHelper.ceiling_float_int((float)i * f) && (float)Math.random() < (float)i * f - (float)j) { ++j; } i = j; } while (i > 0) { j = EntityXPOrb.getXPSplit(i); i -= j; this.thePlayer.worldObj.spawnEntityInWorld(new EntityXPOrb(this.thePlayer.worldObj, this.thePlayer.posX, this.thePlayer.posY + 0.5D, this.thePlayer.posZ + 0.5D, j)); } } this.field_75228_b = 0; } | /**
* the itemStack passed in is the output - ie, iron ingots, and pickaxes, not ore and wood.
*/ | the itemStack passed in is the output - ie, iron ingots, and pickaxes, not ore and wood | onCrafting | {
"repo_name": "NEMESIS13cz/Evercraft",
"path": "java/evercraft/NEMESIS13cz/TileEntity/Slot/SlotParticleMerger.java",
"license": "gpl-3.0",
"size": 3230
} | [
"net.minecraft.entity.item.EntityXPOrb",
"net.minecraft.item.ItemStack",
"net.minecraft.util.MathHelper"
] | import net.minecraft.entity.item.EntityXPOrb; import net.minecraft.item.ItemStack; import net.minecraft.util.MathHelper; | import net.minecraft.entity.item.*; import net.minecraft.item.*; import net.minecraft.util.*; | [
"net.minecraft.entity",
"net.minecraft.item",
"net.minecraft.util"
] | net.minecraft.entity; net.minecraft.item; net.minecraft.util; | 1,829,565 |
public File getLibrariesPath() {
if (librariesPath == null) {
return new File(EXTERNAL_LIBRARIES_PATH);
}
return librariesPath;
}
/**
* Sets the path to the external dependency libraries folder.
* @param librariesPath the libraries folder path
* @throws IllegalArgumentException if some parameters were {@code null} | File function() { if (librariesPath == null) { return new File(EXTERNAL_LIBRARIES_PATH); } return librariesPath; } /** * Sets the path to the external dependency libraries folder. * @param librariesPath the libraries folder path * @throws IllegalArgumentException if some parameters were {@code null} | /**
* Returns the path to the dependency libraries path.
* The dependency library files are in the target folder directly.
* @return the librariesPath the libraries path
* @since 0.5.1
*/ | Returns the path to the dependency libraries path. The dependency library files are in the target folder directly | getLibrariesPath | {
"repo_name": "cocoatomo/asakusafw",
"path": "testing-project/asakusa-test-driver/src/main/java/com/asakusafw/testdriver/TestDriverContext.java",
"license": "apache-2.0",
"size": 33349
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 1,757,711 |
private boolean hasSameResults(Criteria criteria) {
try {
Set<Object> a = Sets.newHashSet(client.get("ipeds_id",
client.find(criteria)).values());
String query = "SELECT ipeds_id FROM data WHERE "
+ criteria.toString();
ResultSet rs = sql.executeQuery(query);
rs.next(); // skip column header
Set<Object> b = Sets.newHashSet();
while (rs.next()) {
b.add(rs.getInt(1));
}
Variables.register("query", query);
Variables.register("con", a);
Variables.register("sql", b);
Variables.register("diff", Sets.symmetricDifference(a, b));
return a.equals(b);
}
catch (Exception e) {
throw Throwables.propagate(e);
}
} | boolean function(Criteria criteria) { try { Set<Object> a = Sets.newHashSet(client.get(STR, client.find(criteria)).values()); String query = STR + criteria.toString(); ResultSet rs = sql.executeQuery(query); rs.next(); Set<Object> b = Sets.newHashSet(); while (rs.next()) { b.add(rs.getInt(1)); } Variables.register("query", query); Variables.register("con", a); Variables.register("sql", b); Variables.register("diff", Sets.symmetricDifference(a, b)); return a.equals(b); } catch (Exception e) { throw Throwables.propagate(e); } } | /**
* Validate that the {@code criteria} returns the same result in Concourse
* as it does in a relational database.
*
* @param criteria
* @return {@code true} if the Concourse and SQL result sets are the same
*/ | Validate that the criteria returns the same result in Concourse as it does in a relational database | hasSameResults | {
"repo_name": "hcuffy/concourse",
"path": "concourse-integration-tests/src/test/java/com/cinchapi/concourse/FindCriteriaTest.java",
"license": "apache-2.0",
"size": 8959
} | [
"com.cinchapi.concourse.lang.Criteria",
"com.cinchapi.concourse.test.Variables",
"com.google.common.base.Throwables",
"com.google.common.collect.Sets",
"java.sql.ResultSet",
"java.util.Set"
] | import com.cinchapi.concourse.lang.Criteria; import com.cinchapi.concourse.test.Variables; import com.google.common.base.Throwables; import com.google.common.collect.Sets; import java.sql.ResultSet; import java.util.Set; | import com.cinchapi.concourse.lang.*; import com.cinchapi.concourse.test.*; import com.google.common.base.*; import com.google.common.collect.*; import java.sql.*; import java.util.*; | [
"com.cinchapi.concourse",
"com.google.common",
"java.sql",
"java.util"
] | com.cinchapi.concourse; com.google.common; java.sql; java.util; | 2,451,581 |
public void initConnection(int timeout, long synchOnInstant) throws
IOException,
StandardException,
ClassNotFoundException {
Socket s = null; | void function(int timeout, long synchOnInstant) throws IOException, StandardException, ClassNotFoundException { Socket s = null; | /**
* Used to create a <code>Socket</code> connection to the slave and
* establish compatibility with the database version of the slave by
* comparing the UID's of the <code>ReplicationMessage</code> classes
* of the master and the slave.
*
* @param timeout the amount of time for which the connection should
* block before being established.
*
* @param synchOnInstant the master log instant, used to check
* that the master and slave log files are in synch. If no chunks
* of log records have been shipped to the slave yet, this is the
* end position in the current log file. If a chunk of log has
* been shipped, this is the instant of the log record shipped
* last. Note that there is a difference!
*
* @throws IOException if an exception occurs while trying to create the
* <code>SocketConnection</code> class or open a connection.
*
* @throws StandardException If an error message is received from the
* server indicating incompatible software versions of master
* and slave.
*
* @throws ClassNotFoundException Class of a serialized object cannot
* be found.
*/ | Used to create a <code>Socket</code> connection to the slave and establish compatibility with the database version of the slave by comparing the UID's of the <code>ReplicationMessage</code> classes of the master and the slave | initConnection | {
"repo_name": "trejkaz/derby",
"path": "java/engine/org/apache/derby/impl/store/replication/net/ReplicationMessageTransmit.java",
"license": "apache-2.0",
"size": 15834
} | [
"java.io.IOException",
"java.net.Socket",
"org.apache.derby.iapi.error.StandardException"
] | import java.io.IOException; import java.net.Socket; import org.apache.derby.iapi.error.StandardException; | import java.io.*; import java.net.*; import org.apache.derby.iapi.error.*; | [
"java.io",
"java.net",
"org.apache.derby"
] | java.io; java.net; org.apache.derby; | 82,117 |
@Test(dataProvider = "testEnsureNotNullElements3")
public void testEnsureNotNull3(Object o1, Object o2, Object o3,
boolean shouldFail)
{
boolean failed;
try
{
Validator.ensureNotNull(o1, o2, o3);
failed = false;
}
catch (LDAPSDKUsageException lue)
{
failed = true;
}
assertEquals(failed, shouldFail);
} | @Test(dataProvider = STR) void function(Object o1, Object o2, Object o3, boolean shouldFail) { boolean failed; try { Validator.ensureNotNull(o1, o2, o3); failed = false; } catch (LDAPSDKUsageException lue) { failed = true; } assertEquals(failed, shouldFail); } | /**
* Tests the {@code ensureNotNull} method variant that takes three arguments.
*
* @param o1 The first test object.
* @param o2 The second test object.
* @param o3 The third test object.
* @param shouldFail Indicates whether the test should fail.
*/ | Tests the ensureNotNull method variant that takes three arguments | testEnsureNotNull3 | {
"repo_name": "UnboundID/ldapsdk",
"path": "tests/unit/src/com/unboundid/util/ValidatorTestCase.java",
"license": "gpl-2.0",
"size": 22669
} | [
"org.testng.annotations.Test"
] | import org.testng.annotations.Test; | import org.testng.annotations.*; | [
"org.testng.annotations"
] | org.testng.annotations; | 1,004,452 |
protected void fetch(@Nonnull String url, @Nonnull Path target) throws IOException {
this.fetch(new URL(url), target);
} | void function(@Nonnull String url, @Nonnull Path target) throws IOException { this.fetch(new URL(url), target); } | /**
* Fetches any resource from a remote HTTP server and stores it in a specified file.
*/ | Fetches any resource from a remote HTTP server and stores it in a specified file | fetch | {
"repo_name": "BasinMC/minecraft-maven-plugin",
"path": "src/main/java/org/basinmc/maven/plugins/minecraft/AbstractArtifactMojo.java",
"license": "apache-2.0",
"size": 9222
} | [
"java.io.IOException",
"java.nio.file.Path",
"javax.annotation.Nonnull"
] | import java.io.IOException; import java.nio.file.Path; import javax.annotation.Nonnull; | import java.io.*; import java.nio.file.*; import javax.annotation.*; | [
"java.io",
"java.nio",
"javax.annotation"
] | java.io; java.nio; javax.annotation; | 2,033,565 |
TTransportFactory getServerTransportFactory(String serverName) throws SaslException; | TTransportFactory getServerTransportFactory(String serverName) throws SaslException; | /**
* For server side, this method returns a {@link TTransportFactory} based on the auth type. It is
* used as one argument to build a Thrift {@link org.apache.thrift.server.TServer}. If the auth
* type is not supported or recognized, an {@link UnsupportedOperationException} is thrown.
*
* @param serverName the name for this server
* @return a corresponding TTransportFactory
* @throws SaslException if building a TransportFactory fails
*/ | For server side, this method returns a <code>TTransportFactory</code> based on the auth type. It is used as one argument to build a Thrift <code>org.apache.thrift.server.TServer</code>. If the auth type is not supported or recognized, an <code>UnsupportedOperationException</code> is thrown | getServerTransportFactory | {
"repo_name": "Reidddddd/mo-alluxio",
"path": "core/common/src/main/java/alluxio/security/authentication/TransportProvider.java",
"license": "apache-2.0",
"size": 4572
} | [
"javax.security.sasl.SaslException",
"org.apache.thrift.transport.TTransportFactory"
] | import javax.security.sasl.SaslException; import org.apache.thrift.transport.TTransportFactory; | import javax.security.sasl.*; import org.apache.thrift.transport.*; | [
"javax.security",
"org.apache.thrift"
] | javax.security; org.apache.thrift; | 1,386,285 |
@Transactional
public DeploymentScheduleEvent createDeploymentScheduleEvent(DeploymentSchedule schedule,
String fileScope, String fileName) {
try (MdcUtils.MdcCloseable requestId = MdcUtils
.setRequestIdCloseable(UUID.randomUUID().toString())) {
DeploymentScheduleRequest deploymentRequest = DeploymentScheduleRequest
.deploymentScheduleBuilder()
.callback(schedule.getCallback())
.parameters(new HashMap<>(schedule.getParameters()))
.template(schedule.getTemplate())
.build();
OidcTokenId requestedWithToken = Optional.ofNullable(schedule.getRequestedWithToken())
.map(OidcRefreshToken::getOidcTokenId).orElse(null);
Deployment deployment = deploymentService
.createDeployment(deploymentRequest, schedule.getOwner(), requestedWithToken);
DeploymentScheduleEvent event = new DeploymentScheduleEvent();
event.setScope(fileScope);
event.setName(fileName);
event.setDeployment(deployment);
deployment.setDeploymentScheduleEvent(event);
event.setDeploymentSchedule(schedule);
event.setOwner(schedule.getOwner());
return deploymentScheduleEventRepository.save(event);
}
} | DeploymentScheduleEvent function(DeploymentSchedule schedule, String fileScope, String fileName) { try (MdcUtils.MdcCloseable requestId = MdcUtils .setRequestIdCloseable(UUID.randomUUID().toString())) { DeploymentScheduleRequest deploymentRequest = DeploymentScheduleRequest .deploymentScheduleBuilder() .callback(schedule.getCallback()) .parameters(new HashMap<>(schedule.getParameters())) .template(schedule.getTemplate()) .build(); OidcTokenId requestedWithToken = Optional.ofNullable(schedule.getRequestedWithToken()) .map(OidcRefreshToken::getOidcTokenId).orElse(null); Deployment deployment = deploymentService .createDeployment(deploymentRequest, schedule.getOwner(), requestedWithToken); DeploymentScheduleEvent event = new DeploymentScheduleEvent(); event.setScope(fileScope); event.setName(fileName); event.setDeployment(deployment); deployment.setDeploymentScheduleEvent(event); event.setDeploymentSchedule(schedule); event.setOwner(schedule.getOwner()); return deploymentScheduleEventRepository.save(event); } } | /**
* Create a new Deployment schedule event.
* @param schedule the Deployment schedule
* @param fileName the name of the file that triggered the event
* @param fileScope the scope of the file that triggered the event
* @return the deployment schedule event
*/ | Create a new Deployment schedule event | createDeploymentScheduleEvent | {
"repo_name": "indigo-dc/orchestrator",
"path": "src/main/java/it/reply/orchestrator/service/DeploymentScheduleServiceImpl.java",
"license": "apache-2.0",
"size": 9651
} | [
"it.reply.orchestrator.dal.entity.Deployment",
"it.reply.orchestrator.dal.entity.DeploymentSchedule",
"it.reply.orchestrator.dal.entity.DeploymentScheduleEvent",
"it.reply.orchestrator.dal.entity.OidcRefreshToken",
"it.reply.orchestrator.dal.entity.OidcTokenId",
"it.reply.orchestrator.dto.request.DeploymentScheduleRequest",
"it.reply.orchestrator.utils.MdcUtils",
"java.util.HashMap",
"java.util.Optional",
"java.util.UUID"
] | import it.reply.orchestrator.dal.entity.Deployment; import it.reply.orchestrator.dal.entity.DeploymentSchedule; import it.reply.orchestrator.dal.entity.DeploymentScheduleEvent; import it.reply.orchestrator.dal.entity.OidcRefreshToken; import it.reply.orchestrator.dal.entity.OidcTokenId; import it.reply.orchestrator.dto.request.DeploymentScheduleRequest; import it.reply.orchestrator.utils.MdcUtils; import java.util.HashMap; import java.util.Optional; import java.util.UUID; | import it.reply.orchestrator.dal.entity.*; import it.reply.orchestrator.dto.request.*; import it.reply.orchestrator.utils.*; import java.util.*; | [
"it.reply.orchestrator",
"java.util"
] | it.reply.orchestrator; java.util; | 2,818,125 |
void setClasspath(FileCollection classpath); | void setClasspath(FileCollection classpath); | /**
* A convenient method for setting a Bundle-Classpath instruction. The information of the classpath elements are only
* used if they are OSGi bundles. In this case for example the version information provided by the bundle is used in the Import-Package of the generated
* OSGi bundle.
*
* @param classpath The classpath elements
*/ | A convenient method for setting a Bundle-Classpath instruction. The information of the classpath elements are only used if they are OSGi bundles. In this case for example the version information provided by the bundle is used in the Import-Package of the generated OSGi bundle | setClasspath | {
"repo_name": "lsmaira/gradle",
"path": "subprojects/osgi/src/main/java/org/gradle/api/plugins/osgi/OsgiManifest.java",
"license": "apache-2.0",
"size": 5856
} | [
"org.gradle.api.file.FileCollection"
] | import org.gradle.api.file.FileCollection; | import org.gradle.api.file.*; | [
"org.gradle.api"
] | org.gradle.api; | 1,014,142 |
protected HibernateTemplate getHibernateTemplate() {
return _dbConnector.getHibernateTemplate();
} | HibernateTemplate function() { return _dbConnector.getHibernateTemplate(); } | /**
* Gets the Hibernate Spring template.
*
* @return the template
*/ | Gets the Hibernate Spring template | getHibernateTemplate | {
"repo_name": "McLeodMoores/starling",
"path": "projects/master-db/src/main/java/com/opengamma/masterdb/security/hibernate/HibernateSecurityMasterDetailProvider.java",
"license": "apache-2.0",
"size": 18888
} | [
"org.springframework.orm.hibernate5.HibernateTemplate"
] | import org.springframework.orm.hibernate5.HibernateTemplate; | import org.springframework.orm.hibernate5.*; | [
"org.springframework.orm"
] | org.springframework.orm; | 380,252 |
if (!httpTimerData.getHttpInfo().isUriDefined()) {
throw new IllegalArgumentException("URI is not defined for the given HttpTimerData.");
}
if (null == regEx) {
throw new IllegalArgumentException("Regular expression to use can not be null.");
}
try {
Pattern pattern = Pattern.compile(regEx);
Matcher matcher = pattern.matcher(httpTimerData.getHttpInfo().getUri());
String result = "";
if (null != regExTemplate) {
result = regExTemplate;
}
if (matcher.find()) {
for (int i = 1; i <= matcher.groupCount(); i++) {
if (null != regExTemplate) {
String matched = matcher.group(i);
if (null != matched) {
result = result.replace("$" + i + "$", matcher.group(i));
}
} else {
result += matcher.group(i);
}
}
if (!ObjectUtils.equals(result, regExTemplate)) {
return result;
}
}
return "Regular Expression " + regEx + " does not match URI";
} catch (PatternSyntaxException patternSyntaxException) {
throw new IllegalArgumentException("Provided Regular expression is not correct.", patternSyntaxException);
}
} | if (!httpTimerData.getHttpInfo().isUriDefined()) { throw new IllegalArgumentException(STR); } if (null == regEx) { throw new IllegalArgumentException(STR); } try { Pattern pattern = Pattern.compile(regEx); Matcher matcher = pattern.matcher(httpTimerData.getHttpInfo().getUri()); String result = STR$STR$STRRegular Expression STR does not match URISTRProvided Regular expression is not correct.", patternSyntaxException); } } | /**
* Transforms the URI from the {@link HttpTimerData} with the given regular expression. If
* template is provided all found groups will be replaced in template, if template specifies
* them.
*
* @param httpTimerData
* {@link HttpTimerData}
* @param regEx
* Regular expression
* @param regExTemplate
* Template
* @return Transformed URI.
* @throws IllegalArgumentException
* If {@link HttpTimerData} does not define the URI, if regular expression is
* <code>null</code> or can not be compiled.
*/ | Transforms the URI from the <code>HttpTimerData</code> with the given regular expression. If template is provided all found groups will be replaced in template, if template specifies them | getTransformedUri | {
"repo_name": "andy32323/inspectIT",
"path": "Commons/src/info/novatec/inspectit/communication/data/HttpTimerDataHelper.java",
"license": "agpl-3.0",
"size": 2257
} | [
"java.util.regex.Matcher",
"java.util.regex.Pattern"
] | import java.util.regex.Matcher; import java.util.regex.Pattern; | import java.util.regex.*; | [
"java.util"
] | java.util; | 2,777,571 |
@Override
public void setCR_TaxBaseAmt (java.math.BigDecimal CR_TaxBaseAmt)
{
set_Value (COLUMNNAME_CR_TaxBaseAmt, CR_TaxBaseAmt);
} | void function (java.math.BigDecimal CR_TaxBaseAmt) { set_Value (COLUMNNAME_CR_TaxBaseAmt, CR_TaxBaseAmt); } | /** Set the base amount (credit).
	@param CR_TaxBaseAmt
	Base amount used for calculating the tax
  */ | Set the base amount (credit) | setCR_TaxBaseAmt | {
"repo_name": "klst-com/metasfresh",
"path": "de.metas.adempiere.adempiere/base/src/main/java-gen/org/compiere/model/X_GL_JournalLine.java",
"license": "gpl-2.0",
"size": 29546
} | [
"java.math.BigDecimal"
] | import java.math.BigDecimal; | import java.math.*; | [
"java.math"
] | java.math; | 1,446,065 |
public void testNameSpaceFromEncoding_RFC1779() throws Exception {
byte[] mess = { 0x30, 0x0E, 0x31, 0x0C, 0x30, 0x0A, 0x06, 0x03, 0x55,
0x04, 0x03, 0x13, 0x03, 0x20, 0x41, 0x20, };
X500Principal principal = new X500Principal(mess);
String s = principal.getName(X500Principal.RFC1779);
assertEquals("CN=\" A \"", s);
} | void function() throws Exception { byte[] mess = { 0x30, 0x0E, 0x31, 0x0C, 0x30, 0x0A, 0x06, 0x03, 0x55, 0x04, 0x03, 0x13, 0x03, 0x20, 0x41, 0x20, }; X500Principal principal = new X500Principal(mess); String s = principal.getName(X500Principal.RFC1779); assertEquals("CN=\" A \"", s); } | /**
* Inits X500Principal with byte array, where there are leading and tailing spaces
* gets Name in RFC1779 format
* compares with expected value of name
 */ | Inits X500Principal with byte array, where there are leading and trailing spaces, gets Name in RFC1779 format, compares with expected value of name | testNameSpaceFromEncoding_RFC1779 | {
"repo_name": "AdmireTheDistance/android_libcore",
"path": "harmony-tests/src/test/java/org/apache/harmony/tests/javax/security/auth/x500/X500PrincipalTest.java",
"license": "gpl-2.0",
"size": 127872
} | [
"javax.security.auth.x500.X500Principal"
] | import javax.security.auth.x500.X500Principal; | import javax.security.auth.x500.*; | [
"javax.security"
] | javax.security; | 1,320,142 |
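For readers unfamiliar with the RFC1779 formatting exercised by the test record above, a tiny stand-alone usage sketch of the standard javax.security API follows; the distinguished name is an arbitrary example.

import javax.security.auth.x500.X500Principal;

public class X500NameFormats {
    public static void main(String[] args) {
        X500Principal principal = new X500Principal("CN=Test User, O=Example, C=US");
        // Both calls use the standard format constants defined on X500Principal.
        System.out.println(principal.getName(X500Principal.RFC1779));
        System.out.println(principal.getName(X500Principal.RFC2253));
    }
}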
private HTableDescriptor convertSchemaToDescriptor(final TableSchema schema) {
HTableDescriptor desc;
desc = new HTableDescriptor(schema.getName());
desc.setDeferredLogFlush(schema.isDeferredLogFlush());
desc.setMaxFileSize(schema.getMaxFileSize());
desc.setMemStoreFlushSize(schema.getMemStoreFlushSize());
desc.setReadOnly(schema.isReadOnly());
final Collection<ColumnDefinition> cols = schema.getColumns();
for (final ColumnDefinition col : cols) {
final HColumnDescriptor cd = new HColumnDescriptor(Bytes.toBytes(col.getColumnName()), col.getMaxVersions(),
col.getCompression(), col.isInMemory(), col.isBlockCacheEnabled(), col.getBlockSize(), col.getTimeToLive(),
col.getBloomFilter(), col.getReplicationScope());
desc.addFamily(cd);
}
return desc;
} | HTableDescriptor function(final TableSchema schema) { HTableDescriptor desc; desc = new HTableDescriptor(schema.getName()); desc.setDeferredLogFlush(schema.isDeferredLogFlush()); desc.setMaxFileSize(schema.getMaxFileSize()); desc.setMemStoreFlushSize(schema.getMemStoreFlushSize()); desc.setReadOnly(schema.isReadOnly()); final Collection<ColumnDefinition> cols = schema.getColumns(); for (final ColumnDefinition col : cols) { final HColumnDescriptor cd = new HColumnDescriptor(Bytes.toBytes(col.getColumnName()), col.getMaxVersions(), col.getCompression(), col.isInMemory(), col.isBlockCacheEnabled(), col.getBlockSize(), col.getTimeToLive(), col.getBloomFilter(), col.getReplicationScope()); desc.addFamily(cd); } return desc; } | /**
* Converts the XML based schema to a version HBase can take natively.
*
 * @param schema The schema with all the tables.
* @return The converted schema as a HBase object.
*/ | Converts the XML based schema to a version HBase can take natively | convertSchemaToDescriptor | {
"repo_name": "larsgeorge/hbase-schema-manager",
"path": "src/main/java/com/larsgeorge/hadoop/hbase/HBaseManager.java",
"license": "apache-2.0",
"size": 23587
} | [
"java.util.Collection",
"org.apache.hadoop.hbase.HColumnDescriptor",
"org.apache.hadoop.hbase.HTableDescriptor",
"org.apache.hadoop.hbase.util.Bytes"
] | import java.util.Collection; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.util.Bytes; | import java.util.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.util.*; | [
"java.util",
"org.apache.hadoop"
] | java.util; org.apache.hadoop; | 1,499,317 |
public void setAdditionalStopWords(final Set<String> additionalStopWords) {
this.additionalStopWords = Objects.requireNonNull(
additionalStopWords,
"additionalStopWords must not be null!");
} | void function(final Set<String> additionalStopWords) { this.additionalStopWords = Objects.requireNonNull( additionalStopWords, STR); } | /**
* Sets the new set containing the additional stopwords to consider.
*
* @param additionalStopWords the new set containing the additional
 * stopwords to consider
*/ | Sets the new set containing the additional stopwords to consider | setAdditionalStopWords | {
"repo_name": "reinhapa/TweetwallFX",
"path": "config/src/main/java/org/tweetwallfx/config/TweetwallSettings.java",
"license": "mit",
"size": 5699
} | [
"java.util.Objects",
"java.util.Set"
] | import java.util.Objects; import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 2,873,360 |
public static void finishEvent(EventData eventData) {
if(eventData.getState() == EventState.RUNNING) {
eventData.stopTimer();
if(eventData.getCountdownTimer() != null && eventData.getCountdownTimer().isRunning()) {
eventData.getCountdownTimer().stop();
}
String[] winner = new String[0];
try {
winner = ((Event) eventData.getClz().newInstance()).getWinner(eventData);
} catch (InstantiationException | IllegalAccessException e) {
System.out.println(e);
e.printStackTrace();
}
int i = 0;
String[] players = new String[eventData.getPlayerList().size()+1];
for(Player members : eventData.getPlayerList()) {
players[i] = members.getName();
i++;
}
players[i] = eventData.getOwner().getName();
EventResult eventResult = new EventResult(winner, eventData.getTime(), players);
eventData.setEventResult(eventResult);
String timeString = EventFunctions.timeToString(eventData.getTime());
for(Player all : EventFunctions.getAllPlayers(eventData)) {
all.sendMessage(Color.YELLOW, localizedStringSet.format(all, "Event.Finish.Message", EventFunctions.getDescriptionForEvent(eventData.getClz()).name(), timeString));
PlayerData playerData = EventSystem.getInstance().getPlayerLifecycleHolder().getObject(all, PlayerData.class);
if(playerData.getColor() != null)
all.setColor(playerData.getColor());
}
EventFunctions.getAllPlayers(eventData).forEach(EventBase::resetData);
if(eventData.getMap() != null) {
if(eventData.getZone() != null) {
eventData.getZone().hideForAll();
eventData.getZone().destroy();
eventData.setZone(null);
}
eventData.getMap().destroy();
eventData.setMap(null);
}
destroyGameTimer(eventData);
try {
((Event) eventData.getClz().newInstance()).finish(eventData);
} catch (InstantiationException | IllegalAccessException e) {
System.out.println(e);
e.printStackTrace();
}
EventNPCBase.finish(eventData);
eventData.setState(EventState.START);
}
}
| static void function(EventData eventData) { if(eventData.getState() == EventState.RUNNING) { eventData.stopTimer(); if(eventData.getCountdownTimer() != null && eventData.getCountdownTimer().isRunning()) { eventData.getCountdownTimer().stop(); } String[] winner = new String[0]; try { winner = ((Event) eventData.getClz().newInstance()).getWinner(eventData); } catch (InstantiationException IllegalAccessException e) { System.out.println(e); e.printStackTrace(); } int i = 0; String[] players = new String[eventData.getPlayerList().size()+1]; for(Player members : eventData.getPlayerList()) { players[i] = members.getName(); i++; } players[i] = eventData.getOwner().getName(); EventResult eventResult = new EventResult(winner, eventData.getTime(), players); eventData.setEventResult(eventResult); String timeString = EventFunctions.timeToString(eventData.getTime()); for(Player all : EventFunctions.getAllPlayers(eventData)) { all.sendMessage(Color.YELLOW, localizedStringSet.format(all, STR, EventFunctions.getDescriptionForEvent(eventData.getClz()).name(), timeString)); PlayerData playerData = EventSystem.getInstance().getPlayerLifecycleHolder().getObject(all, PlayerData.class); if(playerData.getColor() != null) all.setColor(playerData.getColor()); } EventFunctions.getAllPlayers(eventData).forEach(EventBase::resetData); if(eventData.getMap() != null) { if(eventData.getZone() != null) { eventData.getZone().hideForAll(); eventData.getZone().destroy(); eventData.setZone(null); } eventData.getMap().destroy(); eventData.setMap(null); } destroyGameTimer(eventData); try { ((Event) eventData.getClz().newInstance()).finish(eventData); } catch (InstantiationException IllegalAccessException e) { System.out.println(e); e.printStackTrace(); } EventNPCBase.finish(eventData); eventData.setState(EventState.START); } } | /**
* initialize the setup for event results, call the event class and finish the event
* @param eventData the event
*/ | initialize the setup for event results, call the event class and finish the event | finishEvent | {
"repo_name": "Alf21/event-system",
"path": "src/main/java/me/alf21/eventsystem/EventBase.java",
"license": "gpl-3.0",
"size": 88807
} | [
"net.gtaun.shoebill.data.Color",
"net.gtaun.shoebill.object.Player"
] | import net.gtaun.shoebill.data.Color; import net.gtaun.shoebill.object.Player; | import net.gtaun.shoebill.data.*; import net.gtaun.shoebill.object.*; | [
"net.gtaun.shoebill"
] | net.gtaun.shoebill; | 791,683 |
// ===================== WEEKNUM function ===================== //
@Function("WEEKNUM")
@FunctionParameters({
@FunctionParameter("dateObj")})
public Integer WEEKNUM(Object dateObj){
Date date = convertDateObject(dateObj);
if(date==null){
logCannotConvertToDate();
return null;
}
else{
DateTime dt=new DateTime(date);
return dt.getWeekOfWeekyear();
}
}
| @Function(STR) @FunctionParameters({ @FunctionParameter(STR)}) Integer function(Object dateObj){ Date date = convertDateObject(dateObj); if(date==null){ logCannotConvertToDate(); return null; } else{ DateTime dt=new DateTime(date); return dt.getWeekOfWeekyear(); } } | /**
* Returns the week number of a given date.
*/ | Returns the week number of a given date | WEEKNUM | {
"repo_name": "aleatorio12/ProVentasConnector",
"path": "jasperreports-6.2.1-project/jasperreports-6.2.1/demo/samples/functions/src/net/sf/jasperreports/functions/standard/DateTimeFunctions.java",
"license": "gpl-3.0",
"size": 21272
} | [
"java.util.Date",
"net.sf.jasperreports.functions.annotations.Function",
"net.sf.jasperreports.functions.annotations.FunctionParameter",
"net.sf.jasperreports.functions.annotations.FunctionParameters",
"org.joda.time.DateTime"
] | import java.util.Date; import net.sf.jasperreports.functions.annotations.Function; import net.sf.jasperreports.functions.annotations.FunctionParameter; import net.sf.jasperreports.functions.annotations.FunctionParameters; import org.joda.time.DateTime; | import java.util.*; import net.sf.jasperreports.functions.annotations.*; import org.joda.time.*; | [
"java.util",
"net.sf.jasperreports",
"org.joda.time"
] | java.util; net.sf.jasperreports; org.joda.time; | 2,648,033 |
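A stand-alone illustration of the week-number lookup performed by the WEEKNUM record above, written directly against Joda-Time (the org.joda.time dependency listed for that record); the helper name is illustrative.

import java.util.Date;
import org.joda.time.DateTime;

public class WeekNumberSketch {
    // Returns the ISO week-of-week-year for the given date, mirroring the record's logic.
    static Integer weekNumber(Date date) {
        if (date == null) {
            return null;
        }
        return new DateTime(date).getWeekOfWeekyear();
    }

    public static void main(String[] args) {
        System.out.println(weekNumber(new Date())); // current ISO week number
    }
}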
public InputSplit getWrappedSplit() {
return wrappedSplit;
} | InputSplit function() { return wrappedSplit; } | /**
* This methods returns the actual InputSplit (as returned by the
* {@link InputFormat}) which this class is wrapping.
* @return the wrappedSplit
*/ | This methods returns the actual InputSplit (as returned by the <code>InputFormat</code>) which this class is wrapping | getWrappedSplit | {
"repo_name": "hirohanin/pig7hadoop21",
"path": "src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigSplit.java",
"license": "apache-2.0",
"size": 8653
} | [
"org.apache.hadoop.mapreduce.InputSplit"
] | import org.apache.hadoop.mapreduce.InputSplit; | import org.apache.hadoop.mapreduce.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 2,713,209 |
public void multiplyThisBy2() {
GF2nElement T1, T2, T3, T4;
// if this point is zero, do nothing!
//
if (!isZero()) {
T1 = (GF2nElement) mX.clone();
T2 = (GF2nElement) mY.clone();
T3 = (GF2nElement) mZ.clone();
T4 = (GF2nElement) mB.clone();
for (int i = 1; i < mDeg - 1; i++) {
T4.squareThis();
}
if (T1.isZero() || T3.isZero()) {
assignZero();
} else {
T2.multiplyThisBy(T3);
T3.squareThis();
T4.multiplyThisBy(T3);
T3.multiplyThisBy(T1); // = Z2
T2.addToThis(T3);
T4.addToThis(T1);
T4.squareThis();
T4.squareThis(); // = X2
T1.squareThis();
T2.addToThis(T1); // = U
T2.multiplyThisBy(T4);
T1.squareThis();
T1.multiplyThisBy(T3);
T2.addToThis(T1); // = Y2
}
assign(T4, T2, T3);
}
} | void function() { GF2nElement T1, T2, T3, T4; T1 = (GF2nElement) mX.clone(); T2 = (GF2nElement) mY.clone(); T3 = (GF2nElement) mZ.clone(); T4 = (GF2nElement) mB.clone(); for (int i = 1; i < mDeg - 1; i++) { T4.squareThis(); } if (T1.isZero() T3.isZero()) { assignZero(); } else { T2.multiplyThisBy(T3); T3.squareThis(); T4.multiplyThisBy(T3); T3.multiplyThisBy(T1); T2.addToThis(T3); T4.addToThis(T1); T4.squareThis(); T4.squareThis(); T1.squareThis(); T2.addToThis(T1); T2.multiplyThisBy(T4); T1.squareThis(); T1.multiplyThisBy(T3); T2.addToThis(T1); } assign(T4, T2, T3); } } | /**
* Doubles this point. <br>
* input : <tt>(X<sub>1</sub>, Y<sub>1</sub>, Z<sub>1</sub>)</tt><br>
* output: <tt>2*(X<sub>1</sub>, Y<sub>1</sub>, Z<sub>1</sub>)
* = (X<sub>2</sub>, Y<sub>2</sub>, Z<sub>2</sub>)</tt><br>
* <br>
* <tt>c = b<sup>2<sup>m-2</sup></sup>,<br>
* Z<sub>2</sub> = X<sub>1</sub>Z<sub>1</sub><sup>2</sup><br>
* X<sub>2</sub> = (X<sub>1</sub> + cZ<sub>1</sub><sup>2</sup>)
* <sup>4</sup><br>
 * U = Z<sub>2</sub> + X<sub>1</sub><sup>2</sup> + Y<sub>1</sub>Z<sub>1</sub><br>
 * Y<sub>2</sub> = X<sub>1</sub><sup>4</sup>Z<sub>2</sub> + UX<sub>2</sub></tt><br>
 */ | Doubles this point. input: (X1, Y1, Z1) output: 2*(X1, Y1, Z1) = (X2, Y2, Z2), where c = b^(2^(m-2)), Z2 = X1*Z1^2, X2 = (X1 + c*Z1^2)^4, U = Z2 + X1^2 + Y1*Z1, Y2 = X1^4*Z2 + U*X2 | multiplyThisBy2 | {
"repo_name": "sharebookproject/AndroidPQCrypto",
"path": "MCElieceProject/Flexiprovider/src/main/java/de/flexiprovider/common/math/ellipticcurves/PointGF2n.java",
"license": "gpl-3.0",
"size": 35023
} | [
"de.flexiprovider.common.math.finitefields.GF2nElement"
] | import de.flexiprovider.common.math.finitefields.GF2nElement; | import de.flexiprovider.common.math.finitefields.*; | [
"de.flexiprovider.common"
] | de.flexiprovider.common; | 1,808,143 |
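Restating the doubling formulas from the PointGF2n record above in conventional notation (read off the T1-T4 operations in the code), purely for readability:

\begin{align}
c   &= b^{2^{m-2}} \\
Z_2 &= X_1 Z_1^{2} \\
X_2 &= (X_1 + c Z_1^{2})^{4} \\
U   &= Z_2 + X_1^{2} + Y_1 Z_1 \\
Y_2 &= X_1^{4} Z_2 + U X_2
\end{align}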
public static <E extends Comparable<?>> Builder<E> naturalOrder() {
return new Builder<E>(Ordering.natural());
}
public static final class Builder<E> extends ImmutableSet.Builder<E> {
private final Comparator<? super E> comparator;
public Builder(Comparator<? super E> comparator) {
this.comparator = checkNotNull(comparator);
} | static <E extends Comparable<?>> Builder<E> function() { return new Builder<E>(Ordering.natural()); } public static final class Builder<E> extends ImmutableSet.Builder<E> { private final Comparator<? super E> comparator; public Builder(Comparator<? super E> comparator) { this.comparator = checkNotNull(comparator); } | /**
* Returns a builder that creates immutable sorted sets whose elements are
* ordered by their natural ordering. The sorted sets use {@link
* Ordering#natural()} as the comparator. This method provides more
* type-safety than {@link #builder}, as it can be called only for classes
* that implement {@link Comparable}.
*/ | Returns a builder that creates immutable sorted sets whose elements are ordered by their natural ordering. The sorted sets use <code>Ordering#natural()</code> as the comparator. This method provides more type-safety than <code>#builder</code>, as it can be called only for classes that implement <code>Comparable</code> | naturalOrder | {
"repo_name": "aiyanbo/guava",
"path": "guava/src/com/google/common/collect/ImmutableSortedSet.java",
"license": "apache-2.0",
"size": 28782
} | [
"com.google.common.base.Preconditions",
"java.util.Comparator"
] | import com.google.common.base.Preconditions; import java.util.Comparator; | import com.google.common.base.*; import java.util.*; | [
"com.google.common",
"java.util"
] | com.google.common; java.util; | 898,793 |
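A minimal usage sketch for the naturalOrder() builder described in the ImmutableSortedSet record above, using only the public Guava API referenced by that record.

import com.google.common.collect.ImmutableSortedSet;

public class NaturalOrderExample {
    public static void main(String[] args) {
        // Elements are sorted by their natural (here alphabetical) ordering, regardless of insertion order.
        ImmutableSortedSet<String> set = ImmutableSortedSet.<String>naturalOrder()
                .add("banana")
                .add("apple")
                .add("cherry")
                .build();
        System.out.println(set); // [apple, banana, cherry]
    }
}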
public final int doEndTag() throws JspException {
validateAttributes();
doEndTagWithoutRuntimeValidation();
return EVAL_PAGE;
} | final int function() throws JspException { validateAttributes(); doEndTagWithoutRuntimeValidation(); return EVAL_PAGE; } | /**
* Performs runtime validation and delegates to the overridable method
* {@link #doEndTagWithoutRuntimeValidation()}.
*
* @return <code>EVAL_PAGE</code>
* @throws JspException to communicate error
*/ | Performs runtime validation and delegates to the overridable method <code>#doEndTagWithoutRuntimeValidation()</code> | doEndTag | {
"repo_name": "UCSFMemoryAndAging/lava",
"path": "uitags/uitags-main/src/main/java/net/sf/uitags/tag/formGuide/ObserveForNullTag.java",
"license": "bsd-2-clause",
"size": 3956
} | [
"javax.servlet.jsp.JspException"
] | import javax.servlet.jsp.JspException; | import javax.servlet.jsp.*; | [
"javax.servlet"
] | javax.servlet; | 1,995,413 |
Set<Locale> getSupportedLocales(); | Set<Locale> getSupportedLocales(); | /**
* Gets all supported languages of the interpreter by their {@link Locale}s
*
* @return Set of supported languages (each given by a {@link Locale}) or null, if there is no constraint
*/ | Gets all supported languages of the interpreter by their <code>Locale</code>s | getSupportedLocales | {
"repo_name": "philomatic/smarthome",
"path": "bundles/core/org.eclipse.smarthome.core.voice/src/main/java/org/eclipse/smarthome/core/voice/text/HumanLanguageInterpreter.java",
"license": "epl-1.0",
"size": 2136
} | [
"java.util.Locale",
"java.util.Set"
] | import java.util.Locale; import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 1,886,938 |
private Node.Nodes doParse(String inFileName, Node parent, Jar jar)
throws FileNotFoundException, JasperException, IOException {
Node.Nodes parsedPage = null;
isEncodingSpecifiedInProlog = false;
isBomPresent = false;
isDefaultPageEncoding = false;
String absFileName = resolveFileName(inFileName);
String jspConfigPageEnc = getJspConfigPageEncoding(absFileName);
// Figure out what type of JSP document and encoding type we are
// dealing with
determineSyntaxAndEncoding(absFileName, jar, jspConfigPageEnc);
if (parent != null) {
// Included resource, add to dependent list
if (jar == null) {
compiler.getPageInfo().addDependant(absFileName,
ctxt.getLastModified(absFileName));
} else {
String entry = absFileName.substring(1);
compiler.getPageInfo().addDependant(jar.getURL(entry),
Long.valueOf(jar.getLastModified(entry)));
}
}
if ((isXml && isEncodingSpecifiedInProlog) || isBomPresent) {
if (jspConfigPageEnc != null && !jspConfigPageEnc.equals(sourceEnc)
&& (!jspConfigPageEnc.startsWith("UTF-16")
|| !sourceEnc.startsWith("UTF-16"))) {
err.jspError("jsp.error.prolog_config_encoding_mismatch",
sourceEnc, jspConfigPageEnc);
}
}
// Dispatch to the appropriate parser
if (isXml) {
// JSP document (XML syntax)
// InputStream for jspx page is created and properly closed in
// JspDocumentParser.
parsedPage = JspDocumentParser.parse(this, absFileName, jar, parent,
isTagFile, directiveOnly, sourceEnc, jspConfigPageEnc,
isEncodingSpecifiedInProlog, isBomPresent);
} else {
// Standard syntax
try (InputStreamReader inStreamReader = JspUtil.getReader(
absFileName, sourceEnc, jar, ctxt, err, skip);) {
JspReader jspReader = new JspReader(ctxt, absFileName,
inStreamReader, err);
parsedPage = Parser.parse(this, jspReader, parent, isTagFile,
directiveOnly, jar, sourceEnc, jspConfigPageEnc,
isDefaultPageEncoding, isBomPresent);
}
}
baseDirStack.pop();
return parsedPage;
}
| Node.Nodes function(String inFileName, Node parent, Jar jar) throws FileNotFoundException, JasperException, IOException { Node.Nodes parsedPage = null; isEncodingSpecifiedInProlog = false; isBomPresent = false; isDefaultPageEncoding = false; String absFileName = resolveFileName(inFileName); String jspConfigPageEnc = getJspConfigPageEncoding(absFileName); determineSyntaxAndEncoding(absFileName, jar, jspConfigPageEnc); if (parent != null) { if (jar == null) { compiler.getPageInfo().addDependant(absFileName, ctxt.getLastModified(absFileName)); } else { String entry = absFileName.substring(1); compiler.getPageInfo().addDependant(jar.getURL(entry), Long.valueOf(jar.getLastModified(entry))); } } if ((isXml && isEncodingSpecifiedInProlog) isBomPresent) { if (jspConfigPageEnc != null && !jspConfigPageEnc.equals(sourceEnc) && (!jspConfigPageEnc.startsWith(STR) !sourceEnc.startsWith(STR))) { err.jspError(STR, sourceEnc, jspConfigPageEnc); } } if (isXml) { parsedPage = JspDocumentParser.parse(this, absFileName, jar, parent, isTagFile, directiveOnly, sourceEnc, jspConfigPageEnc, isEncodingSpecifiedInProlog, isBomPresent); } else { try (InputStreamReader inStreamReader = JspUtil.getReader( absFileName, sourceEnc, jar, ctxt, err, skip);) { JspReader jspReader = new JspReader(ctxt, absFileName, inStreamReader, err); parsedPage = Parser.parse(this, jspReader, parent, isTagFile, directiveOnly, jar, sourceEnc, jspConfigPageEnc, isDefaultPageEncoding, isBomPresent); } } baseDirStack.pop(); return parsedPage; } | /**
* Parses the JSP page or tag file with the given path name.
*
* @param inFileName The name of the JSP page or tag file to be parsed.
* @param parent The parent node (non-null when processing an include
* directive)
* @param isTagFile true if file to be parsed is tag file, and false if it
* is a regular JSP page
* @param directivesOnly true if the file to be parsed is a tag file and
* we are only interested in the directives needed for constructing a
* TagFileInfo.
* @param jar The JAR file from which to read the JSP page or tag file,
* or null if the JSP page or tag file is to be read from the filesystem
*/ | Parses the JSP page or tag file with the given path name | doParse | {
"repo_name": "nkasvosve/beyondj",
"path": "apache-jsp/src/main/java/org/apache/jasper/compiler/ParserController.java",
"license": "apache-2.0",
"size": 21908
} | [
"java.io.FileNotFoundException",
"java.io.IOException",
"java.io.InputStreamReader",
"org.apache.jasper.JasperException",
"org.apache.tomcat.util.scan.Jar"
] | import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import org.apache.jasper.JasperException; import org.apache.tomcat.util.scan.Jar; | import java.io.*; import org.apache.jasper.*; import org.apache.tomcat.util.scan.*; | [
"java.io",
"org.apache.jasper",
"org.apache.tomcat"
] | java.io; org.apache.jasper; org.apache.tomcat; | 1,495,729 |
public void add(String key, byte[] val) throws IgniteCheckedException {
try {
while (!current.add(key, val))
chain.add(current = new FileTmpStorage());
}
catch (IOException e) {
throw new IgniteCheckedException(e);
}
} | void function(String key, byte[] val) throws IgniteCheckedException { try { while (!current.add(key, val)) chain.add(current = new FileTmpStorage()); } catch (IOException e) { throw new IgniteCheckedException(e); } } | /**
* Put data
*
* @param key Key.
* @param val Value.
*/ | Put data | add | {
"repo_name": "andrey-kuznetsov/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/metastorage/MetaStorage.java",
"license": "apache-2.0",
"size": 32356
} | [
"java.io.IOException",
"org.apache.ignite.IgniteCheckedException"
] | import java.io.IOException; import org.apache.ignite.IgniteCheckedException; | import java.io.*; import org.apache.ignite.*; | [
"java.io",
"org.apache.ignite"
] | java.io; org.apache.ignite; | 1,692,111 |
public static TableHFileArchiveTracker create(Configuration conf)
throws ZooKeeperConnectionException, IOException {
ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "hfileArchiveCleaner", null);
return create(zkw, new HFileArchiveTableMonitor());
} | static TableHFileArchiveTracker function(Configuration conf) throws ZooKeeperConnectionException, IOException { ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, STR, null); return create(zkw, new HFileArchiveTableMonitor()); } | /**
* Create an archive tracker for the passed in server
* @param conf to read for zookeeper connection information
* @return ZooKeeper tracker to monitor for this server if this server should archive hfiles for a
* given table
 * @throws IOException If an unexpected exception occurs
* @throws ZooKeeperConnectionException if we can't reach zookeeper
*/ | Create an archive tracker for the passed in server | create | {
"repo_name": "throughsky/lywebank",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java",
"license": "apache-2.0",
"size": 10222
} | [
"java.io.IOException",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hbase.ZooKeeperConnectionException",
"org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher"
] | import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; | import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.zookeeper.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 1,919,022 |
protected long calculateExpiration(long timeToLive)
{
if (timeToLive <= 0)
return timeToLive;
else
return timeToLive + CurrentTime.getCurrentTime();
} | long function(long timeToLive) { if (timeToLive <= 0) return timeToLive; else return timeToLive + CurrentTime.getCurrentTime(); } | /**
 * Calculates the expiration time.
 */ | Calculates the expiration time | calculateExpiration | {
"repo_name": "mdaniel/svn-caucho-com-resin",
"path": "modules/resin/src/com/caucho/jms/connection/MessageProducerImpl.java",
"license": "gpl-2.0",
"size": 8871
} | [
"com.caucho.util.CurrentTime"
] | import com.caucho.util.CurrentTime; | import com.caucho.util.*; | [
"com.caucho.util"
] | com.caucho.util; | 1,104,618 |
public TimeValue getMaxTaskWaitTime() {
return threadPoolExecutor.getMaxTaskWaitTime();
} | TimeValue function() { return threadPoolExecutor.getMaxTaskWaitTime(); } | /**
* Returns the maximum wait time for tasks in the queue
*
 * @return A zero time value if the queue is empty, otherwise the wait time of the oldest task in the queue
*/ | Returns the maximum wait time for tasks in the queue | getMaxTaskWaitTime | {
"repo_name": "s1monw/elasticsearch",
"path": "server/src/main/java/org/elasticsearch/cluster/service/MasterService.java",
"license": "apache-2.0",
"size": 34460
} | [
"org.elasticsearch.common.unit.TimeValue"
] | import org.elasticsearch.common.unit.TimeValue; | import org.elasticsearch.common.unit.*; | [
"org.elasticsearch.common"
] | org.elasticsearch.common; | 285,356 |
public Carbon getTimestamp(String name) {
return getTimestamp(name, null);
} | Carbon function(String name) { return getTimestamp(name, null); } | /**
* Gets a carbon timestamp object from the data rows item list.
*
* @param name The index(name) to get.
* @return either (1) The value of the index given,
 * or (2) <code>NULL</code> if the index doesn't exist.
*/ | Gets a carbon timestamp object from the data rows item list | getTimestamp | {
"repo_name": "avaire/orion",
"path": "src/main/java/com/avairebot/database/collection/DataRow.java",
"license": "gpl-3.0",
"size": 13398
} | [
"com.avairebot.time.Carbon"
] | import com.avairebot.time.Carbon; | import com.avairebot.time.*; | [
"com.avairebot.time"
] | com.avairebot.time; | 2,110,489 |
public Set<String> assertAllShardsOnNodes(String index, String... pattern) {
Set<String> nodes = new HashSet<>();
ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
for (ShardRouting shardRouting : indexShardRoutingTable) {
if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndexName())) {
String name = clusterState.nodes().get(shardRouting.currentNodeId()).getName();
nodes.add(name);
assertThat("Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true));
}
}
}
}
return nodes;
}
protected static class NumShards {
public final int numPrimaries;
public final int numReplicas;
public final int totalNumShards;
public final int dataCopies;
private NumShards(int numPrimaries, int numReplicas) {
this.numPrimaries = numPrimaries;
this.numReplicas = numReplicas;
this.dataCopies = numReplicas + 1;
this.totalNumShards = numPrimaries * dataCopies;
}
} | Set<String> function(String index, String... pattern) { Set<String> nodes = new HashSet<>(); ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndexName())) { String name = clusterState.nodes().get(shardRouting.currentNodeId()).getName(); nodes.add(name); assertThat(STR + name, Regex.simpleMatch(pattern, name), is(true)); } } } } return nodes; } protected static class NumShards { public final int numPrimaries; public final int numReplicas; public final int totalNumShards; public final int dataCopies; private NumShards(int numPrimaries, int numReplicas) { this.numPrimaries = numPrimaries; this.numReplicas = numReplicas; this.dataCopies = numReplicas + 1; this.totalNumShards = numPrimaries * dataCopies; } } | /**
* Asserts that all shards are allocated on nodes matching the given node pattern.
*/ | Asserts that all shards are allocated on nodes matching the given node pattern | assertAllShardsOnNodes | {
"repo_name": "zkidkid/elasticsearch",
"path": "test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java",
"license": "apache-2.0",
"size": 101030
} | [
"java.util.HashSet",
"java.util.Set",
"org.elasticsearch.cluster.ClusterState",
"org.elasticsearch.cluster.routing.IndexRoutingTable",
"org.elasticsearch.cluster.routing.IndexShardRoutingTable",
"org.elasticsearch.cluster.routing.ShardRouting",
"org.elasticsearch.common.regex.Regex",
"org.hamcrest.Matchers"
] | import java.util.HashSet; import java.util.Set; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.regex.Regex; import org.hamcrest.Matchers; | import java.util.*; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.routing.*; import org.elasticsearch.common.regex.*; import org.hamcrest.*; | [
"java.util",
"org.elasticsearch.cluster",
"org.elasticsearch.common",
"org.hamcrest"
] | java.util; org.elasticsearch.cluster; org.elasticsearch.common; org.hamcrest; | 2,596,602 |
public ChannelFuture writePriority(ChannelHandlerContext ctx, int streamId,
int streamDependency, short weight, boolean exclusive, ChannelPromise promise) {
try {
if (connection.isGoAway()) {
throw protocolError("Sending priority after connection going away.");
}
// Update the priority on this stream.
connection.requireStream(streamId).setPriority(streamDependency, weight, exclusive);
ChannelFuture future = frameWriter.writePriority(ctx, streamId, streamDependency, weight,
exclusive, promise);
ctx.flush();
return future;
} catch (Http2Exception e) {
return promise.setFailure(e);
}
} | ChannelFuture function(ChannelHandlerContext ctx, int streamId, int streamDependency, short weight, boolean exclusive, ChannelPromise promise) { try { if (connection.isGoAway()) { throw protocolError(STR); } connection.requireStream(streamId).setPriority(streamDependency, weight, exclusive); ChannelFuture future = frameWriter.writePriority(ctx, streamId, streamDependency, weight, exclusive, promise); ctx.flush(); return future; } catch (Http2Exception e) { return promise.setFailure(e); } } | /**
* Writes (and flushes) the given priority to the remote endpoint.
*/ | Writes (and flushes) the given priority to the remote endpoint | writePriority | {
"repo_name": "kamyu104/netty",
"path": "codec-http2/src/main/java/io/netty/handler/codec/http2/AbstractHttp2ConnectionHandler.java",
"license": "apache-2.0",
"size": 45115
} | [
"io.netty.channel.ChannelFuture",
"io.netty.channel.ChannelHandlerContext",
"io.netty.channel.ChannelPromise",
"io.netty.handler.codec.http2.Http2Exception"
] | import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; import io.netty.handler.codec.http2.Http2Exception; | import io.netty.channel.*; import io.netty.handler.codec.http2.*; | [
"io.netty.channel",
"io.netty.handler"
] | io.netty.channel; io.netty.handler; | 866,442 |
public void mouseEntered(MouseEvent e)
{
handleEvent(e);
} | void function(MouseEvent e) { handleEvent(e); } | /**
* This method is called when the mouse enters the glass pane.
*
* @param e The MouseEvent.
*/ | This method is called when the mouse enters the glass pane | mouseEntered | {
"repo_name": "aosm/gcc_40",
"path": "libjava/javax/swing/plaf/basic/BasicInternalFrameUI.java",
"license": "gpl-2.0",
"size": 44949
} | [
"java.awt.event.MouseEvent"
] | import java.awt.event.MouseEvent; | import java.awt.event.*; | [
"java.awt"
] | java.awt; | 1,097,976 |
@CalledByNative
public static byte[] getPrivateKeyEncodedBytes(PrivateKey key) {
return key.getEncoded();
} | static byte[] function(PrivateKey key) { return key.getEncoded(); } | /**
* Returns the encoded data corresponding to a given PrivateKey.
* Note that this will fail for platform keys on Android 4.0.4
* and higher. It can be used on 4.0.3 and older platforms to
* route around the platform bug described below.
* @param key A PrivateKey instance
* @return encoded key as PKCS#8 byte array, can be null.
*/ | Returns the encoded data corresponding to a given PrivateKey. Note that this will fail for platform keys on Android 4.0.4 and higher. It can be used on 4.0.3 and older platforms to route around the platform bug described below | getPrivateKeyEncodedBytes | {
"repo_name": "ZhanJohn/AG_Modules",
"path": "ag_webview/src/main/java/org/chromium/net/AndroidKeyStore.java",
"license": "gpl-2.0",
"size": 12673
} | [
"java.security.PrivateKey"
] | import java.security.PrivateKey; | import java.security.*; | [
"java.security"
] | java.security; | 1,535,727 |
RolloutRequest apply(Context context);
} | RolloutRequest apply(Context context); } | /**
* Executes the update request.
*
* @param context The context to associate with this operation.
* @return the updated resource.
*/ | Executes the update request | apply | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/deploymentmanager/azure-resourcemanager-deploymentmanager/src/main/java/com/azure/resourcemanager/deploymentmanager/models/RolloutRequest.java",
"license": "mit",
"size": 14485
} | [
"com.azure.core.util.Context"
] | import com.azure.core.util.Context; | import com.azure.core.util.*; | [
"com.azure.core"
] | com.azure.core; | 1,404,263 |
public static java.util.List extractGpList(ims.domain.ILightweightDomainFactory domainFactory, ims.core.vo.GpLiteVoCollection voCollection)
{
return extractGpList(domainFactory, voCollection, null, new HashMap());
}
| static java.util.List function(ims.domain.ILightweightDomainFactory domainFactory, ims.core.vo.GpLiteVoCollection voCollection) { return extractGpList(domainFactory, voCollection, null, new HashMap()); } | /**
* Create the ims.core.resource.people.domain.objects.Gp list from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/ | Create the ims.core.resource.people.domain.objects.Gp list from the value object collection | extractGpList | {
"repo_name": "openhealthcare/openMAXIMS",
"path": "openmaxims_workspace/ValueObjects/src/ims/core/vo/domain/GpLiteVoAssembler.java",
"license": "agpl-3.0",
"size": 15928
} | [
"java.util.HashMap"
] | import java.util.HashMap; | import java.util.*; | [
"java.util"
] | java.util; | 891,905 |
public IScope getScope();
| IScope function(); | /**
* Get the scope this is connected to.
*
* @return The connected scope
*/ | Get the scope this is connected to | getScope | {
"repo_name": "bigbluebutton/red5-server-common",
"path": "src/main/java/org/red5/server/api/IConnection.java",
"license": "apache-2.0",
"size": 8578
} | [
"org.red5.server.api.scope.IScope"
] | import org.red5.server.api.scope.IScope; | import org.red5.server.api.scope.*; | [
"org.red5.server"
] | org.red5.server; | 125,175 |
@ReplacedBy("destinationDirectory")
@Deprecated
public File getDestinationDir() {
// Used in Kotlin plugin - needs updating there and bumping the version first. Followup with https://github.com/gradle/gradle/issues/16783
return destinationDirectory.getAsFile().getOrNull();
} | @ReplacedBy(STR) File function() { return destinationDirectory.getAsFile().getOrNull(); } | /**
* Returns the directory to generate the {@code .class} files into.
*
* @return The destination directory.
*
* @deprecated Use {@link #getDestinationDirectory()} instead. This method will be removed in Gradle 8.0.
*/ | Returns the directory to generate the .class files into | getDestinationDir | {
"repo_name": "gradle/gradle",
"path": "subprojects/language-jvm/src/main/java/org/gradle/api/tasks/compile/AbstractCompile.java",
"license": "apache-2.0",
"size": 7857
} | [
"java.io.File",
"org.gradle.api.model.ReplacedBy"
] | import java.io.File; import org.gradle.api.model.ReplacedBy; | import java.io.*; import org.gradle.api.model.*; | [
"java.io",
"org.gradle.api"
] | java.io; org.gradle.api; | 723,642 |
protected boolean isLandscape() {
return (getResources().getConfiguration().orientation
== Configuration.ORIENTATION_LANDSCAPE);
} | boolean function() { return (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE); } | /**
* Returns true if current orientation is LANDSCAPE, false otherwise.
* @return
*/ | Returns true if current orientation is LANDSCAPE, false otherwise | isLandscape | {
"repo_name": "miptliot/edx-app-android",
"path": "VideoLocker/src/main/java/org/edx/mobile/view/MyRecentVideosFragment.java",
"license": "apache-2.0",
"size": 28818
} | [
"android.content.res.Configuration"
] | import android.content.res.Configuration; | import android.content.res.*; | [
"android.content"
] | android.content; | 354,346 |
//BattleTac code starts
//Added by Krisztian Schaffer, 2010.02.26
public void setImageProcessor(final ImageProcessor processor) {
imageProcessor = processor;
for (int i = 0; i < size; i++) {
valid[i] = false;
tiles[i] = null;
images[i] = null;
}
}
//BattleTac code ends
| void function(final ImageProcessor processor) { imageProcessor = processor; for (int i = 0; i < size; i++) { valid[i] = false; tiles[i] = null; images[i] = null; } } | /**
* Sets the tile image processor. The given ImageProcessor will process every
* tile which is added after this call. This method also clears the cache.
*
* @param processor
* the new ImageProcessor to use, can be null to delete the current
* processor.
*/ | Sets the tile image processor. The given ImageProcessor will process every tile which is added after this call. This method also clears the cache | setImageProcessor | {
"repo_name": "ryfx/maps-lib-nutiteq",
"path": "src/com/mgmaps/cache/ScreenCache.java",
"license": "gpl-2.0",
"size": 6067
} | [
"com.nutiteq.ui.ImageProcessor"
] | import com.nutiteq.ui.ImageProcessor; | import com.nutiteq.ui.*; | [
"com.nutiteq.ui"
] | com.nutiteq.ui; | 949,841 |
@SuppressWarnings({"IOResourceOpenedButNotSafelyClosed"})
public synchronized T fromResource(String resource)
throws IOException, JsonParseException, JsonMappingException {
InputStream resStream = null;
try {
resStream = this.getClass().getResourceAsStream(resource);
if (resStream == null) {
throw new FileNotFoundException(resource);
}
return mapper.readValue(resStream, classType);
} catch (IOException e) {
LOG.error("Exception while parsing json resource {}: {}", resource, e);
throw e;
} finally {
IOUtils.closeStream(resStream);
}
} | @SuppressWarnings({STR}) synchronized T function(String resource) throws IOException, JsonParseException, JsonMappingException { InputStream resStream = null; try { resStream = this.getClass().getResourceAsStream(resource); if (resStream == null) { throw new FileNotFoundException(resource); } return mapper.readValue(resStream, classType); } catch (IOException e) { LOG.error(STR, resource, e); throw e; } finally { IOUtils.closeStream(resStream); } } | /**
* Convert from a JSON file
* @param resource input file
* @return the parsed JSON
* @throws IOException IO problems
* @throws JsonParseException If the input is not well-formatted
* @throws JsonMappingException failure to map from the JSON to this class
*/ | Convert from a JSON file | fromResource | {
"repo_name": "ronny-macmaster/hadoop",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java",
"license": "apache-2.0",
"size": 10624
} | [
"com.fasterxml.jackson.core.JsonParseException",
"com.fasterxml.jackson.databind.JsonMappingException",
"java.io.FileNotFoundException",
"java.io.IOException",
"java.io.InputStream",
"org.apache.hadoop.io.IOUtils"
] | import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import org.apache.hadoop.io.IOUtils; | import com.fasterxml.jackson.core.*; import com.fasterxml.jackson.databind.*; import java.io.*; import org.apache.hadoop.io.*; | [
"com.fasterxml.jackson",
"java.io",
"org.apache.hadoop"
] | com.fasterxml.jackson; java.io; org.apache.hadoop; | 2,819,595 |
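A hedged sketch of the same "read JSON from a classpath resource" pattern as the fromResource record above, using plain Jackson only (no Hadoop helpers); the class name is a made-up example.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;

public class JsonResourceReader {
    // Loads the named classpath resource and binds it to the given type with Jackson.
    public static <T> T fromResource(String resource, Class<T> type) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        try (InputStream in = JsonResourceReader.class.getResourceAsStream(resource)) {
            if (in == null) {
                throw new IOException("Resource not found: " + resource);
            }
            return mapper.readValue(in, type);
        }
    }
}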
public static Set<PropertySet> resolveSetImageRefProperties(Set<PropertySet> propertySets) {
for (PropertySet ps : propertySets) {
if (isMultiHostPropertySet(ps)) {
resolveImageRefProperties(ps);
}
}
return propertySets;
} | static Set<PropertySet> function(Set<PropertySet> propertySets) { for (PropertySet ps : propertySets) { if (isMultiHostPropertySet(ps)) { resolveImageRefProperties(ps); } } return propertySets; } | /**
* Resolve all IMAGE_REF Properties in a Set of PropertySets to absolute URI. The absolute URI will be set per
* Property as a new value.
*
* @param propertySets Set to be changed
* @return Set of PropertySets with new IMAGE_REF Property values
*/ | Resolve all IMAGE_REF Properties in a Set of PropertySets to absolute URI. The absolute URI will be set per Property as a new value | resolveSetImageRefProperties | {
"repo_name": "vtkio/vtk",
"path": "src/main/java/vtk/web/search/MultiHostUtil.java",
"license": "bsd-3-clause",
"size": 8711
} | [
"java.util.Set"
] | import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 255,726 |
public void serialize(final DataOutput pOutput) throws TTIOException {
try {
pOutput.writeLong(getFirstChildKey());
pOutput.writeLong(getRightSiblingKey());
pOutput.writeLong(getLeftSiblingKey());
pOutput.writeLong(getChildCount());
} catch (final IOException exc) {
throw new TTIOException(exc);
}
} | void function(final DataOutput pOutput) throws TTIOException { try { pOutput.writeLong(getFirstChildKey()); pOutput.writeLong(getRightSiblingKey()); pOutput.writeLong(getLeftSiblingKey()); pOutput.writeLong(getChildCount()); } catch (final IOException exc) { throw new TTIOException(exc); } } | /**
 * Serializing to the given DataOutput
*
* @param pOutput
* to serialize to
* @throws TTIOException
 */ | Serializing to the given DataOutput | serialize | {
"repo_name": "sebastiangraf/treetank",
"path": "coremodules/node/src/main/java/org/treetank/data/delegates/StructNodeDelegate.java",
"license": "bsd-3-clause",
"size": 9343
} | [
"java.io.DataOutput",
"java.io.IOException",
"org.treetank.exception.TTIOException"
] | import java.io.DataOutput; import java.io.IOException; import org.treetank.exception.TTIOException; | import java.io.*; import org.treetank.exception.*; | [
"java.io",
"org.treetank.exception"
] | java.io; org.treetank.exception; | 2,700,175 |
public static List<Path> findByFromPointLazy(Point point) {
return find.where().eq("fromPoint", point).findList();
} | static List<Path> function(Point point) { return find.where().eq(STR, point).findList(); } | /**
 * Returns all existing paths that start at this point
* lazy loading
*
* @return List<Path>
 */ | Returns all existing paths that start at this point, lazy loading | findByFromPointLazy | {
"repo_name": "stefanil/play2-prototypen",
"path": "treasurehunt/app/models/Path.java",
"license": "apache-2.0",
"size": 2565
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 908,618 |
RealMatrix getMeasurementMatrix(); | RealMatrix getMeasurementMatrix(); | /**
* Returns the measurement matrix.
*
* @return the measurement matrix
*/ | Returns the measurement matrix | getMeasurementMatrix | {
"repo_name": "happyjack27/autoredistrict",
"path": "src/org/apache/commons/math3/filter/MeasurementModel.java",
"license": "gpl-3.0",
"size": 1649
} | [
"org.apache.commons.math3.linear.RealMatrix"
] | import org.apache.commons.math3.linear.RealMatrix; | import org.apache.commons.math3.linear.*; | [
"org.apache.commons"
] | org.apache.commons; | 1,814,438 |
@SuppressWarnings("checkstyle:OverloadMethodsDeclarationOrder")
public static boolean isContentAlwaysEmpty(int statusCode) {
switch (statusCode) {
case 204:
case 205:
case 304:
return true;
}
return false;
}
private final int code;
private final String codeAsText;
private final HttpStatusClass codeClass;
private final String reasonPhrase;
private final HttpData httpData;
private final String strVal;
private HttpStatus(int statusCode) {
this(statusCode, HttpStatusClass.valueOf(statusCode).defaultReasonPhrase() + " (" + statusCode + ')');
}
public HttpStatus(int statusCode, @Nullable String reasonPhrase) {
if (statusCode < 0) {
throw new IllegalArgumentException(
"statusCode: " + statusCode + " (expected: 0+)");
}
if (reasonPhrase == null) {
throw new NullPointerException("reasonPhrase");
}
for (int i = 0; i < reasonPhrase.length(); i++) {
final char c = reasonPhrase.charAt(i);
// Check prohibited characters.
switch (c) {
case '\n':
case '\r':
throw new IllegalArgumentException(
"reasonPhrase contains one of the following prohibited characters: " +
"\\r\\n: " + reasonPhrase);
}
}
code = statusCode;
codeAsText = Integer.toString(statusCode);
codeClass = HttpStatusClass.valueOf(statusCode);
this.reasonPhrase = reasonPhrase;
strVal = new StringBuilder(reasonPhrase.length() + 5).append(statusCode)
.append(' ')
.append(reasonPhrase)
.toString();
httpData = HttpData.ofUtf8(strVal);
} | @SuppressWarnings(STR) static boolean function(int statusCode) { switch (statusCode) { case 204: case 205: case 304: return true; } return false; } private final int code; private final String codeAsText; private final HttpStatusClass codeClass; private final String reasonPhrase; private final HttpData httpData; private final String strVal; private HttpStatus(int statusCode) { this(statusCode, HttpStatusClass.valueOf(statusCode).defaultReasonPhrase() + STR + statusCode + ')'); } public HttpStatus(int statusCode, @Nullable String reasonPhrase) { if (statusCode < 0) { throw new IllegalArgumentException( STR + statusCode + STR); } if (reasonPhrase == null) { throw new NullPointerException(STR); } for (int i = 0; i < reasonPhrase.length(); i++) { final char c = reasonPhrase.charAt(i); switch (c) { case '\n': case '\r': throw new IllegalArgumentException( STR + STR + reasonPhrase); } } code = statusCode; codeAsText = Integer.toString(statusCode); codeClass = HttpStatusClass.valueOf(statusCode); this.reasonPhrase = reasonPhrase; strVal = new StringBuilder(reasonPhrase.length() + 5).append(statusCode) .append(' ') .append(reasonPhrase) .toString(); httpData = HttpData.ofUtf8(strVal); } | /**
* Returns {@code true} if the content of the response for the specified status code is expected to
 * be always empty (204, 205 and 304 responses).
 */ | Returns true if the content of the response for the specified status code is expected to be always empty (204, 205 and 304 responses) | isContentAlwaysEmpty | {
"repo_name": "minwoox/armeria",
"path": "core/src/main/java/com/linecorp/armeria/common/HttpStatus.java",
"license": "apache-2.0",
"size": 16721
} | [
"javax.annotation.Nullable"
] | import javax.annotation.Nullable; | import javax.annotation.*; | [
"javax.annotation"
] | javax.annotation; | 1,699,448 |
myRobot.setSafetyEnabled(false);
myRobot.driveCartesian(0.0, -0.5, 0.0); // drive forwards half speed
Timer.delay(2.0); // for 2 seconds
myRobot.driveCartesian(0.0, 0.0, 0.0); // stop robot
}
static final double kOffBalanceAngleThresholdDegrees = 10;
static final double kOonBalanceAngleThresholdDegrees = 5; | myRobot.setSafetyEnabled(false); myRobot.driveCartesian(0.0, -0.5, 0.0); Timer.delay(2.0); myRobot.driveCartesian(0.0, 0.0, 0.0); } static final double kOffBalanceAngleThresholdDegrees = 10; static final double kOonBalanceAngleThresholdDegrees = 5; | /**
* Drive left & right motors for 2 seconds then stop
*/ | Drive left & right motors for 2 seconds then stop | autonomous | {
"repo_name": "Beachbot330/navxmxp",
"path": "roborio/java/navXMXP_Java_AutoBalance/src/org/usfirst/frc/team2465/robot/Robot.java",
"license": "mit",
"size": 5553
} | [
"edu.wpi.first.wpilibj.Timer"
] | import edu.wpi.first.wpilibj.Timer; | import edu.wpi.first.wpilibj.*; | [
"edu.wpi.first"
] | edu.wpi.first; | 1,692,805 |
void updateWorkspaceProperties(final Workspace workspace, final String userLogin, final Date saveDate); | void updateWorkspaceProperties(final Workspace workspace, final String userLogin, final Date saveDate); | /**
* Update workspace properties.
* Not Layout nor Views
*
* @param workspace the workspace
* @param userLogin the user login
* @param saveDate the save date
*/ | Update workspace properties. Not Layout nor Views | updateWorkspaceProperties | {
"repo_name": "sguisse/InfoWkspOrga",
"path": "10-Application/Business/InfoWkspOrga-Biz/src/main/java/com/sgu/infowksporga/business/dao/api/IWorkspaceDao.java",
"license": "apache-2.0",
"size": 1024
} | [
"com.sgu.infowksporga.business.entity.Workspace",
"java.util.Date"
] | import com.sgu.infowksporga.business.entity.Workspace; import java.util.Date; | import com.sgu.infowksporga.business.entity.*; import java.util.*; | [
"com.sgu.infowksporga",
"java.util"
] | com.sgu.infowksporga; java.util; | 1,457,252 |
private Slice[] createSampleDataset() {
Slice[] result = new Slice[4];
int passed = 0;
int failed = 0;
int skipped = 0;
int notAnalyzed = 0;
for (int i = 0; i < campaign.size(); i++) {
Step cmdLine = (Step) campaign.get(i);
if (cmdLine.getVerdict() == Step.PASSED) {
passed++;
} else if (cmdLine.getVerdict() == Step.FAILED) {
failed++;
} else if (cmdLine.getVerdict() == Step.SKIPPED) {
skipped++;
} else if (cmdLine.getVerdict() == Step.NONE) {
notAnalyzed++;
}
}
int total = passed + failed + skipped + notAnalyzed;
result[0] = new Slice(Double.valueOf(passed * 100. / total), Color.GREEN, "Passed");
result[1] = new Slice(Double.valueOf(failed * 100. / total), Color.RED, "Failed");
result[2] = new Slice(Double.valueOf(skipped * 100. / total), Color.ORANGE, "Skipped");
result[3] = new Slice(Double.valueOf(notAnalyzed * 100. / total), Color.GRAY,
"Not analyzed");
return result;
} | Slice[] function() { Slice[] result = new Slice[4]; int passed = 0; int failed = 0; int skipped = 0; int notAnalyzed = 0; for (int i = 0; i < campaign.size(); i++) { Step cmdLine = (Step) campaign.get(i); if (cmdLine.getVerdict() == Step.PASSED) { passed++; } else if (cmdLine.getVerdict() == Step.FAILED) { failed++; } else if (cmdLine.getVerdict() == Step.SKIPPED) { skipped++; } else if (cmdLine.getVerdict() == Step.NONE) { notAnalyzed++; } } int total = passed + failed + skipped + notAnalyzed; result[0] = new Slice(Double.valueOf(passed * 100. / total), Color.GREEN, STR); result[1] = new Slice(Double.valueOf(failed * 100. / total), Color.RED, STR); result[2] = new Slice(Double.valueOf(skipped * 100. / total), Color.ORANGE, STR); result[3] = new Slice(Double.valueOf(notAnalyzed * 100. / total), Color.GRAY, STR); return result; } | /**
* Creates data set with percentage of passed, failed, skipped and not
* analysed.
*
* @return the data set
*/ | Creates data set with percentage of passed, failed, skipped and not analysed | createSampleDataset | {
"repo_name": "Orange-OpenSource/matos-tool",
"path": "matos/src/main/java/com/orange/matos/StatisticTool.java",
"license": "apache-2.0",
"size": 6991
} | [
"com.orange.matos.core.Step",
"java.awt.Color"
] | import com.orange.matos.core.Step; import java.awt.Color; | import com.orange.matos.core.*; import java.awt.*; | [
"com.orange.matos",
"java.awt"
] | com.orange.matos; java.awt; | 2,695,578 |
public void logp(Level level, String sourceClassName, String sourceMethodName,
String msg, Object... params) {
if (!isLoggable(level)) {
return;
}
Record record = new Record(level, msg, sourceClassName, sourceMethodName);
if (params != null && params.length != 0) {
record.setParameters(params);
}
nameAndLog(record);
} | void function(Level level, String sourceClassName, String sourceMethodName, String msg, Object... params) { if (!isLoggable(level)) { return; } Record record = new Record(level, msg, sourceClassName, sourceMethodName); if (params != null && params.length != 0) { record.setParameters(params); } nameAndLog(record); } | /**
* Logs an event with a known context, similar to
* {@link java.util.logging.Logger#logp(java.util.logging.Level, String, String, String)}.
* However, this delays formatting of the message.
* @param level one of the message level identifiers, e.g. SEVERE
* @param sourceClassName the class generating this log event
* @param sourceMethodName the method generating this log event
* @param msg the basic message string
* @param params the parameters for the message string
*/ | Logs an event with a known context, similar to <code>java.util.logging.Logger#logp(java.util.logging.Level, String, String, String)</code>. However, this delays formatting of the message | logp | {
"repo_name": "simonrrr/gdata-java-client",
"path": "java/src/com/google/gdata/util/common/logging/FormattingLogger.java",
"license": "apache-2.0",
"size": 37949
} | [
"java.util.logging.Level"
] | import java.util.logging.Level; | import java.util.logging.*; | [
"java.util"
] | java.util; | 1,380,602 |
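For comparison with the FormattingLogger.logp record above, the equivalent call on the standard JDK java.util.logging.Logger looks like this (standard API only; the deferred formatting described in the record is specific to FormattingLogger).

import java.util.logging.Level;
import java.util.logging.Logger;

public class LogpExample {
    private static final Logger LOG = Logger.getLogger(LogpExample.class.getName());

    public static void main(String[] args) {
        // {0} and {1} are substituted from the parameter array when the record is published.
        LOG.logp(Level.INFO, "LogpExample", "main", "Processed {0} items in {1} ms",
                 new Object[] {42, 17});
    }
}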
protected ConnectionFactory getTargetConnectionFactory() {
return this.targetConnectionFactory;
} | ConnectionFactory function() { return this.targetConnectionFactory; } | /**
* Return the target ConnectionFactory that this ConnectionFactory should delegate to.
*/ | Return the target ConnectionFactory that this ConnectionFactory should delegate to | getTargetConnectionFactory | {
"repo_name": "mattxia/spring-2.5-analysis",
"path": "src/org/springframework/jms/connection/TransactionAwareConnectionFactoryProxy.java",
"license": "apache-2.0",
"size": 13005
} | [
"javax.jms.ConnectionFactory"
] | import javax.jms.ConnectionFactory; | import javax.jms.*; | [
"javax.jms"
] | javax.jms; | 1,205,340 |
public Joint isEmpty() {
return setEvaluation(new Evaluation<Collection<E>>() { | Joint function() { return setEvaluation(new Evaluation<Collection<E>>() { | /**
* Ensures that the target does not contain elements.
*
* @return an instance of {@link Joint} class
*/ | Ensures that the target does not contain elements | isEmpty | {
"repo_name": "holmes-org/holmes-validation",
"path": "src/main/java/org/holmes/evaluator/CollectionEvaluator.java",
"license": "mit",
"size": 5054
} | [
"java.util.Collection",
"org.holmes.Joint"
] | import java.util.Collection; import org.holmes.Joint; | import java.util.*; import org.holmes.*; | [
"java.util",
"org.holmes"
] | java.util; org.holmes; | 497,976 |
private Session getSession() {
// Lock here
synchronized (sf) {
while (sessionLocked) {
try {
sf.wait();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
throw new ConcurrencyFailureException(
"Session wait got interrupted", e);
}
}
// Create Session interceptor...
Session session = sf.openSession();
//sessionLocked = true;
return (session);
}
}
| Session function() { synchronized (sf) { while (sessionLocked) { try { sf.wait(); } catch (InterruptedException e) { e.printStackTrace(); throw new ConcurrencyFailureException( STR, e); } } Session session = sf.openSession(); return (session); } } | /**
 * Retrieve a session. There will exist only ONE open session per
* application. This means that the closeSession() MUST be called once a
* getSession() has been called, else the system will be blocked!
*
* @return
 */ | Retrieve a session. There will exist only ONE open session per application. This means that the closeSession() MUST be called once a getSession() has been called, else the system will be blocked | getSession | {
"repo_name": "afnet/OneCMDBwithMaven",
"path": "src/org.onecmdb.core/src/main/java/org/onecmdb/core/internal/storage/hibernate/HibernateDao.java",
"license": "gpl-2.0",
"size": 37963
} | [
"org.hibernate.Session",
"org.springframework.dao.ConcurrencyFailureException"
] | import org.hibernate.Session; import org.springframework.dao.ConcurrencyFailureException; | import org.hibernate.*; import org.springframework.dao.*; | [
"org.hibernate",
"org.springframework.dao"
] | org.hibernate; org.springframework.dao; | 156,321 |
public static TreeMap<String, MetaAttribute> toPIMSMetaAttributes(
final Map<String, MetaAttribute> attributes) {
final TreeMap<String, MetaAttribute> attr = new TreeMap<String, MetaAttribute>();
for (final Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) {
final Map.Entry<String, MetaAttribute> entry = (Map.Entry<String, MetaAttribute>) iter.next();
//final String attrName = entry.getKey();
final MetaAttribute attribute = entry.getValue();
if (attribute != null) {
final MetaAttribute pattr = ServletUtil.getPIMSMetaAttribute(attribute);
// This cannot be only PIMSMetaAttribute
// because for some classes there may be no such attributes defined.
// This is more likely to be mix of both
//System.out.println("toPIMSMetaAttributes for [" + pattr.getName() + ":" + pattr.toString()
// + "]");
attr.put(pattr.getName(), pattr);
}
}
return attr;
}
| static TreeMap<String, MetaAttribute> function( final Map<String, MetaAttribute> attributes) { final TreeMap<String, MetaAttribute> attr = new TreeMap<String, MetaAttribute>(); for (final Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { final Map.Entry<String, MetaAttribute> entry = (Map.Entry<String, MetaAttribute>) iter.next(); final MetaAttribute attribute = entry.getValue(); if (attribute != null) { final MetaAttribute pattr = ServletUtil.getPIMSMetaAttribute(attribute); attr.put(pattr.getName(), pattr); } } return attr; } | /**
* This takes a Map metaAttribute.Name (key)-> MetaAttribute (value) and converts it to a Map
* metaAttribute.Name (key)-> PIMSMetaAttribute (value) if one has been defined; otherwise it will stay
* MetaAttribute
*
* @param attributes TreeMap<String, MetaAttribute>
* @return attributes TreeMap<String, MetaAttribute(PIMSMetaAttribute)>
*/ | This takes a Map metaAttribute.Name (key)-> MetaAttribute (value) and converts it to a Map metaAttribute.Name (key)-> PIMSMetaAttribute (value) if one has been defined; otherwise it will stay MetaAttribute | toPIMSMetaAttributes | {
"repo_name": "homiak/pims-lims",
"path": "src/presentation/org/pimslims/presentation/ServletUtil.java",
"license": "bsd-2-clause",
"size": 25653
} | [
"java.util.Iterator",
"java.util.Map",
"java.util.TreeMap",
"org.pimslims.metamodel.MetaAttribute"
] | import java.util.Iterator; import java.util.Map; import java.util.TreeMap; import org.pimslims.metamodel.MetaAttribute; | import java.util.*; import org.pimslims.metamodel.*; | [
"java.util",
"org.pimslims.metamodel"
] | java.util; org.pimslims.metamodel; | 1,666,913 |
public String getBMPString() throws IOException {
return readString(DerValue.tag_BMPString, "BMP",
"UnicodeBigUnmarked");
} | String function() throws IOException { return readString(DerValue.tag_BMPString, "BMP", STR); } | /**
* Read a string that was encoded as a BMPString DER value.
*/ | Read a string that was encoded as a BMPString DER value | getBMPString | {
"repo_name": "openjdk/jdk7u",
"path": "jdk/src/share/classes/sun/security/util/DerInputStream.java",
"license": "gpl-2.0",
"size": 22989
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,271,741 |
public Raster readRaster(InputStream in, JPEG2000CodecOptions options)
throws IOException, ServiceException; | Raster function(InputStream in, JPEG2000CodecOptions options) throws IOException, ServiceException; | /**
* Reads an image into a raster with JAI Image I/O using the JPEG 2000 codec.
* @param in Target input stream.
* @param options Options for the JPEG 2000 codec.
* @return An AWT image raster.
* @throws IOException Thrown if there is an error reading from or writing
* to one of the target streams / buffers.
* @throws ServiceException Thrown if there is an error initializing or
* interacting with the dependencies of the service.
*/ | Reads an image into a raster with JAI Image I/O using the JPEG 2000 codec | readRaster | {
"repo_name": "berl/v3draw-bioformats",
"path": "components/formats-bsd/src/loci/formats/services/JAIIIOService.java",
"license": "gpl-2.0",
"size": 5515
} | [
"java.awt.image.Raster",
"java.io.IOException",
"java.io.InputStream"
] | import java.awt.image.Raster; import java.io.IOException; import java.io.InputStream; | import java.awt.image.*; import java.io.*; | [
"java.awt",
"java.io"
] | java.awt; java.io; | 996,081 |
Collection<N> getNodes(); | Collection<N> getNodes(); | /**
* All Redis nodes used by Redisson.
* This collection may change during master change, cluster topology update, etc.
*
* @return collection of nodes
*/ | All Redis nodes used by Redisson. This collection may change during master change, cluster topology update, etc. | getNodes | {
"repo_name": "zhoffice/redisson",
"path": "redisson/src/main/java/org/redisson/api/NodesGroup.java",
"license": "apache-2.0",
"size": 2037
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 2,088,620 |
public synchronized void start() throws LifecycleException {
try {
StandardServer server = (StandardServer) ServerFactory.getServer();
Context context = server.getGlobalNamingContext();
database = (UserDatabase) context.lookup(resourceName);
} catch (Throwable e) {
e.printStackTrace();
log(sm.getString("userDatabaseRealm.lookup", resourceName), e);
database = null;
}
if (database == null) {
throw new LifecycleException
(sm.getString("userDatabaseRealm.noDatabase", resourceName));
}
// Perform normal superclass initialization
super.start();
} | synchronized void function() throws LifecycleException { try { StandardServer server = (StandardServer) ServerFactory.getServer(); Context context = server.getGlobalNamingContext(); database = (UserDatabase) context.lookup(resourceName); } catch (Throwable e) { e.printStackTrace(); log(sm.getString(STR, resourceName), e); database = null; } if (database == null) { throw new LifecycleException (sm.getString(STR, resourceName)); } super.start(); } | /**
* Prepare for active use of the public methods of this Component.
*
* @exception LifecycleException if this component detects a fatal error
* that prevents it from being started
*/ | Prepare for active use of the public methods of this Component | start | {
"repo_name": "devjin24/howtomcatworks",
"path": "bookrefer/jakarta-tomcat-5.0.18-src/jakarta-tomcat-catalina/catalina/src/share/org/apache/catalina/realm/UserDatabaseRealm.java",
"license": "apache-2.0",
"size": 10176
} | [
"javax.naming.Context",
"org.apache.catalina.LifecycleException",
"org.apache.catalina.ServerFactory",
"org.apache.catalina.UserDatabase",
"org.apache.catalina.core.StandardServer"
] | import javax.naming.Context; import org.apache.catalina.LifecycleException; import org.apache.catalina.ServerFactory; import org.apache.catalina.UserDatabase; import org.apache.catalina.core.StandardServer; | import javax.naming.*; import org.apache.catalina.*; import org.apache.catalina.core.*; | [
"javax.naming",
"org.apache.catalina"
] | javax.naming; org.apache.catalina; | 242,182 |
public static void setDefaultViewInsets(final int pTop, final int pLeft, final int pBottom,
final int pRight) {
sDefaultViewInsets = new Insets(pTop, pLeft, pBottom, pRight);
} | static void function(final int pTop, final int pLeft, final int pBottom, final int pRight) { sDefaultViewInsets = new Insets(pTop, pLeft, pBottom, pRight); } | /**
* Sets the default margins for {@code ComponentCells}.
*
* @param pTop The top insets.
* @param pLeft The left insets.
* @param pBottom The bottom insets.
* @param pRight The right insets.
*/ | Sets the default margins for ComponentCells | setDefaultViewInsets | {
"repo_name": "Fuusio/fuusio-app",
"path": "fuusio.api/src/main/java/org/fuusio/api/view/layout/cell/ViewCell.java",
"license": "apache-2.0",
"size": 22568
} | [
"org.fuusio.api.util.Insets"
] | import org.fuusio.api.util.Insets; | import org.fuusio.api.util.*; | [
"org.fuusio.api"
] | org.fuusio.api; | 2,527,852 |
public void drawSegments(Mat _image, Mat lines) {
drawSegments_0(nativeObj, _image.nativeObj, lines.nativeObj);
} | void function(Mat _image, Mat lines) { drawSegments_0(nativeObj, _image.nativeObj, lines.nativeObj); } | /**
* Draws the line segments on a given image.
* @param _image The image, where the lines will be drawn. Should be bigger or equal to the image,
* where the lines were found.
* @param lines A vector of the lines that needed to be drawn.
*/ | Draws the line segments on a given image | drawSegments | {
"repo_name": "HuTianQi/QQ",
"path": "openCVLibrary411/src/main/java/org/opencv/imgproc/LineSegmentDetector.java",
"license": "mit",
"size": 10175
} | [
"org.opencv.core.Mat"
] | import org.opencv.core.Mat; | import org.opencv.core.*; | [
"org.opencv.core"
] | org.opencv.core; | 375,817 |
public long getSumFreq() {
long result = 0;
Iterator<Long> iterator = freqTable.values().iterator();
while (iterator.hasNext()) {
result += iterator.next().longValue();
}
return result;
} | long function() { long result = 0; Iterator<Long> iterator = freqTable.values().iterator(); while (iterator.hasNext()) { result += iterator.next().longValue(); } return result; } | /**
* Returns the sum of all frequencies.
*
* @return the total frequency count.
*/ | Returns the sum of all frequencies | getSumFreq | {
"repo_name": "SpoonLabs/astor",
"path": "examples/math_85/src/java/org/apache/commons/math/stat/Frequency.java",
"license": "gpl-2.0",
"size": 18933
} | [
"java.util.Iterator"
] | import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 907,392 |
public byte[] getByteArray(String par1Str) {
try {
return !this.tagMap.containsKey(par1Str) ? new byte[0] : ((NBTTagByteArray) this.tagMap.get(par1Str)).byteArray;
} catch (ClassCastException var3) {
throw new ReportedException(this.createCrashReport(par1Str, 7, var3));
}
} | byte[] function(String par1Str) { try { return !this.tagMap.containsKey(par1Str) ? new byte[0] : ((NBTTagByteArray) this.tagMap.get(par1Str)).byteArray; } catch (ClassCastException var3) { throw new ReportedException(this.createCrashReport(par1Str, 7, var3)); } } | /**
* Retrieves a byte array using the specified key, or a zero-length array if no such key was stored.
*/ | Retrieves a byte array using the specified key, or a zero-length array if no such key was stored | getByteArray | {
"repo_name": "DirectCodeGraveyard/Minetweak",
"path": "src/main/java/net/minecraft/nbt/NBTTagCompound.java",
"license": "lgpl-3.0",
"size": 12710
} | [
"net.minecraft.src.ReportedException"
] | import net.minecraft.src.ReportedException; | import net.minecraft.src.*; | [
"net.minecraft.src"
] | net.minecraft.src; | 1,058,762 |
public static MappedSamBamSequenceDataSource fromInputFiles(List<File> files, boolean paired, boolean flattenPaired, boolean keepSingletons, SamFilter filter) {
return new MappedSamBamSequenceDataSource(new FileStreamIterator(files), paired, flattenPaired, keepSingletons, filter);
}
@Override
protected void checkSortOrder() { } | static MappedSamBamSequenceDataSource function(List<File> files, boolean paired, boolean flattenPaired, boolean keepSingletons, SamFilter filter) { return new MappedSamBamSequenceDataSource(new FileStreamIterator(files), paired, flattenPaired, keepSingletons, filter); } protected void checkSortOrder() { } | /**
* Construct a pre-mapped SAM or BAM sequence data source from list of SAM or BAM files
* @param files list of the SAM or BAM file to use as a sequence data source
* @param paired true if input will be paired, false otherwise
* @param flattenPaired if <code>paired</code> is false then this will load both arms into a single SDF
* @param keepSingletons if true, then arms without a mate will be output with an empty mate, otherwise such singletons will be dropped
* @param filter this filter will be applied to the sam records
* @return SamBamSequenceDataSource the sequence data source for the inputs
*/ | Construct a pre-mapped SAM or BAM sequence data source from list of SAM or BAM files | fromInputFiles | {
"repo_name": "RealTimeGenomics/rtg-tools",
"path": "src/com/rtg/reader/MappedSamBamSequenceDataSource.java",
"license": "bsd-2-clause",
"size": 5201
} | [
"com.rtg.sam.SamFilter",
"java.io.File",
"java.util.List"
] | import com.rtg.sam.SamFilter; import java.io.File; import java.util.List; | import com.rtg.sam.*; import java.io.*; import java.util.*; | [
"com.rtg.sam",
"java.io",
"java.util"
] | com.rtg.sam; java.io; java.util; | 2,141,573 |
void addFilteredCategory(FileTypeCategory fileTypeCategory); | void addFilteredCategory(FileTypeCategory fileTypeCategory); | /**
* Add all {@link FileType}s belonging to the given {@link FileTypeCategory}.
*
* @param fileTypeCategory
* {@link FileTypeCategory} to be added.
*/ | Add all <code>FileType</code>s belonging to the given <code>FileTypeCategory</code> | addFilteredCategory | {
"repo_name": "leesh77/ngrinder-recorder",
"path": "src/main/java/net/grinder/plugin/http/tcpproxyfilter/FileTypeFilter.java",
"license": "apache-2.0",
"size": 2260
} | [
"net.grinder.plugin.http.tcpproxyfilter.options.FileTypeCategory"
] | import net.grinder.plugin.http.tcpproxyfilter.options.FileTypeCategory; | import net.grinder.plugin.http.tcpproxyfilter.options.*; | [
"net.grinder.plugin"
] | net.grinder.plugin; | 29,502 |
void dropContainerReservation(RMContainer container,
TransactionState transactionState); | void dropContainerReservation(RMContainer container, TransactionState transactionState); | /**
* If the scheduler supports container reservations, this method is used to
* ask the scheduler to drop the reservation for the given container.
*
* @param container
* Reference to reserved container allocation.
*/ | If the scheduler supports container reservations, this method is used to ask the scheduler to drop the reservation for the given container | dropContainerReservation | {
"repo_name": "srijeyanthan/hops",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/PreemptableResourceScheduler.java",
"license": "apache-2.0",
"size": 2090
} | [
"io.hops.ha.common.TransactionState",
"org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer"
] | import io.hops.ha.common.TransactionState; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; | import io.hops.ha.common.*; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.*; | [
"io.hops.ha",
"org.apache.hadoop"
] | io.hops.ha; org.apache.hadoop; | 694,459 |
void start(int durationLimit) {
if (!playing) {
if (database.started() && shouldBeCached() && getCachedPath(youtubeId) == null) {
if (!roomForMoreCache()) {
getLeastPopularCachedSong().delete();
}
String titlePart = title.replaceAll("[<>:\"/\\|*?]", "");
writePath = "\"" + Paths.get(CACHE_PATH + "/" + titlePart + " (" + youtubeId + ")." + FILE_TYPE)
.toAbsolutePath().toString() + "\"";
}
String source = getSourceToUse();
process = AudioUtilities.playAudio(getSourceToUse(), (duration + 1) * 1000, true, null);
if (writePath != null) {
AudioUtilities.writeAudio(source, writePath);
}
playing = true;
passedDurationLimit = false;
limitTimer.cancel();
finishTimer.cancel();
limitTimer = new Timer();
finishTimer = new Timer();
startTime = System.nanoTime();
limitTimer.schedule(new TimerTask() { | void start(int durationLimit) { if (!playing) { if (database.started() && shouldBeCached() && getCachedPath(youtubeId) == null) { if (!roomForMoreCache()) { getLeastPopularCachedSong().delete(); } String titlePart = title.replaceAll("[<>:\"/\\ *?]STRSTR\STR/STR (STR).STR\""; } String source = getSourceToUse(); process = AudioUtilities.playAudio(getSourceToUse(), (duration + 1) * 1000, true, null); if (writePath != null) { AudioUtilities.writeAudio(source, writePath); } playing = true; passedDurationLimit = false; limitTimer.cancel(); finishTimer.cancel(); limitTimer = new Timer(); finishTimer = new Timer(); startTime = System.nanoTime(); limitTimer.schedule(new TimerTask() { | /**
* Starts playing this song.
*
* @param durationLimit
* - the amount of time in seconds that this song is allowed to play if other songs are queued
*/ | Starts playing this song | start | {
"repo_name": "joelamos/SourceRadio",
"path": "src/com/joelchristophel/sourceradio/Song.java",
"license": "gpl-3.0",
"size": 20282
} | [
"java.util.Timer",
"java.util.TimerTask"
] | import java.util.Timer; import java.util.TimerTask; | import java.util.*; | [
"java.util"
] | java.util; | 1,254,473 |
KeyFactory AES = new AESKeyFactory();
KeyFactory DES = new DESKeyFactory();
Key keyFromPassword(char[] password);
Key randomKey();
Key randomKey(int size); | KeyFactory AES = new AESKeyFactory(); KeyFactory DES = new DESKeyFactory(); Key keyFromPassword(char[] password); Key randomKey(); Key randomKey(int size); | /**
* <p>Generates a random key of size <code>size</code>.</p>
* @param size
* @return
*/ | Generates a random key of size <code>size</code> | randomKey | {
"repo_name": "martinwithaar/Encryptor4j",
"path": "src/main/java/org/encryptor4j/factory/KeyFactory.java",
"license": "mit",
"size": 718
} | [
"java.security.Key"
] | import java.security.Key; | import java.security.*; | [
"java.security"
] | java.security; | 82,874 |
public List<Route> getMatchingRoutes(String path)
{
for (List<Route> patternRoutes : routesByPattern.values())
{
if (patternRoutes.get(0).match(path) != null)
{
return Collections.unmodifiableList(patternRoutes);
}
}
return Collections.emptyList();
} | List<Route> function(String path) { for (List<Route> patternRoutes : routesByPattern.values()) { if (patternRoutes.get(0).match(path) != null) { return Collections.unmodifiableList(patternRoutes); } } return Collections.emptyList(); } | /**
* Returns a list of Route instances that the given path resolves to.
*
* @param path the path portion of the URL (e.g. after the domain and port).
* @return A list of Route instances matching the given path. Never null.
*/ | Returns a list of Route instances that the given path resolves to | getMatchingRoutes | {
"repo_name": "gargdeendayal/RestExpress",
"path": "src/java/com/strategicgains/restexpress/route/RouteMapping.java",
"license": "apache-2.0",
"size": 5691
} | [
"java.util.Collections",
"java.util.List"
] | import java.util.Collections; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,644,613 |
private Object loadInstance( String factoryName, ClassLoader cl, IJavaProject jproj, LoadFailureHandler failureHandler )
{
Object f = null;
try
{
Class<?> c = cl.loadClass( factoryName );
f = c.newInstance();
}
catch( Exception e )
{
AptPlugin.trace("Failed to load factory " + factoryName, e); //$NON-NLS-1$
failureHandler.addFailedFactory(factoryName);
}
catch ( NoClassDefFoundError ncdfe )
{
AptPlugin.trace("Failed to load " + factoryName, ncdfe); //$NON-NLS-1$
failureHandler.addFailedFactory(factoryName);
}
return f;
}
| Object function( String factoryName, ClassLoader cl, IJavaProject jproj, LoadFailureHandler failureHandler ) { Object f = null; try { Class<?> c = cl.loadClass( factoryName ); f = c.newInstance(); } catch( Exception e ) { AptPlugin.trace(STR + factoryName, e); failureHandler.addFailedFactory(factoryName); } catch ( NoClassDefFoundError ncdfe ) { AptPlugin.trace(STR + factoryName, ncdfe); failureHandler.addFailedFactory(factoryName); } return f; } | /**
* Wrapper around ClassLoader.loadClass().newInstance() to handle reporting of errors.
*/ | Wrapper around ClassLoader.loadClass().newInstance() to handle reporting of errors | loadInstance | {
"repo_name": "maxeler/eclipse",
"path": "eclipse.jdt.core/org.eclipse.jdt.apt.core/src/org/eclipse/jdt/apt/core/internal/AnnotationProcessorFactoryLoader.java",
"license": "epl-1.0",
"size": 30608
} | [
"org.eclipse.jdt.core.IJavaProject"
] | import org.eclipse.jdt.core.IJavaProject; | import org.eclipse.jdt.core.*; | [
"org.eclipse.jdt"
] | org.eclipse.jdt; | 197,116 |
public synchronized boolean remove(String key) throws IOException {
checkNotClosed();
validateKey(key);
Entry entry = lruEntries.get(key);
if (entry == null || entry.currentEditor != null) {
return false;
}
for (int i = 0; i < valueCount; i++) {
File file = entry.getCleanFile(i);
if (!file.delete()) {
throw new IOException("failed to delete " + file);
}
size -= entry.lengths[i];
entry.lengths[i] = 0;
}
redundantOpCount++;
journalWriter.append(REMOVE + ' ' + key + '\n');
lruEntries.remove(key);
if (journalRebuildRequired()) {
executorService.submit(cleanupCallable);
}
return true;
} | synchronized boolean function(String key) throws IOException { checkNotClosed(); validateKey(key); Entry entry = lruEntries.get(key); if (entry == null entry.currentEditor != null) { return false; } for (int i = 0; i < valueCount; i++) { File file = entry.getCleanFile(i); if (!file.delete()) { throw new IOException(STR + file); } size -= entry.lengths[i]; entry.lengths[i] = 0; } redundantOpCount++; journalWriter.append(REMOVE + ' ' + key + '\n'); lruEntries.remove(key); if (journalRebuildRequired()) { executorService.submit(cleanupCallable); } return true; } | /**
* Drops the entry for {@code key} if it exists and can be removed. Entries
* actively being edited cannot be removed.
*
* @return true if an entry was removed.
*/ | Drops the entry for key if it exists and can be removed. Entries actively being edited cannot be removed | remove | {
"repo_name": "yuifan/pexus4_external_okhttp",
"path": "src/main/java/com/squareup/okhttp/internal/DiskLruCache.java",
"license": "apache-2.0",
"size": 26994
} | [
"java.io.File",
"java.io.IOException"
] | import java.io.File; import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,603,861 |
public static void setProjectSource(UnmarshallingContext context, IFile source) {
context.put(GraphModelSource.PROJECT, source);
} | static void function(UnmarshallingContext context, IFile source) { context.put(GraphModelSource.PROJECT, source); } | /**
* Store the project-based path for the XML source in the supplied
* {@link UnmarshallingContext}.
*/ | Store the project-based path for the XML source in the supplied <code>UnmarshallingContext</code> | setProjectSource | {
"repo_name": "google/depan",
"path": "DepanGraphDoc/prod/src/com/google/devtools/depan/graph_doc/persistence/GraphModelReferenceConverter.java",
"license": "apache-2.0",
"size": 5781
} | [
"com.thoughtworks.xstream.converters.UnmarshallingContext",
"org.eclipse.core.resources.IFile"
] | import com.thoughtworks.xstream.converters.UnmarshallingContext; import org.eclipse.core.resources.IFile; | import com.thoughtworks.xstream.converters.*; import org.eclipse.core.resources.*; | [
"com.thoughtworks.xstream",
"org.eclipse.core"
] | com.thoughtworks.xstream; org.eclipse.core; | 31,229 |
ActionFuture<RestoreSnapshotResponse> restoreSnapshot(RestoreSnapshotRequest request); | ActionFuture<RestoreSnapshotResponse> restoreSnapshot(RestoreSnapshotRequest request); | /**
* Restores a snapshot.
*/ | Restores a snapshot | restoreSnapshot | {
"repo_name": "ern/elasticsearch",
"path": "server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java",
"license": "apache-2.0",
"size": 28517
} | [
"org.elasticsearch.action.ActionFuture",
"org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest",
"org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse"
] | import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; | import org.elasticsearch.action.*; import org.elasticsearch.action.admin.cluster.snapshots.restore.*; | [
"org.elasticsearch.action"
] | org.elasticsearch.action; | 737,642 |
public void injectBlocks(int dataNodeIndex,
Iterable<Block> blocksToInject, String bpid) throws IOException {
if (dataNodeIndex < 0 || dataNodeIndex > dataNodes.size()) {
throw new IndexOutOfBoundsException();
}
final DataNode dn = dataNodes.get(dataNodeIndex).datanode;
final FsDatasetSpi<?> dataSet = DataNodeTestUtils.getFSDataset(dn);
if (!(dataSet instanceof SimulatedFSDataset)) {
throw new IOException("injectBlocks is valid only for SimilatedFSDataset");
}
if (bpid == null) {
bpid = getNamesystem().getBlockPoolId();
}
SimulatedFSDataset sdataset = (SimulatedFSDataset) dataSet;
sdataset.injectBlocks(bpid, blocksToInject);
dataNodes.get(dataNodeIndex).datanode.scheduleAllBlockReport(0);
} | void function(int dataNodeIndex, Iterable<Block> blocksToInject, String bpid) throws IOException { if (dataNodeIndex < 0 dataNodeIndex > dataNodes.size()) { throw new IndexOutOfBoundsException(); } final DataNode dn = dataNodes.get(dataNodeIndex).datanode; final FsDatasetSpi<?> dataSet = DataNodeTestUtils.getFSDataset(dn); if (!(dataSet instanceof SimulatedFSDataset)) { throw new IOException(STR); } if (bpid == null) { bpid = getNamesystem().getBlockPoolId(); } SimulatedFSDataset sdataset = (SimulatedFSDataset) dataSet; sdataset.injectBlocks(bpid, blocksToInject); dataNodes.get(dataNodeIndex).datanode.scheduleAllBlockReport(0); } | /**
* This method is valid only if the data nodes have simulated data
* @param dataNodeIndex - data node in which to inject - the index is the same as for getDataNodes()
* @param blocksToInject - the blocks
* @param bpid - (optional) the block pool id to use for injecting blocks.
* If not supplied then it is queried from the in-process NameNode.
* @throws IOException
* if not simulatedFSDataset
* if any of blocks already exist in the data node
*
*/ | This method is valid only if the data nodes have simulated data | injectBlocks | {
"repo_name": "vlajos/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java",
"license": "apache-2.0",
"size": 105726
} | [
"java.io.IOException",
"org.apache.hadoop.hdfs.protocol.Block",
"org.apache.hadoop.hdfs.server.datanode.DataNode",
"org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils",
"org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset",
"org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi"
] | import java.io.IOException; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils; import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi; | import java.io.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.datanode.*; import org.apache.hadoop.hdfs.server.datanode.fsdataset.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 952,939 |
@NonNull
public Drawable[] getCompoundDrawablesRelative() {
final Drawables dr = mDrawables;
if (dr != null) {
return new Drawable[] {
dr.mDrawableStart, dr.mShowing[Drawables.TOP],
dr.mDrawableEnd, dr.mShowing[Drawables.BOTTOM]
};
} else {
return new Drawable[] { null, null, null, null };
}
} | Drawable[] function() { final Drawables dr = mDrawables; if (dr != null) { return new Drawable[] { dr.mDrawableStart, dr.mShowing[Drawables.TOP], dr.mDrawableEnd, dr.mShowing[Drawables.BOTTOM] }; } else { return new Drawable[] { null, null, null, null }; } } | /**
* Returns drawables for the start, top, end, and bottom borders.
*
* @attr ref android.R.styleable#TextView_drawableStart
* @attr ref android.R.styleable#TextView_drawableTop
* @attr ref android.R.styleable#TextView_drawableEnd
* @attr ref android.R.styleable#TextView_drawableBottom
*/ | Returns drawables for the start, top, end, and bottom borders | getCompoundDrawablesRelative | {
"repo_name": "OmniEvo/android_frameworks_base",
"path": "core/java/android/widget/TextView.java",
"license": "gpl-3.0",
"size": 380764
} | [
"android.graphics.drawable.Drawable"
] | import android.graphics.drawable.Drawable; | import android.graphics.drawable.*; | [
"android.graphics"
] | android.graphics; | 2,627,190 |
@Aspect(advice = org.support.project.ormapping.transaction.Transaction.class)
public LikeCommentsEntity physicalSelectOnKey(Long no) {
String sql = SQLManager.getInstance().getSql("/org/support/project/knowledge/dao/sql/LikeCommentsDao/LikeCommentsDao_physical_select_on_key.sql");
return executeQuerySingle(sql, LikeCommentsEntity.class, no);
} | @Aspect(advice = org.support.project.ormapping.transaction.Transaction.class) LikeCommentsEntity function(Long no) { String sql = SQLManager.getInstance().getSql(STR); return executeQuerySingle(sql, LikeCommentsEntity.class, no); } | /**
* Select data on key.
* @param no no
* @return data
*/ | Select data on key | physicalSelectOnKey | {
"repo_name": "support-project/knowledge",
"path": "src/main/java/org/support/project/knowledge/dao/gen/GenLikeCommentsDao.java",
"license": "apache-2.0",
"size": 16777
} | [
"org.support.project.aop.Aspect",
"org.support.project.knowledge.entity.LikeCommentsEntity",
"org.support.project.ormapping.common.SQLManager"
] | import org.support.project.aop.Aspect; import org.support.project.knowledge.entity.LikeCommentsEntity; import org.support.project.ormapping.common.SQLManager; | import org.support.project.aop.*; import org.support.project.knowledge.entity.*; import org.support.project.ormapping.common.*; | [
"org.support.project"
] | org.support.project; | 1,450,141 |
private void tstUnknownJobCounters() throws Exception {
IgniteHadoopClientProtocolProvider provider = provider();
ClientProtocol proto = provider.create(config(HadoopAbstractSelfTest.REST_PORT));
try {
proto.getJobCounters(new JobID(UUID.randomUUID().toString(), -1));
fail("exception must be thrown");
}
catch (Exception e) {
assert e instanceof IOException : "wrong error has been thrown";
}
} | void function() throws Exception { IgniteHadoopClientProtocolProvider provider = provider(); ClientProtocol proto = provider.create(config(HadoopAbstractSelfTest.REST_PORT)); try { proto.getJobCounters(new JobID(UUID.randomUUID().toString(), -1)); fail(STR); } catch (Exception e) { assert e instanceof IOException : STR; } } | /**
* Tests job counters retrieval for unknown job id.
*
* @throws Exception If failed.
*/ | Tests job counters retrieval for unknown job id | tstUnknownJobCounters | {
"repo_name": "ilantukh/ignite",
"path": "modules/hadoop/src/test/java/org/apache/ignite/internal/processors/hadoop/impl/client/HadoopClientProtocolSelfTest.java",
"license": "apache-2.0",
"size": 22240
} | [
"java.io.IOException",
"java.util.UUID",
"org.apache.hadoop.mapreduce.JobID",
"org.apache.hadoop.mapreduce.protocol.ClientProtocol",
"org.apache.ignite.hadoop.mapreduce.IgniteHadoopClientProtocolProvider",
"org.apache.ignite.internal.processors.hadoop.impl.HadoopAbstractSelfTest"
] | import java.io.IOException; import java.util.UUID; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.protocol.ClientProtocol; import org.apache.ignite.hadoop.mapreduce.IgniteHadoopClientProtocolProvider; import org.apache.ignite.internal.processors.hadoop.impl.HadoopAbstractSelfTest; | import java.io.*; import java.util.*; import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.mapreduce.protocol.*; import org.apache.ignite.hadoop.mapreduce.*; import org.apache.ignite.internal.processors.hadoop.impl.*; | [
"java.io",
"java.util",
"org.apache.hadoop",
"org.apache.ignite"
] | java.io; java.util; org.apache.hadoop; org.apache.ignite; | 1,107,788 |
List<ChildView> findByChildID(String childID); | List<ChildView> findByChildID(String childID); | /**
* Find by child id.
*
* @param childID
* the child id
* @return the list
*/ | Find by child id | findByChildID | {
"repo_name": "gudipatiharitha/medicmobile",
"path": "modules/medicmobile/trunk/src/main/java/org/medicmobile/service/ChildService.java",
"license": "bsd-3-clause",
"size": 2155
} | [
"java.util.List",
"org.medicmobile.model.ChildView"
] | import java.util.List; import org.medicmobile.model.ChildView; | import java.util.*; import org.medicmobile.model.*; | [
"java.util",
"org.medicmobile.model"
] | java.util; org.medicmobile.model; | 1,118,194 |
ServiceCall<Void> arrayStringCsvValidAsync(List<String> arrayQuery, final ServiceCallback<Void> serviceCallback); | ServiceCall<Void> arrayStringCsvValidAsync(List<String> arrayQuery, final ServiceCallback<Void> serviceCallback); | /**
* Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the csv-array format.
*
* @param arrayQuery an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the csv-array format
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @return the {@link ServiceCall} object
*/ | Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the csv-array format | arrayStringCsvValidAsync | {
"repo_name": "yugangw-msft/autorest",
"path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/url/Queries.java",
"license": "mit",
"size": 53223
} | [
"com.microsoft.rest.ServiceCall",
"com.microsoft.rest.ServiceCallback",
"java.util.List"
] | import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback; import java.util.List; | import com.microsoft.rest.*; import java.util.*; | [
"com.microsoft.rest",
"java.util"
] | com.microsoft.rest; java.util; | 1,955,355 |
static void assertAnnotationsMatch(Annotation[] annotations,
String[] expectedAnnotationStrings) {
// Due to Android's dex format insisting that Annotations are sorted by name the ordering of
// annotations is determined by the (simple?) name of the Annotation, not just the order
// that they are defined in the source. Tests have to be sensitive to that when handling
// mixed usage of "Container" and "Repeated" - the "Container" annotations will be
// discovered before "Repeated" due to their sort ordering.
//
// This code assumes that repeated annotations with the same name will be specified in the
// source their natural sort order when attributes are considered, just to make the testing
// simpler.
// e.g. @Repeated(1) @Repeated(2), never @Repeated(2) @Repeated(1)
// Sorting the expected and actual strings _should_ work providing the assumptions above
// hold. It may mask random ordering issues but it's harder to deal with that while the
// source ordering is no observed. Providing no developers are ascribing meaning to the
// relative order of annotations things should be ok.
Arrays.sort(expectedAnnotationStrings);
String[] actualAnnotationStrings = createAnnotationTestStrings(annotations);
Arrays.sort(actualAnnotationStrings);
assertEquals(
Arrays.asList(expectedAnnotationStrings),
Arrays.asList(actualAnnotationStrings));
} | static void assertAnnotationsMatch(Annotation[] annotations, String[] expectedAnnotationStrings) { Arrays.sort(expectedAnnotationStrings); String[] actualAnnotationStrings = createAnnotationTestStrings(annotations); Arrays.sort(actualAnnotationStrings); assertEquals( Arrays.asList(expectedAnnotationStrings), Arrays.asList(actualAnnotationStrings)); } | /**
* Asserts that the supplied annotations match the expectation Strings. See
* {@link AnnotatedElementTestSupport} for the string syntax.
*/ | Asserts that the supplied annotations match the expectation Strings. See <code>AnnotatedElementTestSupport</code> for the string syntax | assertAnnotationsMatch | {
"repo_name": "google/desugar_jdk_libs",
"path": "jdk11/src/libcore/luni/src/test/java/libcore/java/lang/reflect/annotations/AnnotatedElementTestSupport.java",
"license": "gpl-2.0",
"size": 13763
} | [
"java.lang.annotation.Annotation",
"java.util.Arrays",
"junit.framework.Assert"
] | import java.lang.annotation.Annotation; import java.util.Arrays; import junit.framework.Assert; | import java.lang.annotation.*; import java.util.*; import junit.framework.*; | [
"java.lang",
"java.util",
"junit.framework"
] | java.lang; java.util; junit.framework; | 805,621 |