method (string, 13–441k chars) | clean_method (string, 7–313k chars) | doc (string, 17–17.3k chars) | comment (string, 3–1.42k chars) | method_name (string, 1–273 chars) | extra (dict) | imports (sequence) | imports_info (string, 19–34.8k chars) | cluster_imports_info (string, 15–3.66k chars) | libraries (sequence) | libraries_info (string, 6–661 chars) | id (int64, 0–2.92M)
---|---|---|---|---|---|---|---|---|---|---|---
private void addKeywords(Collection<String> set, DBPKeywordType type) {
for (String keyword : set) {
keyword = keyword.toUpperCase(Locale.ENGLISH);
DBPKeywordType oldType = allKeywords.get(keyword);
if (oldType != DBPKeywordType.KEYWORD) {
// We can't mark keywords as functions or types because keywords are reserved and
// if some identifier conflicts with keyword it must be quoted.
allKeywords.put(keyword, type);
}
}
} | void function(Collection<String> set, DBPKeywordType type) { for (String keyword : set) { keyword = keyword.toUpperCase(Locale.ENGLISH); DBPKeywordType oldType = allKeywords.get(keyword); if (oldType != DBPKeywordType.KEYWORD) { allKeywords.put(keyword, type); } } } | /**
* Add keywords.
* @param set keywords. Must be in upper case.
* @param type keyword type
*/ | Add keywords | addKeywords | {
"repo_name": "diffplug/spotless",
"path": "lib/src/main/java/com/diffplug/spotless/sql/dbeaver/SQLDialect.java",
"license": "apache-2.0",
"size": 3665
} | [
"java.util.Collection",
"java.util.Locale"
] | import java.util.Collection; import java.util.Locale; | import java.util.*; | [
"java.util"
] | java.util; | 15,152 |
private void reset() {
this.title = null;
this.description = null;
this.keywords = null;
this.categories = null;
this.href = null;
this.author = null;
this.contents.setLength(0);
this.links = new ArrayList();
this.published = -1;
// Robot Instructions
this.robotIndex = true;
this.robotFollow = true;
this.state = NONE;
} | void function() { this.title = null; this.description = null; this.keywords = null; this.categories = null; this.href = null; this.author = null; this.contents.setLength(0); this.links = new ArrayList(); this.published = -1; this.robotIndex = true; this.robotFollow = true; this.state = NONE; } | /**
* Return contents
*/ | Return contents | reset | {
"repo_name": "idega/platform2",
"path": "src/com/idega/block/websearch/business/HTMLHandler.java",
"license": "gpl-3.0",
"size": 8167
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 498,270 |
public String toString(int indentFactor) throws JSONException {
StringWriter w = new StringWriter();
synchronized (w.getBuffer()) {
return this.write(w, indentFactor, 0).toString();
}
} | String function(int indentFactor) throws JSONException { StringWriter w = new StringWriter(); synchronized (w.getBuffer()) { return this.write(w, indentFactor, 0).toString(); } } | /**
* Make a prettyprinted JSON text of this JSONObject.
* <p>
* Warning: This method assumes that the data structure is acyclical.
*
* @param indentFactor
* The number of spaces to add to each level of indentation.
* @return a printable, displayable, portable, transmittable representation
* of the object, beginning with <code>{</code> <small>(left
* brace)</small> and ending with <code>}</code> <small>(right
* brace)</small>.
* @throws JSONException
* If the object contains an invalid number.
*/ | Make a prettyprinted JSON text of this JSONObject. Warning: This method assumes that the data structure is acyclical | toString | {
"repo_name": "joewitt99/okta-auth-report",
"path": "src/main/java/oktareport/JSONObject.java",
"license": "apache-2.0",
"size": 56718
} | [
"java.io.StringWriter"
] | import java.io.StringWriter; | import java.io.*; | [
"java.io"
] | java.io; | 1,549,406 |
protected void _doFinishRenameItem()
{
Object item = ((Editor)this.getCellEditor()).getItemBeingRenamed();
String newName = (String)(this.getCellEditor().getCellEditorValue());
this.setSelectionPath(null);
try
{
TreePath itemPath = this.getItemPath(item);
boolean wasExpanded = this.isExpanded(itemPath);
Object newItem = null;
if (item instanceof Account) { newItem = ((Account)item).rename(newName); }
else if (item instanceof Contact) { newItem = ((Contact)item).rename(newName); }
else if (item instanceof Group) { newItem = ((Group)item).rename(newName); }
else return;
if (wasExpanded) this.expandPath(this.getItemPath(newItem));
this.focusItem(newItem);
}
catch (Exception e)
{
JOptionPane.showMessageDialog(this.getTopLevelAncestor(), e.getMessage(), "Error", JOptionPane.WARNING_MESSAGE);
}
}
| void function() { Object item = ((Editor)this.getCellEditor()).getItemBeingRenamed(); String newName = (String)(this.getCellEditor().getCellEditorValue()); this.setSelectionPath(null); try { TreePath itemPath = this.getItemPath(item); boolean wasExpanded = this.isExpanded(itemPath); Object newItem = null; if (item instanceof Account) { newItem = ((Account)item).rename(newName); } else if (item instanceof Contact) { newItem = ((Contact)item).rename(newName); } else if (item instanceof Group) { newItem = ((Group)item).rename(newName); } else return; if (wasExpanded) this.expandPath(this.getItemPath(newItem)); this.focusItem(newItem); } catch (Exception e) { JOptionPane.showMessageDialog(this.getTopLevelAncestor(), e.getMessage(), "Error", JOptionPane.WARNING_MESSAGE); } } | /**
* Finishes the "rename item" command as called from
* the popup menu (this is called after the editor is
* closed).
*/ | Finishes the "rename item" command as called from the popup menu (this is called after the editor is closed) | _doFinishRenameItem | {
"repo_name": "goc9000/UniArchive",
"path": "src/uniarchive/widgets/ArchiveGroupsView.java",
"license": "gpl-3.0",
"size": 55106
} | [
"javax.swing.JOptionPane",
"javax.swing.tree.TreePath"
] | import javax.swing.JOptionPane; import javax.swing.tree.TreePath; | import javax.swing.*; import javax.swing.tree.*; | [
"javax.swing"
] | javax.swing; | 502,391 |
private String getCoreFormatterOption(String key) {
if (fProject == null)
return JavaCore.getOption(key);
return fProject.getOption(key, true);
}
CorePrefs(IJavaProject project) {
fProject= project;
if (isStandalone()) {
prefUseTabs= true;
prefTabSize= 4;
prefIndentationSize= 4;
prefArrayDimensionsDeepIndent= true;
prefContinuationIndent= 2;
prefBlockIndent= 1;
prefArrayIndent= prefContinuationIndent;
prefArrayDeepIndent= true;
prefTernaryDeepAlign= false;
prefTernaryIndent= prefContinuationIndent;
prefCaseIndent= 0;
prefCaseBlockIndent= prefBlockIndent;
prefIndentBracesForBlocks= false;
prefSimpleIndent= (prefIndentBracesForBlocks && prefBlockIndent == 0) ? 1 : prefBlockIndent;
prefBracketIndent= prefBlockIndent;
prefMethodDeclDeepIndent= true;
prefMethodDeclIndent= 1;
prefMethodCallDeepIndent= false;
prefMethodCallIndent= 1;
prefParenthesisDeepIndent= false;
prefParenthesisIndent= prefContinuationIndent;
prefMethodBodyIndent= 1;
prefTypeIndent= 1;
prefIndentBracesForArrays= false;
prefIndentBracesForMethods= false;
prefIndentBracesForTypes= false;
prefHasGenerics= false;
prefTabChar= JavaCore.TAB;
} else {
prefUseTabs= prefUseTabs();
prefTabSize= prefTabSize();
prefIndentationSize= prefIndentationSize();
prefArrayDimensionsDeepIndent= prefArrayDimensionsDeepIndent();
prefContinuationIndent= prefContinuationIndent();
prefBlockIndent= prefBlockIndent();
prefArrayIndent= prefArrayIndent();
prefArrayDeepIndent= prefArrayDeepIndent();
prefTernaryDeepAlign= prefTernaryDeepAlign();
prefTernaryIndent= prefTernaryIndent();
prefCaseIndent= prefCaseIndent();
prefCaseBlockIndent= prefCaseBlockIndent();
prefIndentBracesForBlocks= prefIndentBracesForBlocks();
prefSimpleIndent= prefSimpleIndent();
prefBracketIndent= prefBracketIndent();
prefMethodDeclDeepIndent= prefMethodDeclDeepIndent();
prefMethodDeclIndent= prefMethodDeclIndent();
prefMethodCallDeepIndent= prefMethodCallDeepIndent();
prefMethodCallIndent= prefMethodCallIndent();
prefParenthesisDeepIndent= prefParenthesisDeepIndent();
prefParenthesisIndent= prefParenthesisIndent();
prefMethodBodyIndent= prefMethodBodyIndent();
prefTypeIndent= prefTypeIndent();
prefIndentBracesForArrays= prefIndentBracesForArrays();
prefIndentBracesForMethods= prefIndentBracesForMethods();
prefIndentBracesForTypes= prefIndentBracesForTypes();
prefHasGenerics= hasGenerics();
prefTabChar= getCoreFormatterOption(DefaultCodeFormatterConstants.FORMATTER_TAB_CHAR);
}
} | String function(String key) { if (fProject == null) return JavaCore.getOption(key); return fProject.getOption(key, true); } CorePrefs(IJavaProject project) { fProject= project; if (isStandalone()) { prefUseTabs= true; prefTabSize= 4; prefIndentationSize= 4; prefArrayDimensionsDeepIndent= true; prefContinuationIndent= 2; prefBlockIndent= 1; prefArrayIndent= prefContinuationIndent; prefArrayDeepIndent= true; prefTernaryDeepAlign= false; prefTernaryIndent= prefContinuationIndent; prefCaseIndent= 0; prefCaseBlockIndent= prefBlockIndent; prefIndentBracesForBlocks= false; prefSimpleIndent= (prefIndentBracesForBlocks && prefBlockIndent == 0) ? 1 : prefBlockIndent; prefBracketIndent= prefBlockIndent; prefMethodDeclDeepIndent= true; prefMethodDeclIndent= 1; prefMethodCallDeepIndent= false; prefMethodCallIndent= 1; prefParenthesisDeepIndent= false; prefParenthesisIndent= prefContinuationIndent; prefMethodBodyIndent= 1; prefTypeIndent= 1; prefIndentBracesForArrays= false; prefIndentBracesForMethods= false; prefIndentBracesForTypes= false; prefHasGenerics= false; prefTabChar= JavaCore.TAB; } else { prefUseTabs= prefUseTabs(); prefTabSize= prefTabSize(); prefIndentationSize= prefIndentationSize(); prefArrayDimensionsDeepIndent= prefArrayDimensionsDeepIndent(); prefContinuationIndent= prefContinuationIndent(); prefBlockIndent= prefBlockIndent(); prefArrayIndent= prefArrayIndent(); prefArrayDeepIndent= prefArrayDeepIndent(); prefTernaryDeepAlign= prefTernaryDeepAlign(); prefTernaryIndent= prefTernaryIndent(); prefCaseIndent= prefCaseIndent(); prefCaseBlockIndent= prefCaseBlockIndent(); prefIndentBracesForBlocks= prefIndentBracesForBlocks(); prefSimpleIndent= prefSimpleIndent(); prefBracketIndent= prefBracketIndent(); prefMethodDeclDeepIndent= prefMethodDeclDeepIndent(); prefMethodDeclIndent= prefMethodDeclIndent(); prefMethodCallDeepIndent= prefMethodCallDeepIndent(); prefMethodCallIndent= prefMethodCallIndent(); prefParenthesisDeepIndent= prefParenthesisDeepIndent(); prefParenthesisIndent= prefParenthesisIndent(); prefMethodBodyIndent= prefMethodBodyIndent(); prefTypeIndent= prefTypeIndent(); prefIndentBracesForArrays= prefIndentBracesForArrays(); prefIndentBracesForMethods= prefIndentBracesForMethods(); prefIndentBracesForTypes= prefIndentBracesForTypes(); prefHasGenerics= hasGenerics(); prefTabChar= getCoreFormatterOption(DefaultCodeFormatterConstants.FORMATTER_TAB_CHAR); } } | /**
* Returns the possibly project-specific core preference defined under <code>key</code>.
*
* @param key the key of the preference
* @return the value of the preference
* @since 3.1
*/ | Returns the possibly project-specific core preference defined under <code>key</code> | getCoreFormatterOption | {
"repo_name": "brunyuriy/quick-fix-scout",
"path": "org.eclipse.jdt.ui_3.7.1.r371_v20110824-0800/src/org/eclipse/jdt/internal/ui/text/JavaIndenter.java",
"license": "mit",
"size": 64751
} | [
"org.eclipse.jdt.core.IJavaProject",
"org.eclipse.jdt.core.JavaCore",
"org.eclipse.jdt.core.formatter.DefaultCodeFormatterConstants"
] | import org.eclipse.jdt.core.IJavaProject; import org.eclipse.jdt.core.JavaCore; import org.eclipse.jdt.core.formatter.DefaultCodeFormatterConstants; | import org.eclipse.jdt.core.*; import org.eclipse.jdt.core.formatter.*; | [
"org.eclipse.jdt"
] | org.eclipse.jdt; | 1,887,112 |
void export(Label skylarkLabel, String ruleClassName) throws EvalException {
Preconditions.checkState(ruleClass == null && builder != null);
this.skylarkLabel = skylarkLabel;
if (type == RuleClassType.TEST != TargetUtils.isTestRuleName(ruleClassName)) {
throw new EvalException(definitionLocation, "Invalid rule class name '" + ruleClassName
+ "', test rule class names must end with '_test' and other rule classes must not");
}
for (Pair<String, SkylarkAttr.Descriptor> attribute : attributes) {
SkylarkAttr.Descriptor descriptor = attribute.getSecond();
Attribute.Builder<?> attributeBuilder = descriptor.getAttributeBuilder();
for (SkylarkAspect skylarkAspect : descriptor.getAspects()) {
if (!skylarkAspect.isExported()) {
throw new EvalException(definitionLocation,
"All aspects applied to rule dependencies must be top-level values");
}
attributeBuilder.aspect(skylarkAspect);
}
addAttribute(definitionLocation, builder,
descriptor.getAttributeBuilder().build(attribute.getFirst()));
}
this.ruleClass = builder.build(ruleClassName);
this.builder = null;
this.attributes = null;
} | void export(Label skylarkLabel, String ruleClassName) throws EvalException { Preconditions.checkState(ruleClass == null && builder != null); this.skylarkLabel = skylarkLabel; if (type == RuleClassType.TEST != TargetUtils.isTestRuleName(ruleClassName)) { throw new EvalException(definitionLocation, STR + ruleClassName + STR); } for (Pair<String, SkylarkAttr.Descriptor> attribute : attributes) { SkylarkAttr.Descriptor descriptor = attribute.getSecond(); Attribute.Builder<?> attributeBuilder = descriptor.getAttributeBuilder(); for (SkylarkAspect skylarkAspect : descriptor.getAspects()) { if (!skylarkAspect.isExported()) { throw new EvalException(definitionLocation, STR); } attributeBuilder.aspect(skylarkAspect); } addAttribute(definitionLocation, builder, descriptor.getAttributeBuilder().build(attribute.getFirst())); } this.ruleClass = builder.build(ruleClassName); this.builder = null; this.attributes = null; } | /**
* Export a RuleFunction from a Skylark file with a given name.
*/ | Export a RuleFunction from a Skylark file with a given name | export | {
"repo_name": "abergmeier-dsfishlabs/bazel",
"path": "src/main/java/com/google/devtools/build/lib/rules/SkylarkRuleClassFunctions.java",
"license": "apache-2.0",
"size": 41823
} | [
"com.google.devtools.build.lib.cmdline.Label",
"com.google.devtools.build.lib.packages.Attribute",
"com.google.devtools.build.lib.packages.RuleClass",
"com.google.devtools.build.lib.packages.SkylarkAspect",
"com.google.devtools.build.lib.packages.TargetUtils",
"com.google.devtools.build.lib.rules.SkylarkAttr",
"com.google.devtools.build.lib.syntax.EvalException",
"com.google.devtools.build.lib.util.Pair",
"com.google.devtools.build.lib.util.Preconditions"
] | import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.RuleClass; import com.google.devtools.build.lib.packages.SkylarkAspect; import com.google.devtools.build.lib.packages.TargetUtils; import com.google.devtools.build.lib.rules.SkylarkAttr; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.util.Preconditions; | import com.google.devtools.build.lib.cmdline.*; import com.google.devtools.build.lib.packages.*; import com.google.devtools.build.lib.rules.*; import com.google.devtools.build.lib.syntax.*; import com.google.devtools.build.lib.util.*; | [
"com.google.devtools"
] | com.google.devtools; | 569,524 |
private static void compareTotalPoolUsage(DatanodeStorageReport[] preReports,
DatanodeStorageReport[] postReports) {
Assert.assertNotNull(preReports);
Assert.assertNotNull(postReports);
Assert.assertEquals(preReports.length, postReports.length);
for (DatanodeStorageReport preReport : preReports) {
String dnUuid = preReport.getDatanodeInfo().getDatanodeUuid();
for(DatanodeStorageReport postReport : postReports) {
if(postReport.getDatanodeInfo().getDatanodeUuid().equals(dnUuid)) {
Assert.assertEquals(getTotalPoolUsage(preReport),
getTotalPoolUsage(postReport));
LOG.info("Comparision of datanode pool usage pre/post balancer run. "
+ "PrePoolUsage: " + getTotalPoolUsage(preReport)
+ ", PostPoolUsage: " + getTotalPoolUsage(postReport));
break;
}
}
}
} | static void function(DatanodeStorageReport[] preReports, DatanodeStorageReport[] postReports) { Assert.assertNotNull(preReports); Assert.assertNotNull(postReports); Assert.assertEquals(preReports.length, postReports.length); for (DatanodeStorageReport preReport : preReports) { String dnUuid = preReport.getDatanodeInfo().getDatanodeUuid(); for(DatanodeStorageReport postReport : postReports) { if(postReport.getDatanodeInfo().getDatanodeUuid().equals(dnUuid)) { Assert.assertEquals(getTotalPoolUsage(preReport), getTotalPoolUsage(postReport)); LOG.info(STR + STR + getTotalPoolUsage(preReport) + STR + getTotalPoolUsage(postReport)); break; } } } } | /**
* Compare the total blockpool usage on each datanode to ensure that nothing
* was balanced.
*
* @param preReports storage reports from pre balancer run
* @param postReports storage reports from post balancer run
*/ | Compare the total blockpool usage on each datanode to ensure that nothing was balanced | compareTotalPoolUsage | {
"repo_name": "dennishuo/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancerWithMultipleNameNodes.java",
"license": "apache-2.0",
"size": 23127
} | [
"org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport",
"org.junit.Assert"
] | import org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport; import org.junit.Assert; | import org.apache.hadoop.hdfs.server.protocol.*; import org.junit.*; | [
"org.apache.hadoop",
"org.junit"
] | org.apache.hadoop; org.junit; | 2,566 |
public void propertyModified(Property property)
{
if(property == driverName)
{
driverInitialized = false;
}
super.propertyModified(property);
}
| void function(Property property) { if(property == driverName) { driverInitialized = false; } super.propertyModified(property); } | /**
* Called when a property owned by this ObjectPool has changed.
*
* @param property the property that has changed.
*/ | Called when a property owned by this ObjectPool has changed | propertyModified | {
"repo_name": "tolo/JServer",
"path": "src/java/com/teletalk/jserver/pool/DBConnectionPool.java",
"license": "apache-2.0",
"size": 25039
} | [
"com.teletalk.jserver.property.Property"
] | import com.teletalk.jserver.property.Property; | import com.teletalk.jserver.property.*; | [
"com.teletalk.jserver"
] | com.teletalk.jserver; | 2,110,860 |
private int decodeOptions(byte[] bytes, int position) throws IncorrectMessageException {
int cur = bytes[position];
position++;
int delta = getBitsInByteAsByte((byte) cur, OPTION_DELTA_START, OPTION_DELTA_LENGTH);
int length = getBitsInByteAsByte((byte) cur, OPTION_LENGTH_START, OPTION_LENGTH_LENGTH);
int optionNumber = 0;
// An Option can be followed by the end of the message, by another Option, or by the Payload Marker and the payload.
while (delta != PAYLOAD_MARKER) {
if (length == 0 && delta == 0) {
return position;
}
//Determine option number
if (delta == ADDITIONAL_DELTA) {
delta += bytes[position];
position++;
} else if (delta == ADDITIONAL_DELTA_2) {
cur = bytes[position];
position++;
delta = ADDITIONAL_DELTA_MAX + ((cur << 8) + bytes[position]);
position++;
}
optionNumber += delta;
CoAPOptionName name = CoAPOptionName.getFromNo(optionNumber);
//Determine length
if (length == ADDITIONAL_LENGTH) {
length += bytes[position];
position++;
} else if (length == ADDITIONAL_LENGTH_2) {
cur = bytes[position];
position++;
length = ADDITIONAL_LENGTH_MAX + ((cur << 8) + bytes[position]);
position++;
}
if (!name.isLegalSize(length)) {
//If the length of an option value in a request is outside the defined range, that option MUST be treated like an unrecognized option
okOptions = false; //TODO: Handle this
}
byte[] value = new byte[length];
System.arraycopy(bytes, position, value, 0, length);
position += length;
boolean okToAdd = message.addOptionHeader(new CoAPOptionHeader(name, value));
// TODO draft-ietf-core-coap-08 specifies behaviour when
// unrecognized options are received in confirmable req/resp.
// So apply to CON messages only
if (!okToAdd && name.isCritical() && message.getMessageType() == CoAPMessageType.CONFIRMABLE) {
this.okOptions = false; //TODO: Handle this
LOGGER.debug("Unrecognized options in a confirmable message");
}
if (position >= bytes.length) {
return position;
}
cur = bytes[position];
position++;
delta = getBitsInByteAsByte((byte) cur, OPTION_DELTA_START, OPTION_DELTA_LENGTH);
length = getBitsInByteAsByte((byte) cur, OPTION_LENGTH_START, OPTION_LENGTH_LENGTH);
}
if (length != PAYLOAD_MARKER) {
// If the field is set to this value but the entire byte is not the payload marker, this MUST be processed as a message format error.
throw new IncorrectMessageException("Payload marker was bad: " + length + " at position " + position); //TODO: Handle this
}
if (position == bytes.length - 1) {
// The presence of a marker followed by a zero-length payload MUST be processed as a message format error.
throw new IncorrectMessageException("Empty payload after payload marker"); //TODO: Handle this
}
return position;
} | int function(byte[] bytes, int position) throws IncorrectMessageException { int cur = bytes[position]; position++; int delta = getBitsInByteAsByte((byte) cur, OPTION_DELTA_START, OPTION_DELTA_LENGTH); int length = getBitsInByteAsByte((byte) cur, OPTION_LENGTH_START, OPTION_LENGTH_LENGTH); int optionNumber = 0; while (delta != PAYLOAD_MARKER) { if (length == 0 && delta == 0) { return position; } if (delta == ADDITIONAL_DELTA) { delta += bytes[position]; position++; } else if (delta == ADDITIONAL_DELTA_2) { cur = bytes[position]; position++; delta = ADDITIONAL_DELTA_MAX + ((cur << 8) + bytes[position]); position++; } optionNumber += delta; CoAPOptionName name = CoAPOptionName.getFromNo(optionNumber); if (length == ADDITIONAL_LENGTH) { length += bytes[position]; position++; } else if (length == ADDITIONAL_LENGTH_2) { cur = bytes[position]; position++; length = ADDITIONAL_LENGTH_MAX + ((cur << 8) + bytes[position]); position++; } if (!name.isLegalSize(length)) { okOptions = false; } byte[] value = new byte[length]; System.arraycopy(bytes, position, value, 0, length); position += length; boolean okToAdd = message.addOptionHeader(new CoAPOptionHeader(name, value)); if (!okToAdd && name.isCritical() && message.getMessageType() == CoAPMessageType.CONFIRMABLE) { this.okOptions = false; LOGGER.debug(STR); } if (position >= bytes.length) { return position; } cur = bytes[position]; position++; delta = getBitsInByteAsByte((byte) cur, OPTION_DELTA_START, OPTION_DELTA_LENGTH); length = getBitsInByteAsByte((byte) cur, OPTION_LENGTH_START, OPTION_LENGTH_LENGTH); } if (length != PAYLOAD_MARKER) { throw new IncorrectMessageException(STR + length + STR + position); } if (position == bytes.length - 1) { throw new IncorrectMessageException(STR); } return position; } | /**
* This method decodes the options in the given data
*
* @param bytes bytes to decode
* @param optionCount option count (determined from the option count header)
*/ | This method decodes the options in the given data | decodeOptions | {
"repo_name": "Ericsson-LMF/IoT-Gateway",
"path": "osgi/basedriver.coap/src/main/java/com/ericsson/deviceaccess/coap/basedriver/util/CoAPMessageReader.java",
"license": "gpl-2.0",
"size": 11997
} | [
"com.ericsson.common.util.BitUtil",
"com.ericsson.deviceaccess.coap.basedriver.api.message.CoAPMessage",
"com.ericsson.deviceaccess.coap.basedriver.api.message.CoAPOptionHeader",
"com.ericsson.deviceaccess.coap.basedriver.api.message.CoAPOptionName"
] | import com.ericsson.common.util.BitUtil; import com.ericsson.deviceaccess.coap.basedriver.api.message.CoAPMessage; import com.ericsson.deviceaccess.coap.basedriver.api.message.CoAPOptionHeader; import com.ericsson.deviceaccess.coap.basedriver.api.message.CoAPOptionName; | import com.ericsson.common.util.*; import com.ericsson.deviceaccess.coap.basedriver.api.message.*; | [
"com.ericsson.common",
"com.ericsson.deviceaccess"
] | com.ericsson.common; com.ericsson.deviceaccess; | 2,889,217 |
public AzureIaaSvmJob withDuration(Duration duration) {
this.duration = duration;
return this;
} | AzureIaaSvmJob function(Duration duration) { this.duration = duration; return this; } | /**
* Set the duration property: Time elapsed during the execution of this job.
*
* @param duration the duration value to set.
* @return the AzureIaaSvmJob object itself.
*/ | Set the duration property: Time elapsed during the execution of this job | withDuration | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/recoveryservicesbackup/azure-resourcemanager-recoveryservicesbackup/src/main/java/com/azure/resourcemanager/recoveryservicesbackup/models/AzureIaaSvmJob.java",
"license": "mit",
"size": 8193
} | [
"java.time.Duration"
] | import java.time.Duration; | import java.time.*; | [
"java.time"
] | java.time; | 2,450,414 |
public SearchRequest setAssignees(List<String> assignees) {
this.assignees = assignees;
return this;
} | SearchRequest function(List<String> assignees) { this.assignees = assignees; return this; } | /**
* Example value: "admin,usera,__me__"
*/ | Example value: "admin,usera,__me__" | setAssignees | {
"repo_name": "SonarSource/sonarqube",
"path": "sonar-ws/src/main/java/org/sonarqube/ws/client/issues/SearchRequest.java",
"license": "lgpl-3.0",
"size": 13818
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,582,581 |
public JspConfigType<WebAppType<T>> createJspConfig(); | JspConfigType<WebAppType<T>> function(); | /**
* Creates a new <code>jsp-config</code> element
* @return the new created instance of <code>JspConfigType<WebAppType<T>></code>
*/ | Creates a new <code>jsp-config</code> element | createJspConfig | {
"repo_name": "forge/javaee-descriptors",
"path": "api/src/main/java/org/jboss/shrinkwrap/descriptor/api/webapp31/WebAppType.java",
"license": "epl-1.0",
"size": 60822
} | [
"org.jboss.shrinkwrap.descriptor.api.jsp23.JspConfigType"
] | import org.jboss.shrinkwrap.descriptor.api.jsp23.JspConfigType; | import org.jboss.shrinkwrap.descriptor.api.jsp23.*; | [
"org.jboss.shrinkwrap"
] | org.jboss.shrinkwrap; | 2,563,353 |
public static <E> ImmutableList<E> copyOf(Iterable<? extends E> elements) {
checkNotNull(elements); // TODO(kevinb): is this here only for GWT?
return (elements instanceof Collection)
? copyOf(Collections2.cast(elements))
: copyOf(elements.iterator());
}
/**
* Returns an immutable list containing the given elements, in order.
*
* <p>Despite the method name, this method attempts to avoid actually copying
* the data when it is safe to do so. The exact circumstances under which a
* copy will or will not be performed are undocumented and subject to change.
*
* <p>Note that if {@code list} is a {@code List<String>}, then {@code
* ImmutableList.copyOf(list)} returns an {@code ImmutableList<String>}
* containing each of the strings in {@code list}, while
* ImmutableList.of(list)} returns an {@code ImmutableList<List<String>>} | static <E> ImmutableList<E> function(Iterable<? extends E> elements) { checkNotNull(elements); return (elements instanceof Collection) ? copyOf(Collections2.cast(elements)) : copyOf(elements.iterator()); } /** * Returns an immutable list containing the given elements, in order. * * <p>Despite the method name, this method attempts to avoid actually copying * the data when it is safe to do so. The exact circumstances under which a * copy will or will not be performed are undocumented and subject to change. * * <p>Note that if {@code list} is a {@code List<String>}, then { * ImmutableList.copyOf(list)} returns an {@code ImmutableList<String>} * containing each of the strings in {@code list}, while * ImmutableList.of(list)} returns an {@code ImmutableList<List<String>>} | /**
* Returns an immutable list containing the given elements, in order. If
* {@code elements} is a {@link Collection}, this method behaves exactly as
* {@link #copyOf(Collection)}; otherwise, it behaves exactly as {@code
* copyOf(elements.iterator()}.
*
* @throws NullPointerException if any of {@code elements} is null
*/ | Returns an immutable list containing the given elements, in order. If elements is a <code>Collection</code>, this method behaves exactly as <code>#copyOf(Collection)</code>; otherwise, it behaves exactly as copyOf(elements.iterator() | copyOf | {
"repo_name": "mike10004/appengine-imaging",
"path": "gaecompat-awt-imaging/src/common/com/gaecompat/repackaged/com/google/common/collect/ImmutableList.java",
"license": "apache-2.0",
"size": 21972
} | [
"com.gaecompat.repackaged.com.google.common.base.Preconditions",
"java.util.Collection",
"java.util.List"
] | import com.gaecompat.repackaged.com.google.common.base.Preconditions; import java.util.Collection; import java.util.List; | import com.gaecompat.repackaged.com.google.common.base.*; import java.util.*; | [
"com.gaecompat.repackaged",
"java.util"
] | com.gaecompat.repackaged; java.util; | 1,463,753 |
@PUT
@Consumes(MediaType.APPLICATION_JSON)
public void putJson(String content) {
}
| @Consumes(MediaType.APPLICATION_JSON) void function(String content) { } | /**
* PUT method for updating or creating an instance of StudenteResource
* @param content representation for the resource
*/ | PUT method for updating or creating an instance of StudenteResource | putJson | {
"repo_name": "simoleo95/progettoMobile",
"path": "MYUNIVAQ2/src/main/java/gid/myunivaq2/jpa/StudenteResource.java",
"license": "apache-2.0",
"size": 1598
} | [
"javax.ws.rs.Consumes",
"javax.ws.rs.core.MediaType"
] | import javax.ws.rs.Consumes; import javax.ws.rs.core.MediaType; | import javax.ws.rs.*; import javax.ws.rs.core.*; | [
"javax.ws"
] | javax.ws; | 784,273 |
public void createH2Link(String string){
jdbcLink = JdbcLink.createH2Link(string);
}
| void function(String string){ jdbcLink = JdbcLink.createH2Link(string); } | /**
* Create a H2 db {@link JdbcLink}. It is not connected, but driver, connection string, etc. is
* configured.
*
* @param string
*/ | Create a H2 db <code>JdbcLink</code>. It is not connected, but driver, connection string, etc. is configured | createH2Link | {
"repo_name": "elexis/elexis-3-core",
"path": "bundles/ch.elexis.core.data/src/ch/elexis/data/DBConnection.java",
"license": "epl-1.0",
"size": 8728
} | [
"ch.rgw.tools.JdbcLink"
] | import ch.rgw.tools.JdbcLink; | import ch.rgw.tools.*; | [
"ch.rgw.tools"
] | ch.rgw.tools; | 1,385,661 |
public static TenantCache getTenantCache(RegionCoprocessorEnvironment env, ImmutableBytesWritable tenantId) {
GlobalCache globalCache = GlobalCache.getInstance(env);
TenantCache tenantCache = tenantId == null ? globalCache : globalCache.getChildTenantCache(tenantId);
return tenantCache;
} | static TenantCache function(RegionCoprocessorEnvironment env, ImmutableBytesWritable tenantId) { GlobalCache globalCache = GlobalCache.getInstance(env); TenantCache tenantCache = tenantId == null ? globalCache : globalCache.getChildTenantCache(tenantId); return tenantCache; } | /**
* Get the tenant cache associated with the tenantId. If tenantId is not applicable, null may be
* used in which case a global tenant cache is returned.
* @param env the HBase configuration
* @param tenantId the tenant ID or null if not applicable.
* @return TenantCache
*/ | Get the tenant cache associated with the tenantId. If tenantId is not applicable, null may be used in which case a global tenant cache is returned | getTenantCache | {
"repo_name": "chiastic-security/phoenix-for-cloudera",
"path": "phoenix-core/src/main/java/org/apache/phoenix/cache/GlobalCache.java",
"license": "apache-2.0",
"size": 8139
} | [
"org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment",
"org.apache.hadoop.hbase.io.ImmutableBytesWritable"
] | import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; | import org.apache.hadoop.hbase.coprocessor.*; import org.apache.hadoop.hbase.io.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 531,904 |
"application/octet-stream",
"application/xml",
"application/json",
"application/atom+xml",
"application/rdf+xml",
"application/rss+xml",
"application/zip"
})
public ReferenceResource getReference(
@QueryParam("productId") String productId, @QueryParam("refIndex") int refIndex)
throws WebApplicationException {
if (productId == null || productId.trim().equals("")) {
throw new BadRequestException(
ErrorType.BAD_REQUEST_EXCEPTION_REFERENCE_RESOURCE.getErrorType());
}
try {
FileManagerClient client = getContextClient();
Product product = client.getProductById(productId);
List<Reference> references = client.getProductReferences(product);
return new ReferenceResource(
productId, refIndex, references.get(refIndex), getContextWorkingDir());
} catch (Exception e) {
// Just for Logging Purposes
String message = "Unable to find the requested resource.";
LOGGER.log(Level.FINE, message, e);
throw new NotFoundException(e.getMessage());
}
} | STR, STR, STR, STR, STR, STR, STR }) ReferenceResource function( @QueryParam(STR) String productId, @QueryParam(STR) int refIndex) throws WebApplicationException { if (productId == null productId.trim().equals(STRUnable to find the requested resource."; LOGGER.log(Level.FINE, message, e); throw new NotFoundException(e.getMessage()); } } | /**
* Gets an HTTP response that represents a {@link Reference} from a {@link Product} from the file
* manager.
*
* @param productId the ID of the product that the reference belongs to
* @param refIndex the index of the reference within the product's list of references
* @return an HTTP response that represents a {@link Reference} from a {@link Product} from the
* file manager
*/ | Gets an HTTP response that represents a <code>Reference</code> from a <code>Product</code> from the file manager | getReference | {
"repo_name": "apache/oodt",
"path": "webapp/fmprod/src/main/java/org/apache/oodt/cas/product/jaxrs/services/CasProductJaxrsService.java",
"license": "apache-2.0",
"size": 13080
} | [
"java.util.logging.Level",
"javax.ws.rs.QueryParam",
"javax.ws.rs.WebApplicationException",
"org.apache.oodt.cas.product.jaxrs.exceptions.NotFoundException",
"org.apache.oodt.cas.product.jaxrs.resources.ReferenceResource"
] | import java.util.logging.Level; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import org.apache.oodt.cas.product.jaxrs.exceptions.NotFoundException; import org.apache.oodt.cas.product.jaxrs.resources.ReferenceResource; | import java.util.logging.*; import javax.ws.rs.*; import org.apache.oodt.cas.product.jaxrs.exceptions.*; import org.apache.oodt.cas.product.jaxrs.resources.*; | [
"java.util",
"javax.ws",
"org.apache.oodt"
] | java.util; javax.ws; org.apache.oodt; | 2,903,817 |
public static Class<?> resolveType(
final Type genericType,
final Map<TypeVariable<?>, Type> typeVariableMap) {
final Type rawType = getRawType(genericType, typeVariableMap);
return (rawType instanceof Class ? (Class<?>) rawType : Object.class);
} | static Class<?> function( final Type genericType, final Map<TypeVariable<?>, Type> typeVariableMap) { final Type rawType = getRawType(genericType, typeVariableMap); return (rawType instanceof Class ? (Class<?>) rawType : Object.class); } | /**
* Resolve the specified generic type against the given TypeVariable map.
*
* @param genericType the generic type to resolve
* @param typeVariableMap the TypeVariable Map to resolved against
* @return the type if it resolves to a Class, or <code>Object.class</code> otherwise
*/ | Resolve the specified generic type against the given TypeVariable map | resolveType | {
"repo_name": "spohnan/geowave",
"path": "core/store/src/main/java/org/locationtech/geowave/core/store/util/GenericTypeResolver.java",
"license": "apache-2.0",
"size": 13525
} | [
"java.lang.reflect.Type",
"java.lang.reflect.TypeVariable",
"java.util.Map"
] | import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.util.Map; | import java.lang.reflect.*; import java.util.*; | [
"java.lang",
"java.util"
] | java.lang; java.util; | 2,374,506 |
static void logUnifiedConsentPreviousEnabledState(boolean wasPreviouslyUndecided) {
RecordHistogram.recordBooleanHistogram(
"Search.ContextualSearch.UnifiedConsent.PreviouslyUndecided",
wasPreviouslyUndecided);
} | static void logUnifiedConsentPreviousEnabledState(boolean wasPreviouslyUndecided) { RecordHistogram.recordBooleanHistogram( STR, wasPreviouslyUndecided); } | /**
* Logs the previous enabled-state of this user before the feature was turned full-on for
* Unified Consent (when integration is enabled).
* @param wasPreviouslyUndecided Whether the user was previously undecided.
*/ | Logs the previous enabled-state of this user before the feature was turned full-on for Unified Consent (when integration is enabled) | logUnifiedConsentPreviousEnabledState | {
"repo_name": "chromium/chromium",
"path": "chrome/android/java/src/org/chromium/chrome/browser/contextualsearch/ContextualSearchUma.java",
"license": "bsd-3-clause",
"size": 83424
} | [
"org.chromium.base.metrics.RecordHistogram"
] | import org.chromium.base.metrics.RecordHistogram; | import org.chromium.base.metrics.*; | [
"org.chromium.base"
] | org.chromium.base; | 369,174 |
@Override public void exitBinaryCriterion(@NotNull CriterionParser.BinaryCriterionContext ctx) { } | @Override public void exitBinaryCriterion(@NotNull CriterionParser.BinaryCriterionContext ctx) { } | /**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/ | The default implementation does nothing | enterBinaryCriterion | {
"repo_name": "Haixing-Hu/criteria",
"path": "src/main/java/com/github/haixing_hu/criteria/parser/sql/CriterionBaseListener.java",
"license": "apache-2.0",
"size": 5111
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 2,615,106 |
public List<ActiveRuleDto> selectByRule(DbSession dbSession, RuleDto rule) {
Preconditions.checkNotNull(rule.getId(), RULE_IS_NOT_PERSISTED);
return mapper(dbSession).selectByRuleId(rule.getId());
} | List<ActiveRuleDto> function(DbSession dbSession, RuleDto rule) { Preconditions.checkNotNull(rule.getId(), RULE_IS_NOT_PERSISTED); return mapper(dbSession).selectByRuleId(rule.getId()); } | /**
* Finder methods for Rules
*/ | Finder methods for Rules | selectByRule | {
"repo_name": "joansmith/sonarqube",
"path": "server/sonar-server/src/main/java/org/sonar/server/qualityprofile/db/ActiveRuleDao.java",
"license": "lgpl-3.0",
"size": 9007
} | [
"com.google.common.base.Preconditions",
"java.util.List",
"org.sonar.db.DbSession",
"org.sonar.db.qualityprofile.ActiveRuleDto",
"org.sonar.db.rule.RuleDto"
] | import com.google.common.base.Preconditions; import java.util.List; import org.sonar.db.DbSession; import org.sonar.db.qualityprofile.ActiveRuleDto; import org.sonar.db.rule.RuleDto; | import com.google.common.base.*; import java.util.*; import org.sonar.db.*; import org.sonar.db.qualityprofile.*; import org.sonar.db.rule.*; | [
"com.google.common",
"java.util",
"org.sonar.db"
] | com.google.common; java.util; org.sonar.db; | 2,677,649 |
public static boolean hasJavaDoc(ProgramElementDoc elementDoc) {
String methodDoc = elementDoc.commentText();
return (methodDoc != null && methodDoc.length() != 0);
} | static boolean function(ProgramElementDoc elementDoc) { String methodDoc = elementDoc.commentText(); return (methodDoc != null && methodDoc.length() != 0); } | /**
* Returns true of the {@link MethodDoc} has JavaDoc documentation.
*
* @param elementDoc method to check.
* @return true if the JavaDoc is present, false if missing.
*/ | Returns true of the <code>MethodDoc</code> has JavaDoc documentation | hasJavaDoc | {
"repo_name": "egonw/ojdcheck",
"path": "com.github.ojdcheck/src/main/java/com/github/ojdcheck/util/JavaDocHelper.java",
"license": "bsd-3-clause",
"size": 2641
} | [
"com.sun.javadoc.ProgramElementDoc"
] | import com.sun.javadoc.ProgramElementDoc; | import com.sun.javadoc.*; | [
"com.sun.javadoc"
] | com.sun.javadoc; | 88,571 |
public void save(String fileName) throws IOException, COSVisitorException {
save(new File(fileName));
} | void function(String fileName) throws IOException, COSVisitorException { save(new File(fileName)); } | /**
* Save the document to a file.
*
* @param fileName The file to save as.
*
* @throws IOException If there is an error saving the document.
* @throws COSVisitorException If an error occurs while generating the data.
*/ | Save the document to a file | save | {
"repo_name": "sencko/NALB",
"path": "nalb2013/src/org/apache/pdfbox/pdmodel/PDDocument.java",
"license": "gpl-2.0",
"size": 52170
} | [
"java.io.File",
"java.io.IOException",
"org.apache.pdfbox.exceptions.COSVisitorException"
] | import java.io.File; import java.io.IOException; import org.apache.pdfbox.exceptions.COSVisitorException; | import java.io.*; import org.apache.pdfbox.exceptions.*; | [
"java.io",
"org.apache.pdfbox"
] | java.io; org.apache.pdfbox; | 1,387,779 |
@Test
public void verifyApply() {
try {
this.policy.apply(this.crl);
if (this.expected != null) {
Assert.fail("Expected exception of type " + this.expected.getClass());
}
} catch (final GeneralSecurityException e) {
if (this.expected == null) {
e.printStackTrace();
Assert.fail("Revocation check failed unexpectedly with exception: " + e);
} else {
final Class<?> expectedClass = this.expected.getClass();
final Class<?> actualClass = e.getClass();
Assert.assertTrue(
String.format("Expected exception of type %s but got %s", expectedClass, actualClass),
expectedClass.isAssignableFrom(actualClass));
}
}
} | void function() { try { this.policy.apply(this.crl); if (this.expected != null) { Assert.fail(STR + this.expected.getClass()); } } catch (final GeneralSecurityException e) { if (this.expected == null) { e.printStackTrace(); Assert.fail(STR + e); } else { final Class<?> expectedClass = this.expected.getClass(); final Class<?> actualClass = e.getClass(); Assert.assertTrue( String.format(STR, expectedClass, actualClass), expectedClass.isAssignableFrom(actualClass)); } } } | /**
* Test method for {@link ThresholdExpiredCRLRevocationPolicy#apply(java.security.cert.X509CRL)}.
*/ | Test method for <code>ThresholdExpiredCRLRevocationPolicy#apply(java.security.cert.X509CRL)</code> | verifyApply | {
"repo_name": "vydra/cas",
"path": "support/cas-server-support-x509/src/test/java/org/apereo/cas/adaptors/x509/authentication/handler/support/ThresholdExpiredCRLRevocationPolicyTests.java",
"license": "apache-2.0",
"size": 4826
} | [
"java.security.GeneralSecurityException",
"org.junit.Assert"
] | import java.security.GeneralSecurityException; import org.junit.Assert; | import java.security.*; import org.junit.*; | [
"java.security",
"org.junit"
] | java.security; org.junit; | 801,080 |
public boolean storesUpperCaseIdentifiers() throws SQLException {
return false;
} | boolean function() throws SQLException { return false; } | /**
* Does the database store mixed case unquoted SQL identifiers in upper
* case?
*
* @return true if so
* @throws SQLException
* DOCUMENT ME!
*/ | Does the database store mixed case unquoted SQL identifiers in upper case | storesUpperCaseIdentifiers | {
"repo_name": "shubhanshu-gupta/Apache-Solr",
"path": "example/solr/collection1/lib/mysql-connector-java-5.1.32/src/com/mysql/jdbc/DatabaseMetaData.java",
"license": "apache-2.0",
"size": 287958
} | [
"java.sql.SQLException"
] | import java.sql.SQLException; | import java.sql.*; | [
"java.sql"
] | java.sql; | 371,062 |
public boolean configure( StaplerRequest req, JSONObject json ) throws FormException {
// compatibility
return configure(req);
} | boolean function( StaplerRequest req, JSONObject json ) throws FormException { return configure(req); } | /**
* Invoked when the global configuration page is submitted.
*
* Can be overriden to store descriptor-specific information.
*
* @param json
* The JSON object that captures the configuration data for this {@link Descriptor}.
* See http://hudson.gotdns.com/wiki/display/HUDSON/Structured+Form+Submission
* @return false
* to keep the client in the same config page.
*/ | Invoked when the global configuration page is submitted. Can be overriden to store descriptor-specific information | configure | {
"repo_name": "fujibee/hudson",
"path": "core/src/main/java/hudson/model/Descriptor.java",
"license": "mit",
"size": 24569
} | [
"net.sf.json.JSONObject",
"org.kohsuke.stapler.StaplerRequest"
] | import net.sf.json.JSONObject; import org.kohsuke.stapler.StaplerRequest; | import net.sf.json.*; import org.kohsuke.stapler.*; | [
"net.sf.json",
"org.kohsuke.stapler"
] | net.sf.json; org.kohsuke.stapler; | 2,661,549 |
EAttribute getCPUUtilization_Wait(); | EAttribute getCPUUtilization_Wait(); | /**
* Returns the meta object for the attribute '{@link kieker.tools.slastic.metamodel.monitoring.CPUUtilization#getWait <em>Wait</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the attribute '<em>Wait</em>'.
* @see kieker.tools.slastic.metamodel.monitoring.CPUUtilization#getWait()
* @see #getCPUUtilization()
* @generated
*/ | Returns the meta object for the attribute '<code>kieker.tools.slastic.metamodel.monitoring.CPUUtilization#getWait Wait</code>'. | getCPUUtilization_Wait | {
"repo_name": "SLAsticSPE/slastic",
"path": "src-gen/kieker/tools/slastic/metamodel/monitoring/MonitoringPackage.java",
"license": "apache-2.0",
"size": 40668
} | [
"org.eclipse.emf.ecore.EAttribute"
] | import org.eclipse.emf.ecore.EAttribute; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 351,450 |
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<ClusterInner>> listByResourceGroupSinglePageAsync(String resourceGroupName) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
final String accept = "application/json";
return FluxUtil
.withContext(
context ->
service
.listByResourceGroup(
this.client.getEndpoint(),
resourceGroupName,
this.client.getApiVersion(),
this.client.getSubscriptionId(),
accept,
context))
.<PagedResponse<ClusterInner>>map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null))
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
} | @ServiceMethod(returns = ReturnType.SINGLE) Mono<PagedResponse<ClusterInner>> function(String resourceGroupName) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( STR)); } final String accept = STR; return FluxUtil .withContext( context -> service .listByResourceGroup( this.client.getEndpoint(), resourceGroupName, this.client.getApiVersion(), this.client.getSubscriptionId(), accept, context)) .<PagedResponse<ClusterInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } | /**
* Gets Log Analytics clusters in a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return log Analytics clusters in a resource group.
*/ | Gets Log Analytics clusters in a resource group | listByResourceGroupSinglePageAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/loganalytics/azure-resourcemanager-loganalytics/src/main/java/com/azure/resourcemanager/loganalytics/implementation/ClustersClientImpl.java",
"license": "mit",
"size": 69008
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedResponse",
"com.azure.core.http.rest.PagedResponseBase",
"com.azure.core.util.FluxUtil",
"com.azure.resourcemanager.loganalytics.fluent.models.ClusterInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.util.FluxUtil; import com.azure.resourcemanager.loganalytics.fluent.models.ClusterInner; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.loganalytics.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 259,107 |
private static boolean compareBranch(
MapRecord before, MapRecord after, NodeStateDiff diff) {
MapRecord[] beforeBuckets = before.getBuckets();
MapRecord[] afterBuckets = after.getBuckets();
for (int i = 0; i < BUCKETS_PER_LEVEL; i++) {
if (Objects.equal(beforeBuckets[i], afterBuckets[i])) {
// these buckets are equal (or both empty), so no changes
} else if (beforeBuckets[i] == null) {
// before bucket is empty, so all after entries were added
MapRecord bucket = afterBuckets[i];
for (MapEntry entry : bucket.getEntries()) {
if (!diff.childNodeAdded(
entry.getName(), entry.getNodeState())) {
return false;
}
}
} else if (afterBuckets[i] == null) {
// after bucket is empty, so all before entries were deleted
MapRecord bucket = beforeBuckets[i];
for (MapEntry entry : bucket.getEntries()) {
if (!diff.childNodeDeleted(
entry.getName(), entry.getNodeState())) {
return false;
}
}
} else {
// both before and after buckets exist; compare recursively
MapRecord beforeBucket = beforeBuckets[i];
MapRecord afterBucket = afterBuckets[i];
if (!afterBucket.compare(beforeBucket, diff)) {
return false;
}
}
}
return true;
} | static boolean function( MapRecord before, MapRecord after, NodeStateDiff diff) { MapRecord[] beforeBuckets = before.getBuckets(); MapRecord[] afterBuckets = after.getBuckets(); for (int i = 0; i < BUCKETS_PER_LEVEL; i++) { if (Objects.equal(beforeBuckets[i], afterBuckets[i])) { } else if (beforeBuckets[i] == null) { MapRecord bucket = afterBuckets[i]; for (MapEntry entry : bucket.getEntries()) { if (!diff.childNodeAdded( entry.getName(), entry.getNodeState())) { return false; } } } else if (afterBuckets[i] == null) { MapRecord bucket = beforeBuckets[i]; for (MapEntry entry : bucket.getEntries()) { if (!diff.childNodeDeleted( entry.getName(), entry.getNodeState())) { return false; } } } else { MapRecord beforeBucket = beforeBuckets[i]; MapRecord afterBucket = afterBuckets[i]; if (!afterBucket.compare(beforeBucket, diff)) { return false; } } } return true; } | /**
* Compares two map branches. Given the way the comparison algorithm
* works, the branches are always guaranteed to be at the same level
* with the same hash prefixes.
*/ | Compares two map branches. Given the way the comparison algorithm works, the branches are always guaranteed to be at the same level with the same hash prefixes | compareBranch | {
"repo_name": "bdelacretaz/jackrabbit-oak",
"path": "oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/MapRecord.java",
"license": "apache-2.0",
"size": 22774
} | [
"com.google.common.base.Objects",
"org.apache.jackrabbit.oak.spi.state.NodeStateDiff"
] | import com.google.common.base.Objects; import org.apache.jackrabbit.oak.spi.state.NodeStateDiff; | import com.google.common.base.*; import org.apache.jackrabbit.oak.spi.state.*; | [
"com.google.common",
"org.apache.jackrabbit"
] | com.google.common; org.apache.jackrabbit; | 743,930 |
public byte[] getHash(byte[] b) throws NoSuchAlgorithmException,
NoSuchProviderException {
return sha1Util.getHash(b);
}
| byte[] function(byte[] b) throws NoSuchAlgorithmException, NoSuchProviderException { return sha1Util.getHash(b); } | /**
* Computes the SHA-1 hash code of a byte array as a hex string.
*
* @param b
* the bytes to hash
* @return the hash value
* @exception NoSuchAlgorithmException
* if the algorithm is not available from the provide
* @exception NoSuchProviderException
* if the provider is not available in the environment
*/ | Computes the SHA-1 hash code of a byte array as a hex string | getHash | {
"repo_name": "abecquereau/awake-file",
"path": "src-main/org/kawanfw/commons/api/server/util/Sha1.java",
"license": "lgpl-2.1",
"size": 4401
} | [
"java.security.NoSuchAlgorithmException",
"java.security.NoSuchProviderException"
] | import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; | import java.security.*; | [
"java.security"
] | java.security; | 1,267,758 |
public void configureRangeAxes() {
for (int i = 0; i < this.rangeAxes.size(); i++) {
ValueAxis axis = (ValueAxis) this.rangeAxes.get(i);
if (axis != null) {
axis.configure();
}
}
} | void function() { for (int i = 0; i < this.rangeAxes.size(); i++) { ValueAxis axis = (ValueAxis) this.rangeAxes.get(i); if (axis != null) { axis.configure(); } } } | /**
* Configures the range axes.
*/ | Configures the range axes | configureRangeAxes | {
"repo_name": "ibestvina/multithread-centiscape",
"path": "CentiScaPe2.1/src/main/java/org/jfree/chart/plot/CategoryPlot.java",
"license": "mit",
"size": 135372
} | [
"org.jfree.chart.axis.ValueAxis"
] | import org.jfree.chart.axis.ValueAxis; | import org.jfree.chart.axis.*; | [
"org.jfree.chart"
] | org.jfree.chart; | 1,342,502 |
private void processNewPort(IOFSwitch sw, OFPort portnum) {
if ( sw != null ) {
sendDiscoveryMessage(sw, portnum, true, false);
// Add to maintenance queue to ensure that BDDP packets
// are sent out.
addToMaintenanceQueue( new NodePortTuple(sw.getId(), portnum) );
}
} | void function(IOFSwitch sw, OFPort portnum) { if ( sw != null ) { sendDiscoveryMessage(sw, portnum, true, false); addToMaintenanceQueue( new NodePortTuple(sw.getId(), portnum) ); } } | /**
* Process a new port.
* (Send LLDP message. Add the port to quarantine)
*
* @param sw
* @param portnum
*/ | Process a new port. (Send LLDP message. Add the port to quarantine) | processNewPort | {
"repo_name": "openiris/IRIS",
"path": "Torpedo/src/etri/sdn/controller/module/linkdiscovery/OFMLinkDiscovery.java",
"license": "apache-2.0",
"size": 38285
} | [
"org.projectfloodlight.openflow.types.OFPort"
] | import org.projectfloodlight.openflow.types.OFPort; | import org.projectfloodlight.openflow.types.*; | [
"org.projectfloodlight.openflow"
] | org.projectfloodlight.openflow; | 191,520 |
public static Node newQNameDeclaration(
AbstractCompiler compiler, String name, Node value, JSDocInfo info) {
Node result;
Node nameNode = newQName(compiler, name);
if (nameNode.isName()) {
result = IR.var(nameNode, value);
result.setJSDocInfo(info);
} else if (value != null) {
result = IR.exprResult(IR.assign(nameNode, value));
result.getFirstChild().setJSDocInfo(info);
} else {
result = IR.exprResult(nameNode);
result.getFirstChild().setJSDocInfo(info);
}
return result;
} | static Node function( AbstractCompiler compiler, String name, Node value, JSDocInfo info) { Node result; Node nameNode = newQName(compiler, name); if (nameNode.isName()) { result = IR.var(nameNode, value); result.setJSDocInfo(info); } else if (value != null) { result = IR.exprResult(IR.assign(nameNode, value)); result.getFirstChild().setJSDocInfo(info); } else { result = IR.exprResult(nameNode); result.getFirstChild().setJSDocInfo(info); } return result; } | /**
* Creates a node representing a qualified name.
*
* @param name A qualified name (e.g. "foo" or "foo.bar.baz")
* @return A NAME or GETPROP node
*/ | Creates a node representing a qualified name | newQNameDeclaration | {
"repo_name": "robbert/closure-compiler",
"path": "src/com/google/javascript/jscomp/NodeUtil.java",
"license": "apache-2.0",
"size": 114790
} | [
"com.google.javascript.rhino.IR",
"com.google.javascript.rhino.JSDocInfo",
"com.google.javascript.rhino.Node"
] | import com.google.javascript.rhino.IR; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; | import com.google.javascript.rhino.*; | [
"com.google.javascript"
] | com.google.javascript; | 1,808,624 |
IOHub result = new IOHub(executor);
executor.execute(result);
LOGGER.log(Level.FINE, "Starting an additional Selector wakeup thread. See JENKINS-47965 for more information.");
executor.execute(new IOHubSelectorWatcher(result));
return result;
}
/**
* {@inheritDoc} | IOHub result = new IOHub(executor); executor.execute(result); LOGGER.log(Level.FINE, STR); executor.execute(new IOHubSelectorWatcher(result)); return result; } /** * {@inheritDoc} | /**
* Creates and starts a new {@link IOHub} instance.
*
* @param executor the {@link Executor} to use for running tasks.
* @return the new hub.
* @throws IOException if the hub's {@link Selector} cannot be opened.
*/ | Creates and starts a new <code>IOHub</code> instance | create | {
"repo_name": "jenkinsci/remoting",
"path": "src/main/java/org/jenkinsci/remoting/protocol/IOHub.java",
"license": "mit",
"size": 38666
} | [
"java.util.logging.Level"
] | import java.util.logging.Level; | import java.util.logging.*; | [
"java.util"
] | java.util; | 2,320,406 |
private static List<AbstractFile> findVirtualMachineFiles(Content dataSource) throws TskCoreException, NoCurrentCaseException {
List<AbstractFile> vmFiles = new ArrayList<>();
for (String vmExtension : GeneralFilter.VIRTUAL_MACHINE_EXTS) {
String searchString = "%" + vmExtension; // want a search string that looks like this "%.vmdk"
vmFiles.addAll(Case.getCurrentCaseThrows().getServices().getFileManager().findFiles(dataSource, searchString));
}
return vmFiles;
} | static List<AbstractFile> function(Content dataSource) throws TskCoreException, NoCurrentCaseException { List<AbstractFile> vmFiles = new ArrayList<>(); for (String vmExtension : GeneralFilter.VIRTUAL_MACHINE_EXTS) { String searchString = "%" + vmExtension; vmFiles.addAll(Case.getCurrentCaseThrows().getServices().getFileManager().findFiles(dataSource, searchString)); } return vmFiles; } | /**
* Locate all supported virtual machine files, if any, contained in a data
* source.
*
* @param dataSource The data source.
*
* @return A list of virtual machine files, possibly empty.
*
* @throws TskCoreException if there is a problem querying the case
* database.
*/ | Locate all supported virtual machine files, if any, contained in a data source | findVirtualMachineFiles | {
"repo_name": "APriestman/autopsy",
"path": "Core/src/org/sleuthkit/autopsy/modules/vmextractor/VMExtractorIngestModule.java",
"license": "apache-2.0",
"size": 18981
} | [
"java.util.ArrayList",
"java.util.List",
"org.sleuthkit.autopsy.casemodule.Case",
"org.sleuthkit.autopsy.casemodule.GeneralFilter",
"org.sleuthkit.autopsy.casemodule.NoCurrentCaseException",
"org.sleuthkit.datamodel.AbstractFile",
"org.sleuthkit.datamodel.Content",
"org.sleuthkit.datamodel.TskCoreException"
] | import java.util.ArrayList; import java.util.List; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.GeneralFilter; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.TskCoreException; | import java.util.*; import org.sleuthkit.autopsy.casemodule.*; import org.sleuthkit.datamodel.*; | [
"java.util",
"org.sleuthkit.autopsy",
"org.sleuthkit.datamodel"
] | java.util; org.sleuthkit.autopsy; org.sleuthkit.datamodel; | 2,537,827 |
public ArrayList<LatLng> getGeometryObject() {
return mCoordinates;
} | ArrayList<LatLng> function() { return mCoordinates; } | /**
* Gets the coordinates
*
* @return ArrayList of LatLng
*/ | Gets the coordinates | getGeometryObject | {
"repo_name": "josegury/AndroidMarkerClusteringMaps",
"path": "MarkerClusteringMaps/app/src/main/java/joseangelpardo/markerclusteringmaps/libreryMaps/kml/KmlLineString.java",
"license": "apache-2.0",
"size": 1345
} | [
"com.google.android.gms.maps.model.LatLng",
"java.util.ArrayList"
] | import com.google.android.gms.maps.model.LatLng; import java.util.ArrayList; | import com.google.android.gms.maps.model.*; import java.util.*; | [
"com.google.android",
"java.util"
] | com.google.android; java.util; | 636,473 |
public Observable<ServiceResponse<VirtualNetworkTapInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String tapName, VirtualNetworkTapInner parameters) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (tapName == null) {
throw new IllegalArgumentException("Parameter tapName is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (parameters == null) {
throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
}
Validator.validate(parameters);
final String apiVersion = "2020-06-01";
Observable<Response<ResponseBody>> observable = service.createOrUpdate(resourceGroupName, tapName, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent());
return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<VirtualNetworkTapInner>() { }.getType());
} | Observable<ServiceResponse<VirtualNetworkTapInner>> function(String resourceGroupName, String tapName, VirtualNetworkTapInner parameters) { if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (tapName == null) { throw new IllegalArgumentException(STR); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (parameters == null) { throw new IllegalArgumentException(STR); } Validator.validate(parameters); final String apiVersion = STR; Observable<Response<ResponseBody>> observable = service.createOrUpdate(resourceGroupName, tapName, this.client.subscriptionId(), parameters, apiVersion, this.client.acceptLanguage(), this.client.userAgent()); return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<VirtualNetworkTapInner>() { }.getType()); } | /**
* Creates or updates a Virtual Network Tap.
*
* @param resourceGroupName The name of the resource group.
* @param tapName The name of the virtual network tap.
* @param parameters Parameters supplied to the create or update virtual network tap operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/ | Creates or updates a Virtual Network Tap | createOrUpdateWithServiceResponseAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2020_06_01/src/main/java/com/microsoft/azure/management/network/v2020_06_01/implementation/VirtualNetworkTapsInner.java",
"license": "mit",
"size": 64262
} | [
"com.google.common.reflect.TypeToken",
"com.microsoft.rest.ServiceResponse",
"com.microsoft.rest.Validator"
] | import com.google.common.reflect.TypeToken; import com.microsoft.rest.ServiceResponse; import com.microsoft.rest.Validator; | import com.google.common.reflect.*; import com.microsoft.rest.*; | [
"com.google.common",
"com.microsoft.rest"
] | com.google.common; com.microsoft.rest; | 1,695,839 |
public Flow<JobXMLDescriptor> getOrCreateFlow()
{
List<Node> nodeList = model.get("flow");
if (nodeList != null && nodeList.size() > 0)
{
return new FlowImpl<JobXMLDescriptor>(this, "flow", model, nodeList.get(0));
}
return createFlow();
} | Flow<JobXMLDescriptor> function() { List<Node> nodeList = model.get("flow"); if (nodeList != null && nodeList.size() > 0) { return new FlowImpl<JobXMLDescriptor>(this, "flow", model, nodeList.get(0)); } return createFlow(); } | /**
* If not already created, a new <code>flow</code> element will be created and returned.
* Otherwise, the first existing <code>flow</code> element will be returned.
* @return the instance defined for the element <code>flow</code>
*/ | If not already created, a new <code>flow</code> element will be created and returned. Otherwise, the first existing <code>flow</code> element will be returned | getOrCreateFlow | {
"repo_name": "forge/javaee-descriptors",
"path": "impl/src/main/java/org/jboss/shrinkwrap/descriptor/impl/jobXML10/JobXMLDescriptorImpl.java",
"license": "epl-1.0",
"size": 18396
} | [
"java.util.List",
"org.jboss.shrinkwrap.descriptor.api.jobXML10.Flow",
"org.jboss.shrinkwrap.descriptor.api.jobXML10.JobXMLDescriptor",
"org.jboss.shrinkwrap.descriptor.spi.node.Node"
] | import java.util.List; import org.jboss.shrinkwrap.descriptor.api.jobXML10.Flow; import org.jboss.shrinkwrap.descriptor.api.jobXML10.JobXMLDescriptor; import org.jboss.shrinkwrap.descriptor.spi.node.Node; | import java.util.*; import org.jboss.shrinkwrap.descriptor.api.*; import org.jboss.shrinkwrap.descriptor.spi.node.*; | [
"java.util",
"org.jboss.shrinkwrap"
] | java.util; org.jboss.shrinkwrap; | 150,131 |
@Test
public void subTreeBytesShouldBeCorrectWithAddition() throws Http2Exception {
// Block the connection
exhaustStreamWindow(CONNECTION_STREAM_ID);
Http2Stream stream0 = connection.connectionStream();
Http2Stream streamA = connection.stream(STREAM_A);
Http2Stream streamB = connection.stream(STREAM_B);
Http2Stream streamC = connection.stream(STREAM_C);
Http2Stream streamD = connection.stream(STREAM_D);
Http2Stream streamE = connection.local().createStream(STREAM_E, false);
streamE.setPriority(STREAM_A, DEFAULT_PRIORITY_WEIGHT, true);
// Send a bunch of data on each stream.
final IntObjectMap<Integer> streamSizes = new IntObjectHashMap<Integer>(4);
streamSizes.put(STREAM_A, 400);
streamSizes.put(STREAM_B, 500);
streamSizes.put(STREAM_C, 600);
streamSizes.put(STREAM_D, 700);
streamSizes.put(STREAM_E, 900);
final ByteBuf[] bufs = { dummyData(streamSizes.get(STREAM_A), 0), dummyData(streamSizes.get(STREAM_B), 0),
dummyData(streamSizes.get(STREAM_C), 0), dummyData(streamSizes.get(STREAM_D), 0),
dummyData(streamSizes.get(STREAM_E), 0) };
try {
send(STREAM_A, bufs[0], 0);
send(STREAM_B, bufs[1], 0);
send(STREAM_C, bufs[2], 0);
send(STREAM_D, bufs[3], 0);
send(STREAM_E, bufs[4], 0);
verifyNoWrite(STREAM_A);
verifyNoWrite(STREAM_B);
verifyNoWrite(STREAM_C);
verifyNoWrite(STREAM_D);
verifyNoWrite(STREAM_E);
OutboundFlowState state = state(stream0);
assertEquals(
calculateStreamSizeSum(streamSizes,
Arrays.asList(STREAM_A, STREAM_B, STREAM_C, STREAM_D, STREAM_E)),
state.priorityBytes());
state = state(streamA);
assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_A, STREAM_E, STREAM_C, STREAM_D)),
state.priorityBytes());
state = state(streamB);
assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_B)), state.priorityBytes());
state = state(streamC);
assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_C)), state.priorityBytes());
state = state(streamD);
assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_D)), state.priorityBytes());
state = state(streamE);
assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_E, STREAM_C, STREAM_D)),
state.priorityBytes());
} finally {
manualSafeRelease(bufs);
}
} | void function() throws Http2Exception { exhaustStreamWindow(CONNECTION_STREAM_ID); Http2Stream stream0 = connection.connectionStream(); Http2Stream streamA = connection.stream(STREAM_A); Http2Stream streamB = connection.stream(STREAM_B); Http2Stream streamC = connection.stream(STREAM_C); Http2Stream streamD = connection.stream(STREAM_D); Http2Stream streamE = connection.local().createStream(STREAM_E, false); streamE.setPriority(STREAM_A, DEFAULT_PRIORITY_WEIGHT, true); final IntObjectMap<Integer> streamSizes = new IntObjectHashMap<Integer>(4); streamSizes.put(STREAM_A, 400); streamSizes.put(STREAM_B, 500); streamSizes.put(STREAM_C, 600); streamSizes.put(STREAM_D, 700); streamSizes.put(STREAM_E, 900); final ByteBuf[] bufs = { dummyData(streamSizes.get(STREAM_A), 0), dummyData(streamSizes.get(STREAM_B), 0), dummyData(streamSizes.get(STREAM_C), 0), dummyData(streamSizes.get(STREAM_D), 0), dummyData(streamSizes.get(STREAM_E), 0) }; try { send(STREAM_A, bufs[0], 0); send(STREAM_B, bufs[1], 0); send(STREAM_C, bufs[2], 0); send(STREAM_D, bufs[3], 0); send(STREAM_E, bufs[4], 0); verifyNoWrite(STREAM_A); verifyNoWrite(STREAM_B); verifyNoWrite(STREAM_C); verifyNoWrite(STREAM_D); verifyNoWrite(STREAM_E); OutboundFlowState state = state(stream0); assertEquals( calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_A, STREAM_B, STREAM_C, STREAM_D, STREAM_E)), state.priorityBytes()); state = state(streamA); assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_A, STREAM_E, STREAM_C, STREAM_D)), state.priorityBytes()); state = state(streamB); assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_B)), state.priorityBytes()); state = state(streamC); assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_C)), state.priorityBytes()); state = state(streamD); assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_D)), state.priorityBytes()); state = state(streamE); assertEquals(calculateStreamSizeSum(streamSizes, Arrays.asList(STREAM_E, STREAM_C, STREAM_D)), state.priorityBytes()); } finally { manualSafeRelease(bufs); } } | /**
* In this test, we block all streams and add a node to the priority tree and verify
*
* <pre>
 *         [0]
 *        /   \
 *       A     B
 *      / \
 *     C   D
* </pre>
*
* After the tree shift:
*
* <pre>
 *         [0]
 *        /   \
 *       A     B
 *       |
 *       E
 *      / \
 *     C   D
* </pre>
*/ | In this test, we block all streams and add a node to the priority tree and verify <code> [0] \ A B \ C D </code> After the tree shift: <code> [0] \ A B | E \ C D </code> | subTreeBytesShouldBeCorrectWithAddition | {
"repo_name": "drowning/netty",
"path": "codec-http2/src/test/java/io/netty/handler/codec/http2/DefaultHttp2OutboundFlowControllerTest.java",
"license": "apache-2.0",
"size": 48643
} | [
"io.netty.buffer.ByteBuf",
"io.netty.handler.codec.http2.DefaultHttp2OutboundFlowController",
"io.netty.util.collection.IntObjectHashMap",
"io.netty.util.collection.IntObjectMap",
"java.util.Arrays",
"org.junit.Assert"
] | import io.netty.buffer.ByteBuf; import io.netty.handler.codec.http2.DefaultHttp2OutboundFlowController; import io.netty.util.collection.IntObjectHashMap; import io.netty.util.collection.IntObjectMap; import java.util.Arrays; import org.junit.Assert; | import io.netty.buffer.*; import io.netty.handler.codec.http2.*; import io.netty.util.collection.*; import java.util.*; import org.junit.*; | [
"io.netty.buffer",
"io.netty.handler",
"io.netty.util",
"java.util",
"org.junit"
] | io.netty.buffer; io.netty.handler; io.netty.util; java.util; org.junit; | 1,435,544 |
public Date getUnmodifiedSinceConstraint() {
return unmodifiedSinceConstraint;
} | Date function() { return unmodifiedSinceConstraint; } | /**
* <p>
* Gets the optional unmodified constraint that restricts this
* request to executing only if the source object has <b>not</b> been
* modified after the specified date.
* </p>
* <p>
* The unmodified since constraint may be used with matching ETag
* constraints, but not with any other type of constraint.
* </p>
*
* @return The optional unmodified constraint that restricts this
* request to executing only if the source object has <b>not</b>
* been modified after the specified date.
*/ | Gets the optional unmodified constraint that restricts this request to executing only if the source object has not been modified after the specified date. The unmodified since constraint may be used with matching ETag constraints, but not with any other type of constraint. | getUnmodifiedSinceConstraint | {
"repo_name": "amahule/aws-sdk-for-android",
"path": "src/com/amazonaws/services/s3/model/CopyObjectRequest.java",
"license": "apache-2.0",
"size": 33764
} | [
"java.util.Date"
] | import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 10,097 |
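A short usage sketch for the constraint described above, assuming the matching setter on the same request class and a pre-built AmazonS3 client; bucket and key names are placeholders.

    // Copy only if the source object has not been modified within the last hour.
    CopyObjectRequest request = new CopyObjectRequest("src-bucket", "photo.jpg", "dst-bucket", "photo.jpg");
    request.setUnmodifiedSinceConstraint(new Date(System.currentTimeMillis() - 60L * 60L * 1000L));
    s3Client.copyObject(request);                 // s3Client: existing AmazonS3 client (assumed)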
// -----------------------------------------------------------------------------
// ------------------ Start of inherited loading functions ---------------------
// -----------------------------------------------------------------------------
@Override
public void loadDataFromFile()
throws InitializationException {
// Variable declarations
int RatesLoaded = 0;
BufferedReader inFile;
String tmpFileRecord;
String[] RateFields;
int MapsLoaded = 0;
String tmpGroup;
String tmpModel;
String tmpRUM;
String tmpResource;
String tmpRUMType;
String tmpResCtr;
// ****** perform the loading of the model descriptors ******
// inform the user about the start of the price group phase
OpenRate.getOpenRateFrameworkLog().info("Starting RUM Map Data Loading from file for <" + getSymbolicName() + ">");
// Try to open the file
try {
inFile = new BufferedReader(new FileReader(cacheDataFile));
} catch (FileNotFoundException fnfe) {
message = "Not able to read file : <"
+ cacheDataFile + ">. message = <" + fnfe.getMessage() + ">";
OpenRate.getOpenRateFrameworkLog().error(message);
throw new InitializationException(message, getSymbolicName());
}
// File open, now get the stuff
try {
while (inFile.ready()) {
tmpFileRecord = inFile.readLine();
if ((tmpFileRecord.startsWith("#"))
| tmpFileRecord.trim().equals("")) {
// Comment line, ignore
} else {
MapsLoaded++;
RateFields = tmpFileRecord.split(";");
// Prepare and add the line
tmpGroup = RateFields[1];
tmpModel = RateFields[2];
tmpRUM = RateFields[3];
tmpResource = RateFields[4];
tmpRUMType = RateFields[5];
tmpResCtr = RateFields[6];
addRUMMap(tmpGroup, tmpModel, tmpRUM, tmpResource, tmpRUMType, tmpResCtr);
}
}
} catch (IOException ex) {
message = "Error reading input file <" + cacheDataFile
+ "> in record <" + RatesLoaded + ">. IO Error.";
OpenRate.getOpenRateFrameworkLog().fatal(message);
throw new InitializationException(message, getSymbolicName());
} catch (ArrayIndexOutOfBoundsException ex) {
message
= "Error reading input file <" + cacheDataFile
+ "> in record <" + RatesLoaded + ">. Malformed Record.";
OpenRate.getOpenRateFrameworkLog().fatal(message);
throw new InitializationException(message, getSymbolicName());
} finally {
try {
inFile.close();
} catch (IOException ex) {
message = "Error closing input file <" + cacheDataFile
+ ">. message = <" + ex.getMessage() + ">";
OpenRate.getOpenRateFrameworkLog().error(message);
throw new InitializationException(message, getSymbolicName());
}
}
OpenRate.getOpenRateFrameworkLog().info(
"Price Group Data Loading completed. " + MapsLoaded
+ " configuration lines loaded from <" + cacheDataFile
+ ">");
} | void function() throws InitializationException { int RatesLoaded = 0; BufferedReader inFile; String tmpFileRecord; String[] RateFields; int MapsLoaded = 0; String tmpGroup; String tmpModel; String tmpRUM; String tmpResource; String tmpRUMType; String tmpResCtr; OpenRate.getOpenRateFrameworkLog().info(STR + getSymbolicName() + ">"); try { inFile = new BufferedReader(new FileReader(cacheDataFile)); } catch (FileNotFoundException fnfe) { message = STR + cacheDataFile + STR + fnfe.getMessage() + ">"; OpenRate.getOpenRateFrameworkLog().error(message); throw new InitializationException(message, getSymbolicName()); } try { while (inFile.ready()) { tmpFileRecord = inFile.readLine(); if ((tmpFileRecord.startsWith("#")) tmpFileRecord.trim().equals(STR;STRError reading input file <STR> in record <STR>. IO Error.STRError reading input file <STR> in record <STR>. Malformed Record.STRError closing input file <" + cacheDataFile + STR + ex.getMessage() + ">STRPrice Group Data Loading completed. STR configuration lines loaded from <STR>"); } | /**
* Load the data from the defined file
*
* @throws OpenRate.exception.InitializationException
*/ | Load the data from the defined file | loadDataFromFile | {
"repo_name": "petebarnett/OpenRate",
"path": "src/main/java/OpenRate/cache/RUMMapCache.java",
"license": "gpl-2.0",
"size": 18979
} | [
"java.io.BufferedReader",
"java.io.FileNotFoundException",
"java.io.FileReader"
] | import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; | import java.io.*; | [
"java.io"
] | java.io; | 774,876 |
public void setApprovedTotal(final long value) {
this.approvedTotal = value;
}
/**
* Gets the value of the approvedPercentage property.
*
* @return
* possible object is
* {@link BigDecimal } | void function(final long value) { this.approvedTotal = value; } /** * Gets the value of the approvedPercentage property. * * * possible object is * {@link BigDecimal } | /**
* Sets the value of the approvedTotal property.
*
*/ | Sets the value of the approvedTotal property | setApprovedTotal | {
"repo_name": "Hack23/cia",
"path": "model.internal.application.user.impl/src/main/java/com/hack23/cia/model/internal/application/data/committee/impl/ViewRiksdagenVoteDataBallotPartySummaryDaily.java",
"license": "apache-2.0",
"size": 39491
} | [
"java.math.BigDecimal"
] | import java.math.BigDecimal; | import java.math.*; | [
"java.math"
] | java.math; | 1,996,963 |
List<UserAccount> getUserAccountsbyStatus(final Boolean status, final Date beforeDate, final Date afterDate);
| List<UserAccount> getUserAccountsbyStatus(final Boolean status, final Date beforeDate, final Date afterDate); | /**
* Get user account by status.
* @param status
* @param beforeDate
* @param afterDate
* @return
*/ | Get user account by status | getUserAccountsbyStatus | {
"repo_name": "cristiani/encuestame",
"path": "enme-persistence/enme-dao/src/main/java/org/encuestame/persistence/dao/IAccountDao.java",
"license": "apache-2.0",
"size": 9584
} | [
"java.util.Date",
"java.util.List",
"org.encuestame.persistence.domain.security.UserAccount"
] | import java.util.Date; import java.util.List; import org.encuestame.persistence.domain.security.UserAccount; | import java.util.*; import org.encuestame.persistence.domain.security.*; | [
"java.util",
"org.encuestame.persistence"
] | java.util; org.encuestame.persistence; | 1,461,323 |
protected void executeSQL(String sql) {
Statement statement = null;
try {
boolean autoCommit = connection.getAutoCommit();
if (!autoCommit) {
connection.setAutoCommit(true);
}
statement = connection.createStatement();
statement.execute(sql);
if (!autoCommit) {
connection.setAutoCommit(autoCommit);
}
} catch (SQLException e) {
error("Caught " + e.getClass() + " trying: " + sql);
if (statement == null) {
error(analyzeWarnings(connection));
} else {
error(analyzeWarnings(statement));
}
} finally {
if (statement != null) {
try {
statement.close();
} catch (SQLException e) {
// nothing can be done here
error("Error closing statement " + sql);
}
}
}
} | void function(String sql) { Statement statement = null; try { boolean autoCommit = connection.getAutoCommit(); if (!autoCommit) { connection.setAutoCommit(true); } statement = connection.createStatement(); statement.execute(sql); if (!autoCommit) { connection.setAutoCommit(autoCommit); } } catch (SQLException e) { error(STR + e.getClass() + STR + sql); if (statement == null) { error(analyzeWarnings(connection)); } else { error(analyzeWarnings(statement)); } } finally { if (statement != null) { try { statement.close(); } catch (SQLException e) { error(STR + sql); } } } } | /** Execute the sql in its own statement. If the connection is not
* currently autocommit, set autocommit to true and restore it after
* the statement is executed.
* @param sql the sql to execute
*/ | Execute the sql in its own statement. If the connection is not currently autocommit, set autocommit to true and restore it after the statement is executed | executeSQL | {
"repo_name": "myblockchain/myblockchain",
"path": "storage/ndb/clusterj/clusterj-test/src/main/java/testsuite/clusterj/AbstractClusterJTest.java",
"license": "gpl-2.0",
"size": 24628
} | [
"java.sql.SQLException",
"java.sql.Statement"
] | import java.sql.SQLException; import java.sql.Statement; | import java.sql.*; | [
"java.sql"
] | java.sql; | 782,843 |
public Csv nullLiteral(String nullLiteral) {
Preconditions.checkNotNull(nullLiteral);
internalProperties.putString(FORMAT_NULL_LITERAL, nullLiteral);
return this;
} | Csv function(String nullLiteral) { Preconditions.checkNotNull(nullLiteral); internalProperties.putString(FORMAT_NULL_LITERAL, nullLiteral); return this; } | /**
* Sets the null literal string that is interpreted as a null value (disabled by default).
*
* @param nullLiteral null literal (e.g. "null" or "n/a")
*/ | Sets the null literal string that is interpreted as a null value (disabled by default) | nullLiteral | {
"repo_name": "clarkyzl/flink",
"path": "flink-formats/flink-csv/src/main/java/org/apache/flink/table/descriptors/Csv.java",
"license": "apache-2.0",
"size": 7711
} | [
"org.apache.flink.util.Preconditions"
] | import org.apache.flink.util.Preconditions; | import org.apache.flink.util.*; | [
"org.apache.flink"
] | org.apache.flink; | 450,978 |
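A minimal sketch of the fluent call documented above; the descriptor is built in isolation here rather than being registered with a table environment.

    // Treat the literal "n/a" in CSV input as SQL NULL; other format settings can be chained the same way.
    Csv format = new Csv().nullLiteral("n/a");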
BoshDeployment deleteBoshDeploymentIfExists(String name);
/**
* Returns certain {@link BoshDirectorTask} with all the {@link Event} initialized to their current state
*
* @param id the identification of certain {@link BoshDirectorTask} | BoshDeployment deleteBoshDeploymentIfExists(String name); /** * Returns certain {@link BoshDirectorTask} with all the {@link Event} initialized to their current state * * @param id the identification of certain {@link BoshDirectorTask} | /**
 * Deletes the {@link BoshDeployment} identified by its {@link BoshDeployment#getName()}, if it still exists.
*
* @param name the {@link BoshDeployment#getName()} we want to delete
* @return the {@link BoshDeployment} to delete
 */ | Deletes the <code>BoshDeployment</code> identified by its <code>BoshDeployment#getName()</code>, if it still exists | deleteBoshDeploymentIfExists | {
"repo_name": "swisscom/open-service-broker",
"path": "broker/core/bosh-service/src/main/java/com/swisscom/cloud/sb/broker/services/bosh/BoshDirectorService.java",
"license": "apache-2.0",
"size": 4675
} | [
"com.swisscom.cloud.sb.broker.services.bosh.client.BoshDeployment",
"com.swisscom.cloud.sb.broker.services.bosh.client.BoshDirectorTask"
] | import com.swisscom.cloud.sb.broker.services.bosh.client.BoshDeployment; import com.swisscom.cloud.sb.broker.services.bosh.client.BoshDirectorTask; | import com.swisscom.cloud.sb.broker.services.bosh.client.*; | [
"com.swisscom.cloud"
] | com.swisscom.cloud; | 2,099,725 |
private Parser createLookaheadParser() {
return new Parser(config,
new LookaheadErrorReporter(),
this.scanner.getFile(),
this.scanner.getOffset(),
inGeneratorContext());
} | Parser function() { return new Parser(config, new LookaheadErrorReporter(), this.scanner.getFile(), this.scanner.getOffset(), inGeneratorContext()); } | /**
* Forks the parser at the current point and returns a new
* parser for speculative parsing.
*/ | Forks the parser at the current point and returns a new parser for speculative parsing | createLookaheadParser | {
"repo_name": "dushmis/closure-compiler",
"path": "src/com/google/javascript/jscomp/parsing/parser/Parser.java",
"license": "apache-2.0",
"size": 115315
} | [
"com.google.javascript.jscomp.parsing.parser.util.LookaheadErrorReporter"
] | import com.google.javascript.jscomp.parsing.parser.util.LookaheadErrorReporter; | import com.google.javascript.jscomp.parsing.parser.util.*; | [
"com.google.javascript"
] | com.google.javascript; | 1,154,672 |
@Test
public void test11_createNewLocationBoundaryFile() throws ApplicationException{
//Create a location
String locationName = "India";
String filaName = "India.kml";
LocationTypeDto countryLocationTypeDto = createAndSaveLocationType(locationService, "Country", null, true);
LocationDto location = createLocation(locationName, countryLocationTypeDto, null);
LocationDto savedLocation = locationService.saveLocation(location);
final FileService fileService = mock(FileService.class, "fileService");
final InputStream inputStream = mock(InputStream.class, "inputStream");
expect(new Expectations() {{
oneOf (fileService).saveFile(with(any(String.class)), with(any(String.class)), with(any(InputStream.class)));
}});
LocationBoundaryFileDto locationBoundaryFileDto = locationService.createNewLocationBoundaryFile(savedLocation.getId(), filaName, inputStream, fileService);
assertNotNull(locationBoundaryFileDto);
assertNotNull(locationBoundaryFileDto.getId());
assertNotNull(locationBoundaryFileDto.getFileNameAndPath());
assertEquals(savedLocation.getId(), locationBoundaryFileDto.getLocationId());
assertEquals("Pending", locationBoundaryFileDto.getStatus());
} | void function() throws ApplicationException{ String locationName = "India"; String filaName = STR; LocationTypeDto countryLocationTypeDto = createAndSaveLocationType(locationService, STR, null, true); LocationDto location = createLocation(locationName, countryLocationTypeDto, null); LocationDto savedLocation = locationService.saveLocation(location); final FileService fileService = mock(FileService.class, STR); final InputStream inputStream = mock(InputStream.class, STR); expect(new Expectations() {{ oneOf (fileService).saveFile(with(any(String.class)), with(any(String.class)), with(any(InputStream.class))); }}); LocationBoundaryFileDto locationBoundaryFileDto = locationService.createNewLocationBoundaryFile(savedLocation.getId(), filaName, inputStream, fileService); assertNotNull(locationBoundaryFileDto); assertNotNull(locationBoundaryFileDto.getId()); assertNotNull(locationBoundaryFileDto.getFileNameAndPath()); assertEquals(savedLocation.getId(), locationBoundaryFileDto.getLocationId()); assertEquals(STR, locationBoundaryFileDto.getStatus()); } | /**
* Test to upload a LocationBoundary file
* @throws ApplicationException
*/ | Test to upload a LocationBoundary file | test11_createNewLocationBoundaryFile | {
"repo_name": "eswaraj/platform",
"path": "core/src/test/java/com/eswaraj/core/service/impl/TestLocationServiceImpl.java",
"license": "gpl-3.0",
"size": 16835
} | [
"com.eswaraj.core.exceptions.ApplicationException",
"com.eswaraj.core.service.FileService",
"com.eswaraj.web.dto.LocationBoundaryFileDto",
"com.eswaraj.web.dto.LocationDto",
"com.eswaraj.web.dto.LocationTypeDto",
"java.io.InputStream",
"org.jmock.Expectations",
"org.junit.Assert"
] | import com.eswaraj.core.exceptions.ApplicationException; import com.eswaraj.core.service.FileService; import com.eswaraj.web.dto.LocationBoundaryFileDto; import com.eswaraj.web.dto.LocationDto; import com.eswaraj.web.dto.LocationTypeDto; import java.io.InputStream; import org.jmock.Expectations; import org.junit.Assert; | import com.eswaraj.core.exceptions.*; import com.eswaraj.core.service.*; import com.eswaraj.web.dto.*; import java.io.*; import org.jmock.*; import org.junit.*; | [
"com.eswaraj.core",
"com.eswaraj.web",
"java.io",
"org.jmock",
"org.junit"
] | com.eswaraj.core; com.eswaraj.web; java.io; org.jmock; org.junit; | 2,904,995 |
public void registerVerification(String verificationCode)
{
if (PMUtils.isBlankOrNull(verificationCode))
return;
//if there is no device signature we register again (device_register)
//if the device is already verified we go to the inbox
if (PMPreferencesHelper.getDeviceSignature() == null || PMPreferencesHelper.isVerified())
{
registerDevice();
return;
}
PMWebServiceController.device_verification(verificationCode);
}
// public void updateLocation()
// {
// LocationManager locationManager = (LocationManager) mContext
// .getSystemService(Context.LOCATION_SERVICE);
// Criteria criteria = new Criteria();
// String provider = locationManager.getBestProvider(criteria, false);
// Location location = locationManager.getLastKnownLocation(provider);
//
// if (location == null) //low powered solution
// {
// criteria.setAccuracy(Criteria.ACCURACY_COARSE);
// criteria.setPowerRequirement(Criteria.POWER_LOW);
// provider = locationManager.getBestProvider(criteria, false);
// location = locationManager.getLastKnownLocation(provider);
// }
//
// //Log.d(TAG, "Location Updated: " + provider + " : " + location);
//
// if (location == null)
// {
// return;
// }
// PMWebServiceController.device_location_update( location); //fire and forget
// } | void function(String verificationCode) { if (PMUtils.isBlankOrNull(verificationCode)) return; if (PMPreferencesHelper.getDeviceSignature() == null || PMPreferencesHelper.isVerified()) { registerDevice(); return; } PMWebServiceController.device_verification(verificationCode); } | /**
* If the notification returned by {@link #registerPhoneNumber} is
* {@link Result#REQUEST_VERIFICATION_CODE} the user should be prompted for
* the verification code this method should be called with the code.
*
* @param verificationCode
* sent via SMS (text)
 */ | If the notification returned by <code>#registerPhoneNumber</code> is <code>Result#REQUEST_VERIFICATION_CODE</code>, the user should be prompted for the verification code, and this method should be called with that code | registerVerification | {
"repo_name": "PanaceaMobile/panacea-mobile-blackberry-sdk",
"path": "PanaceaSDK/src/com/panaceamobile/panacea/sdk/PanaceaSDK.java",
"license": "mit",
"size": 13884
} | [
"com.panaceamobile.panacea.sdk.webservices.PMWebServiceController"
] | import com.panaceamobile.panacea.sdk.webservices.PMWebServiceController; | import com.panaceamobile.panacea.sdk.webservices.*; | [
"com.panaceamobile.panacea"
] | com.panaceamobile.panacea; | 109,501 |
public void remove(SnmpOidTable table) throws SnmpStatusException; | void function(SnmpOidTable table) throws SnmpStatusException; | /**
* Removes an <CODE>SnmpOidTable</CODE> object from this <CODE>SnmpOidDatabase</CODE>.
* @param table The table to be removed.
*/ | Removes an <code>SnmpOidTable</code> object from this <code>SnmpOidDatabase</code> | remove | {
"repo_name": "shun634501730/java_source_cn",
"path": "src_en/com/sun/jmx/snmp/SnmpOidDatabase.java",
"license": "apache-2.0",
"size": 2893
} | [
"com.sun.jmx.snmp.SnmpOidTable",
"com.sun.jmx.snmp.SnmpStatusException"
] | import com.sun.jmx.snmp.SnmpOidTable; import com.sun.jmx.snmp.SnmpStatusException; | import com.sun.jmx.snmp.*; | [
"com.sun.jmx"
] | com.sun.jmx; | 1,131,832 |
public boolean initiateConnection(Socket sock, Long sid) {
DataOutputStream dout = null;
try {
// Sending id and challenge
dout = new DataOutputStream(sock.getOutputStream());
dout.writeLong(self.getId());
dout.flush();
} catch (IOException e) {
LOG.warn("Ignoring exception reading or writing challenge: ", e);
closeSocket(sock);
return false;
}
        // The client and server sides are handled the same way: with the connection handling below, at most one election connection is ever established between any two election servers.
// If lost the challenge, then drop the new connection
        // If the peer's id is larger than ours, close the connection; as a result only the server with the larger id connects to the one with the smaller id, which avoids redundant connections.
if (sid > self.getId()) {
LOG.info("Have smaller server identifier, so dropping the " +
"connection: (" + sid + ", " + self.getId() + ")大id的server才会去连接小id的server,每两台机器只有一个选举连接");
closeSocket(sock);
// Otherwise proceed with the connection
        // If the peer's id is smaller than ours, keep the connection and start dedicated send and receive threads.
} else {
SendWorker sw = new SendWorker(sock, sid);
RecvWorker rw = new RecvWorker(sock, sid, sw);
sw.setRecv(rw);
SendWorker vsw = senderWorkerMap.get(sid);
if(vsw != null)
vsw.finish();
senderWorkerMap.put(sid, sw);
if (!queueSendMap.containsKey(sid)) {
queueSendMap.put(sid, new ArrayBlockingQueue<ByteBuffer>(
SEND_CAPACITY));
}
sw.start();
rw.start();
return true;
}
return false;
}
| boolean function(Socket sock, Long sid) { DataOutputStream dout = null; try { dout = new DataOutputStream(sock.getOutputStream()); dout.writeLong(self.getId()); dout.flush(); } catch (IOException e) { LOG.warn(STR, e); closeSocket(sock); return false; } if (sid > self.getId()) { LOG.info(STR + STR + sid + STR + self.getId() + STR); closeSocket(sock); } else { SendWorker sw = new SendWorker(sock, sid); RecvWorker rw = new RecvWorker(sock, sid, sw); sw.setRecv(rw); SendWorker vsw = senderWorkerMap.get(sid); if(vsw != null) vsw.finish(); senderWorkerMap.put(sid, sw); if (!queueSendMap.containsKey(sid)) { queueSendMap.put(sid, new ArrayBlockingQueue<ByteBuffer>( SEND_CAPACITY)); } sw.start(); rw.start(); return true; } return false; } | /**
* If this server has initiated the connection, then it gives up on the
* connection if it loses challenge. Otherwise, it keeps the connection.
*/ | If this server has initiated the connection, then it gives up on the connection if it loses challenge. Otherwise, it keeps the connection | initiateConnection | {
"repo_name": "CliffYuan/zookeeper",
"path": "src/java/main/org/apache/zookeeper/server/quorum/QuorumCnxManager.java",
"license": "apache-2.0",
"size": 32259
} | [
"java.io.DataOutputStream",
"java.io.IOException",
"java.net.Socket",
"java.nio.ByteBuffer",
"java.util.concurrent.ArrayBlockingQueue"
] | import java.io.DataOutputStream; import java.io.IOException; import java.net.Socket; import java.nio.ByteBuffer; import java.util.concurrent.ArrayBlockingQueue; | import java.io.*; import java.net.*; import java.nio.*; import java.util.concurrent.*; | [
"java.io",
"java.net",
"java.nio",
"java.util"
] | java.io; java.net; java.nio; java.util; | 264,935 |
final public void setFilterMode(FilterMode filterMode) {
pssmRenderer.setFilterMode(filterMode);
} | final void function(FilterMode filterMode) { pssmRenderer.setFilterMode(filterMode); } | /**
* Sets the filtering mode for shadow edges see {@link FilterMode} for more info
* @param filterMode
*/ | Sets the filtering mode for shadow edges see <code>FilterMode</code> for more info | setFilterMode | {
"repo_name": "zzuegg/jmonkeyengine",
"path": "jme3-core/src/main/java/com/jme3/shadow/PssmShadowFilter.java",
"license": "bsd-3-clause",
"size": 10253
} | [
"com.jme3.shadow.PssmShadowRenderer"
] | import com.jme3.shadow.PssmShadowRenderer; | import com.jme3.shadow.*; | [
"com.jme3.shadow"
] | com.jme3.shadow; | 1,300,554 |
public static List<Query> parse(final String query) {
final ArrayList<Query> parts = new ArrayList<>();
for (String queryPart : query.split(COLON)) {
queryPart = queryPart.trim();
if (queryPart.startsWith("#")) {
parts.add(new Query(QueryType.ById, queryPart.substring(1).split(COMMA)));
} else if (queryPart.startsWith(".")) {
parts.add(new Query(QueryType.ByClass, queryPart.substring(1).split(COMMA)));
} else if (queryPart.equals("*")) {
parts.add(new Query(QueryType.All, ""));
} else {
parts.add(new Query(QueryType.FieldFilter, queryPart.split(COMMA)));
}
}
return parts;
}
public final String[] args;
public final boolean singleton;
public final QueryType typ;
private Query(final QueryType typ, final String... args) {
this.typ = typ;
this.args = args;
singleton = args.length == 1 && typ.singletonPossible;
} | static List<Query> function(final String query) { final ArrayList<Query> parts = new ArrayList<>(); for (String queryPart : query.split(COLON)) { queryPart = queryPart.trim(); if (queryPart.startsWith("#")) { parts.add(new Query(QueryType.ById, queryPart.substring(1).split(COMMA))); } else if (queryPart.startsWith(".")) { parts.add(new Query(QueryType.ByClass, queryPart.substring(1).split(COMMA))); } else if (queryPart.equals("*")) { parts.add(new Query(QueryType.All, "")); } else { parts.add(new Query(QueryType.FieldFilter, queryPart.split(COMMA))); } } return parts; } public final String[] args; public final boolean singleton; public final QueryType typ; private Query(final QueryType typ, final String... args) { this.typ = typ; this.args = args; singleton = args.length == 1 && typ.singletonPossible; } | /**
* parse the query
*
* @param query
* the raw query to parse
* @return a list of queries to apply
*/ | parse the query | parse | {
"repo_name": "jeffrey-io/world-bootstrap",
"path": "src/main/java/io/jeffrey/world/document/Query.java",
"license": "apache-2.0",
"size": 5164
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 875,162 |
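A usage sketch for the parser above, assuming the COLON and COMMA constants are the literal ':' and ',' separators.

    // "#a,b" selects by id, ".sel" selects by class, "*" selects everything; anything else is a field filter.
    List<Query> parts = Query.parse("#a,b : .sel : *");
    for (Query q : parts) {
        System.out.println(q.typ + " " + Arrays.toString(q.args) + " singleton=" + q.singleton);
    }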
protected void generateObjectParams(JspJavaWriter out, boolean isEmbed)
throws Exception
{
for (int i = 0; i < _attrNames.size(); i++) {
String name = _attrNames.get(i);
String value = _attrValues.get(i);
if (name.equals("type") || name.equals("jreversion") ||
name.equals("iepluginurl") || name.equals("nspluginurl") ||
name.equals("code") || name.equals("archive") ||
name.equals("codebase") || name.equals("object"))
continue;
printText(out, " " + name + "=\"");
out.println("out.print(" + generateValue(String.class, value) + ");");
printText(out, "\"");
}
if (! isEmbed)
printText(out, ">\n");
for (int i = 0; i < _attrNames.size(); i++) {
String name = _attrNames.get(i);
String value = _attrValues.get(i);
if (name.equals("archive"))
name = "java_archive";
else if (name.equals("codebase"))
name = "java_codebase";
else if (name.equals("code"))
name = "java_code";
else if (name.equals("object"))
name = "java_object";
else
continue;
if (isEmbed) {
printText(out, " " + name + "=\"");
if (hasRuntimeAttribute(value))
out.println("out.print(" + getRuntimeAttribute(value) + ");");
else
printText(out, value);
printText(out, "\"");
}
else {
printText(out, "<param name=\"" + name + "\" value=\"");
if (hasRuntimeAttribute(value))
out.println("out.print(" + getRuntimeAttribute(value) + ");");
else
printText(out, value);
printText(out, "\">\n");
}
}
if (_params == null)
return;
ArrayList<JspParam> paramList = _params.getParams();
for (int i = 0; i < paramList.size(); i++) {
JspParam param = paramList.get(i);
String name = param.getName();
String value = param.getValue();
if (isEmbed)
printText(out, " " + name + "=\"");
else
printText(out, "<param name=\"" + name + "\" value=\"");
if (hasRuntimeAttribute(value)) {
out.println("out.print(" + getRuntimeAttribute(value) + ");");
}
else
printText(out, value);
if (isEmbed)
printText(out, "\"");
else
printText(out, "\">\n");
}
} | void function(JspJavaWriter out, boolean isEmbed) throws Exception { for (int i = 0; i < _attrNames.size(); i++) { String name = _attrNames.get(i); String value = _attrValues.get(i); if (name.equals("type") name.equals(STR) name.equals(STR) name.equals(STR) name.equals("code") name.equals(STR) name.equals(STR) name.equals(STR)) continue; printText(out, " STR=\"STRout.print(STR);STR\STR>\n"); for (int i = 0; i < _attrNames.size(); i++) { String name = _attrNames.get(i); String value = _attrValues.get(i); if (name.equals(STR)) name = "java_archive"; else if (name.equals(STR)) name = "java_codebaseSTRcodeSTRjava_code"; else if (name.equals(STR)) name = "java_objectSTR STR=\STRout.print(STR);STR\STR<param name=\"STR\" value=\STRout.print(STR);STR\">\n"); } } if (_params == null) return; ArrayList<JspParam> paramList = _params.getParams(); for (int i = 0; i < paramList.size(); i++) { JspParam param = paramList.get(i); String name = param.getName(); String value = param.getValue(); if (isEmbed) printText(out, " STR=\"STR<param name=\"STR\STRSTRout.print(STR);STR\"STR\">\n"); } } | /**
* Generates the parameters for the jsp:plugin object.
*/ | Generates the parameters for the jsp:plugin object | generateObjectParams | {
"repo_name": "WelcomeHUME/svn-caucho-com-resin",
"path": "modules/resin/src/com/caucho/jsp/java/JspPlugin.java",
"license": "gpl-2.0",
"size": 9832
} | [
"java.util.ArrayList"
] | import java.util.ArrayList; | import java.util.*; | [
"java.util"
] | java.util; | 2,065,542 |
public Pair<InflationProviderDiscount, CurveBuildingBlockBundle> makeCurvesFromDerivatives(
final MultiCurveBundle<? extends GeneratorCurve>[] curveBundles,
final InflationProviderDiscount knownData, final CurveBuildingBlockBundle knownBlockBundle,
final LinkedHashMap<String, Currency> discountingMap, final LinkedHashMap<String, IndexON[]> forwardONMap,
final LinkedHashMap<String, IborIndex[]> forwardIborMap, final LinkedHashMap<String, IndexPrice[]> inflationMap,
final InstrumentDerivativeVisitor<ParameterInflationProviderInterface, Double> calculator,
final InstrumentDerivativeVisitor<ParameterInflationProviderInterface, InflationSensitivity> sensitivityCalculator) {
ArgumentChecker.notNull(curveBundles, "curve bundles");
ArgumentChecker.notNull(knownData, "known data");
ArgumentChecker.notNull(inflationMap, "inflation map");
ArgumentChecker.notNull(calculator, "calculator");
ArgumentChecker.notNull(sensitivityCalculator, "sensitivity calculator");
final int nbUnits = curveBundles.length;
InflationProviderDiscount knownSoFarData = knownData.copy();
final CurveBuildingBlockBundle totalBundle = new CurveBuildingBlockBundle();
totalBundle.addAll(knownBlockBundle);
final List<InstrumentDerivative> instrumentsSoFar = new ArrayList<>();
final LinkedHashMap<String, GeneratorCurve> generatorsSoFar = new LinkedHashMap<>();
int startUnit = 0;
for (int iUnits = 0; iUnits < nbUnits; iUnits++) {
final MultiCurveBundle<? extends GeneratorCurve> curveBundle = curveBundles[iUnits];
final int nbCurve = curveBundle.size();
final int[] startCurve = new int[nbCurve]; // First parameter index of the curve in the unit.
final LinkedHashMap<String, GeneratorCurve> generators = new LinkedHashMap<>();
final int[] nbIns = new int[curveBundle.getNumberOfInstruments()];
int nbInsUnit = 0; // Number of instruments in the unit.
for (int iCurve = 0; iCurve < nbCurve; iCurve++) {
final SingleCurveBundle<? extends GeneratorCurve> singleCurve = curveBundle.getCurveBundle(iCurve);
startCurve[iCurve] = nbInsUnit;
nbIns[iCurve] = singleCurve.size();
nbInsUnit += nbIns[iCurve];
instrumentsSoFar.addAll(Arrays.asList(singleCurve.getDerivatives()));
}
final InstrumentDerivative[] instrumentsUnit = new InstrumentDerivative[nbInsUnit];
final double[] parametersGuess = new double[nbInsUnit];
for (int iCurve = 0; iCurve < nbCurve; iCurve++) {
final SingleCurveBundle<? extends GeneratorCurve> singleCurve = curveBundle.getCurveBundle(iCurve);
final InstrumentDerivative[] derivatives = singleCurve.getDerivatives();
System.arraycopy(derivatives, 0, instrumentsUnit, startCurve[iCurve], nbIns[iCurve]);
System.arraycopy(singleCurve.getStartingPoint(), 0, parametersGuess, startCurve[iCurve], nbIns[iCurve]);
GeneratorCurve tmp = singleCurve.getCurveGenerator().finalGenerator(derivatives);
String curveName = singleCurve.getCurveName();
generators.put(curveName, tmp);
generatorsSoFar.put(curveName, tmp);
}
knownSoFarData = makeUnit(instrumentsUnit, parametersGuess, knownSoFarData, discountingMap, forwardIborMap,
forwardONMap, inflationMap, generators, calculator, sensitivityCalculator);
updateBlockBundle(instrumentsUnit, knownSoFarData, curveBundle.getNames(), totalBundle, sensitivityCalculator);
startUnit = startUnit + nbInsUnit;
}
return Pairs.of(knownSoFarData, totalBundle);
} | Pair<InflationProviderDiscount, CurveBuildingBlockBundle> function( final MultiCurveBundle<? extends GeneratorCurve>[] curveBundles, final InflationProviderDiscount knownData, final CurveBuildingBlockBundle knownBlockBundle, final LinkedHashMap<String, Currency> discountingMap, final LinkedHashMap<String, IndexON[]> forwardONMap, final LinkedHashMap<String, IborIndex[]> forwardIborMap, final LinkedHashMap<String, IndexPrice[]> inflationMap, final InstrumentDerivativeVisitor<ParameterInflationProviderInterface, Double> calculator, final InstrumentDerivativeVisitor<ParameterInflationProviderInterface, InflationSensitivity> sensitivityCalculator) { ArgumentChecker.notNull(curveBundles, STR); ArgumentChecker.notNull(knownData, STR); ArgumentChecker.notNull(inflationMap, STR); ArgumentChecker.notNull(calculator, STR); ArgumentChecker.notNull(sensitivityCalculator, STR); final int nbUnits = curveBundles.length; InflationProviderDiscount knownSoFarData = knownData.copy(); final CurveBuildingBlockBundle totalBundle = new CurveBuildingBlockBundle(); totalBundle.addAll(knownBlockBundle); final List<InstrumentDerivative> instrumentsSoFar = new ArrayList<>(); final LinkedHashMap<String, GeneratorCurve> generatorsSoFar = new LinkedHashMap<>(); int startUnit = 0; for (int iUnits = 0; iUnits < nbUnits; iUnits++) { final MultiCurveBundle<? extends GeneratorCurve> curveBundle = curveBundles[iUnits]; final int nbCurve = curveBundle.size(); final int[] startCurve = new int[nbCurve]; final LinkedHashMap<String, GeneratorCurve> generators = new LinkedHashMap<>(); final int[] nbIns = new int[curveBundle.getNumberOfInstruments()]; int nbInsUnit = 0; for (int iCurve = 0; iCurve < nbCurve; iCurve++) { final SingleCurveBundle<? extends GeneratorCurve> singleCurve = curveBundle.getCurveBundle(iCurve); startCurve[iCurve] = nbInsUnit; nbIns[iCurve] = singleCurve.size(); nbInsUnit += nbIns[iCurve]; instrumentsSoFar.addAll(Arrays.asList(singleCurve.getDerivatives())); } final InstrumentDerivative[] instrumentsUnit = new InstrumentDerivative[nbInsUnit]; final double[] parametersGuess = new double[nbInsUnit]; for (int iCurve = 0; iCurve < nbCurve; iCurve++) { final SingleCurveBundle<? extends GeneratorCurve> singleCurve = curveBundle.getCurveBundle(iCurve); final InstrumentDerivative[] derivatives = singleCurve.getDerivatives(); System.arraycopy(derivatives, 0, instrumentsUnit, startCurve[iCurve], nbIns[iCurve]); System.arraycopy(singleCurve.getStartingPoint(), 0, parametersGuess, startCurve[iCurve], nbIns[iCurve]); GeneratorCurve tmp = singleCurve.getCurveGenerator().finalGenerator(derivatives); String curveName = singleCurve.getCurveName(); generators.put(curveName, tmp); generatorsSoFar.put(curveName, tmp); } knownSoFarData = makeUnit(instrumentsUnit, parametersGuess, knownSoFarData, discountingMap, forwardIborMap, forwardONMap, inflationMap, generators, calculator, sensitivityCalculator); updateBlockBundle(instrumentsUnit, knownSoFarData, curveBundle.getNames(), totalBundle, sensitivityCalculator); startUnit = startUnit + nbInsUnit; } return Pairs.of(knownSoFarData, totalBundle); } | /**
* Build a block of curves.
* @param curveBundles The curve bundles, not null
* @param knownData The known data (fx rates, other curves, model parameters, ...)
* @param knownBlockBundle The already build CurveBuildingBlockBundle.
* This should contain all the bundles corresponding to the curves in the knownData.
* @param discountingMap The discounting curves names map.
* @param forwardIborMap The forward curves names map.
* @param forwardONMap The forward curves names map.
* @param inflationMap The inflation curves names map.
* @param calculator The calculator of the value on which the calibration is done
* (usually ParSpreadInflationMarketQuoteDiscountingCalculator (recommended) or converted present value).
* @param sensitivityCalculator The parameter sensitivity calculator.
* @return A pair with the calibrated yield curve bundle (including the known data) and the
* CurveBuildingBlockBundle with the relevant inverse Jacobian Matrix.
*/ | Build a block of curves | makeCurvesFromDerivatives | {
"repo_name": "nssales/OG-Platform",
"path": "projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/provider/curve/inflation/InflationDiscountBuildingRepository.java",
"license": "apache-2.0",
"size": 22281
} | [
"com.opengamma.analytics.financial.curve.interestrate.generator.GeneratorCurve",
"com.opengamma.analytics.financial.instrument.index.IborIndex",
"com.opengamma.analytics.financial.instrument.index.IndexON",
"com.opengamma.analytics.financial.instrument.index.IndexPrice",
"com.opengamma.analytics.financial.interestrate.InstrumentDerivative",
"com.opengamma.analytics.financial.interestrate.InstrumentDerivativeVisitor",
"com.opengamma.analytics.financial.provider.curve.CurveBuildingBlockBundle",
"com.opengamma.analytics.financial.provider.curve.MultiCurveBundle",
"com.opengamma.analytics.financial.provider.curve.SingleCurveBundle",
"com.opengamma.analytics.financial.provider.description.inflation.InflationProviderDiscount",
"com.opengamma.analytics.financial.provider.description.inflation.ParameterInflationProviderInterface",
"com.opengamma.analytics.financial.provider.sensitivity.inflation.InflationSensitivity",
"com.opengamma.util.ArgumentChecker",
"com.opengamma.util.money.Currency",
"com.opengamma.util.tuple.Pair",
"com.opengamma.util.tuple.Pairs",
"java.util.ArrayList",
"java.util.Arrays",
"java.util.LinkedHashMap",
"java.util.List"
] | import com.opengamma.analytics.financial.curve.interestrate.generator.GeneratorCurve; import com.opengamma.analytics.financial.instrument.index.IborIndex; import com.opengamma.analytics.financial.instrument.index.IndexON; import com.opengamma.analytics.financial.instrument.index.IndexPrice; import com.opengamma.analytics.financial.interestrate.InstrumentDerivative; import com.opengamma.analytics.financial.interestrate.InstrumentDerivativeVisitor; import com.opengamma.analytics.financial.provider.curve.CurveBuildingBlockBundle; import com.opengamma.analytics.financial.provider.curve.MultiCurveBundle; import com.opengamma.analytics.financial.provider.curve.SingleCurveBundle; import com.opengamma.analytics.financial.provider.description.inflation.InflationProviderDiscount; import com.opengamma.analytics.financial.provider.description.inflation.ParameterInflationProviderInterface; import com.opengamma.analytics.financial.provider.sensitivity.inflation.InflationSensitivity; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.money.Currency; import com.opengamma.util.tuple.Pair; import com.opengamma.util.tuple.Pairs; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; | import com.opengamma.analytics.financial.curve.interestrate.generator.*; import com.opengamma.analytics.financial.instrument.index.*; import com.opengamma.analytics.financial.interestrate.*; import com.opengamma.analytics.financial.provider.curve.*; import com.opengamma.analytics.financial.provider.description.inflation.*; import com.opengamma.analytics.financial.provider.sensitivity.inflation.*; import com.opengamma.util.*; import com.opengamma.util.money.*; import com.opengamma.util.tuple.*; import java.util.*; | [
"com.opengamma.analytics",
"com.opengamma.util",
"java.util"
] | com.opengamma.analytics; com.opengamma.util; java.util; | 2,648,903 |
List<Period> getIntersectionPeriods( Collection<Period> periods );
| List<Period> getIntersectionPeriods( Collection<Period> periods ); | /**
 * Returns Periods where at least one of its days is between each of the Periods'
* start date and end date in the given collection.
*
* @param periods the collection of Periods.
* @return a list of Periods.
 */ | Returns Periods where at least one of its days is between each of the Periods' start date and end date in the given collection | getIntersectionPeriods | {
"repo_name": "mortenoh/dhis2-core",
"path": "dhis-2/dhis-api/src/main/java/org/hisp/dhis/period/PeriodService.java",
"license": "bsd-3-clause",
"size": 12178
} | [
"java.util.Collection",
"java.util.List"
] | import java.util.Collection; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,001,363 |
KieRuntimeLogger logger = null;
if (_loggerType != null && runtimeEventManager != null) {
KieLoggers loggers = KieServices.Factory.get().getLoggers();
switch (_loggerType) {
case CONSOLE:
logger = loggers.newConsoleLogger(runtimeEventManager);
break;
case FILE:
logger = loggers.newFileLogger(runtimeEventManager, _log);
break;
case THREADED_FILE:
logger = loggers.newThreadedFileLogger(runtimeEventManager, _log, _interval);
break;
}
}
return logger;
} | KieRuntimeLogger logger = null; if (_loggerType != null && runtimeEventManager != null) { KieLoggers loggers = KieServices.Factory.get().getLoggers(); switch (_loggerType) { case CONSOLE: logger = loggers.newConsoleLogger(runtimeEventManager); break; case FILE: logger = loggers.newFileLogger(runtimeEventManager, _log); break; case THREADED_FILE: logger = loggers.newThreadedFileLogger(runtimeEventManager, _log, _interval); break; } } return logger; } | /** Builds a KieRuntimeLogger.
*
* @param runtimeEventManager runtimeEventManager
* @return a KieRuntimeLogger */ | Builds a KieRuntimeLogger | build | {
"repo_name": "cunningt/fuse-bxms-integ",
"path": "switchyard/switchyard-component-common-knowledge/src/main/java/org/switchyard/component/common/knowledge/config/builder/LoggerBuilder.java",
"license": "apache-2.0",
"size": 3947
} | [
"org.kie.api.KieServices",
"org.kie.api.logger.KieLoggers",
"org.kie.api.logger.KieRuntimeLogger"
] | import org.kie.api.KieServices; import org.kie.api.logger.KieLoggers; import org.kie.api.logger.KieRuntimeLogger; | import org.kie.api.*; import org.kie.api.logger.*; | [
"org.kie.api"
] | org.kie.api; | 570,234 |
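A self-contained sketch of what the CONSOLE branch above wires up at runtime; the KieSession (which implements the required event manager interface) is passed in because constructing one needs a full KieContainer.

    static void fireWithConsoleLogger(KieSession session) {
        KieRuntimeLogger logger = KieServices.Factory.get().getLoggers().newConsoleLogger(session);
        try {
            session.fireAllRules();
        } finally {
            logger.close();   // always detach the logger when done
        }
    }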
public void setText(String footerText) {
if (footerText == null || footerText.equals("")) {
footerText = " ";
}
DOM.setInnerHTML(captionContainer, footerText);
} | void function(String footerText) { if (footerText == null || footerText.equals("")) { footerText = " "; } DOM.setInnerHTML(captionContainer, footerText); } | /**
* Sets the text of the footer
*
* @param footerText
* The text in the footer
*/ | Sets the text of the footer | setText | {
"repo_name": "jdahlstrom/vaadin.react",
"path": "client/src/main/java/com/vaadin/client/ui/VScrollTable.java",
"license": "apache-2.0",
"size": 317883
} | [
"com.google.gwt.user.client.DOM"
] | import com.google.gwt.user.client.DOM; | import com.google.gwt.user.client.*; | [
"com.google.gwt"
] | com.google.gwt; | 1,105,756 |
public void setCacheManager(CacheManager cacheManager) {
this._cacheManager = cacheManager;
} | void function(CacheManager cacheManager) { this._cacheManager = cacheManager; } | /**
* Sets the cache manager to use if the raw repository should be cached.
* <p>
* If omitted there will be no local caching.
* @param cacheManager the new value of the property
*/ | Sets the cache manager to use if the raw repository should be cached. If omitted there will be no local caching | setCacheManager | {
"repo_name": "McLeodMoores/starling",
"path": "projects/component/src/main/java/com/opengamma/component/factory/source/TempTargetRepositoryComponentFactory.java",
"license": "apache-2.0",
"size": 19509
} | [
"net.sf.ehcache.CacheManager"
] | import net.sf.ehcache.CacheManager; | import net.sf.ehcache.*; | [
"net.sf.ehcache"
] | net.sf.ehcache; | 355,769 |
public AmqpMessage pullImmediate() throws IOException {
return pull(0, TimeUnit.MILLISECONDS);
} | AmqpMessage function() throws IOException { return pull(0, TimeUnit.MILLISECONDS); } | /**
* Request a remote peer send a Message to this client using an immediate drain request.
*
* @return the pulled AmqpMessage or null if none was pulled from the remote.
*
* @throws IOException if an error occurs
*/ | Request a remote peer send a Message to this client using an immediate drain request | pullImmediate | {
"repo_name": "chirino/activemq",
"path": "activemq-amqp/src/test/java/org/apache/activemq/transport/amqp/client/AmqpReceiver.java",
"license": "apache-2.0",
"size": 37258
} | [
"java.io.IOException",
"java.util.concurrent.TimeUnit"
] | import java.io.IOException; import java.util.concurrent.TimeUnit; | import java.io.*; import java.util.concurrent.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 461,760 |
protected DirContext getDirContextInstance(Hashtable environment)
throws NamingException {
return new InitialDirContext(environment);
}
| DirContext function(Hashtable environment) throws NamingException { return new InitialDirContext(environment); } | /**
* Create a new InitialDirContext instance.
*
* @param environment
* the environment to use when creating the context.
* @return a new InitialDirContext implementation.
*/ | Create a new InitialDirContext instance | getDirContextInstance | {
"repo_name": "pbzdyl/spring-ldap",
"path": "core/src/main/java/org/springframework/ldap/core/support/DirContextSource.java",
"license": "apache-2.0",
"size": 1683
} | [
"java.util.Hashtable",
"javax.naming.NamingException",
"javax.naming.directory.DirContext",
"javax.naming.directory.InitialDirContext"
] | import java.util.Hashtable; import javax.naming.NamingException; import javax.naming.directory.DirContext; import javax.naming.directory.InitialDirContext; | import java.util.*; import javax.naming.*; import javax.naming.directory.*; | [
"java.util",
"javax.naming"
] | java.util; javax.naming; | 598,426 |
public boolean isGoogleSigned(PackageManager pm, String packageName) {
// This is overridden in a subclass.
return false;
} | boolean function(PackageManager pm, String packageName) { return false; } | /**
* Returns whether the call is originating from a Google-signed package.
* @param pm Package manager to use for getting package related info.
* @param packageName The package name to inquire about.
*/ | Returns whether the call is originating from a Google-signed package | isGoogleSigned | {
"repo_name": "guorendong/iridium-browser-ubuntu",
"path": "chrome/android/java/src/org/chromium/chrome/browser/externalauth/ExternalAuthUtils.java",
"license": "bsd-3-clause",
"size": 6376
} | [
"android.content.pm.PackageManager"
] | import android.content.pm.PackageManager; | import android.content.pm.*; | [
"android.content"
] | android.content; | 947,936 |
HitResult rayTrace(Vector3f from, Vector3f direction, float distance, CollisionGroup... collisionGroups); | HitResult rayTrace(Vector3f from, Vector3f direction, float distance, CollisionGroup... collisionGroups); | /**
* Executes a rayTrace on the physics engine.
*
* @param from Place to start tracing
* @param direction Direction in which to trace
* @param distance maximum distance to trace before giving up
* @param collisionGroups the collision groups to collide with. Only if an
* object of any of these groups is hit it will be registered.
* @return A HitResult object that contains the info about the ray trace.
*/ | Executes a rayTrace on the physics engine | rayTrace | {
"repo_name": "kaen/Terasology",
"path": "engine/src/main/java/org/terasology/physics/Physics.java",
"license": "apache-2.0",
"size": 3850
} | [
"org.terasology.math.geom.Vector3f"
] | import org.terasology.math.geom.Vector3f; | import org.terasology.math.geom.*; | [
"org.terasology.math"
] | org.terasology.math; | 580,151 |
@Message(id = 86, value = "Could not find method %s %s on view %s of %s")
IllegalArgumentException viewMethodNotFound(String name, String descriptor, Class<?> viewClass, Class<?> component);
// @Message(id = 87, value = "Could not load component class %s")
// DeploymentUnitProcessingException couldNotLoadComponentClass(@Cause Throwable cause, final String className); | @Message(id = 86, value = STR) IllegalArgumentException viewMethodNotFound(String name, String descriptor, Class<?> viewClass, Class<?> component); | /**
* Creates an exception indicating the method could not be found on the view.
*
* @param name the name of the method.
* @param descriptor the method descriptor.
* @param viewClass the view class.
* @param component the component class.
*
* @return an {@link IllegalArgumentException} for the error.
*/ | Creates an exception indicating the method could not be found on the view | viewMethodNotFound | {
"repo_name": "tomazzupan/wildfly",
"path": "ee/src/main/java/org/jboss/as/ee/logging/EeLogger.java",
"license": "lgpl-2.1",
"size": 48634
} | [
"org.jboss.logging.annotations.Message"
] | import org.jboss.logging.annotations.Message; | import org.jboss.logging.annotations.*; | [
"org.jboss.logging"
] | org.jboss.logging; | 638,310 |
public void onPeerConnectionStatsReady(final StatsReport[] reports); | void function(final StatsReport[] reports); | /**
* Callback fired once peer connection statistics is ready.
*/ | Callback fired once peer connection statistics is ready | onPeerConnectionStatsReady | {
"repo_name": "JiYou/webrtctalk",
"path": "examples/android/src/org/appspot/apprtc/PeerConnectionClient.java",
"license": "bsd-3-clause",
"size": 35637
} | [
"org.webrtc.StatsReport"
] | import org.webrtc.StatsReport; | import org.webrtc.*; | [
"org.webrtc"
] | org.webrtc; | 1,131,074 |
protected void bind(Element e) {
AbstractDocument doc = (AbstractDocument) e.getOwnerDocument();
if (doc != document) {
XBLManager xm = doc.getXBLManager();
if (xm instanceof DefaultXBLManager) {
((DefaultXBLManager) xm).bind(e);
return;
}
}
if (e instanceof BindableElement) {
DefinitionRecord defRec
= getActiveDefinition(e.getNamespaceURI(),
e.getLocalName());
setActiveDefinition((BindableElement) e, defRec);
} else {
NodeList nl = getXblScopedChildNodes(e);
for (int i = 0; i < nl.getLength(); i++) {
Node n = nl.item(i);
if (n.getNodeType() == Node.ELEMENT_NODE) {
bind((Element) n);
}
}
}
} | void function(Element e) { AbstractDocument doc = (AbstractDocument) e.getOwnerDocument(); if (doc != document) { XBLManager xm = doc.getXBLManager(); if (xm instanceof DefaultXBLManager) { ((DefaultXBLManager) xm).bind(e); return; } } if (e instanceof BindableElement) { DefinitionRecord defRec = getActiveDefinition(e.getNamespaceURI(), e.getLocalName()); setActiveDefinition((BindableElement) e, defRec); } else { NodeList nl = getXblScopedChildNodes(e); for (int i = 0; i < nl.getLength(); i++) { Node n = nl.item(i); if (n.getNodeType() == Node.ELEMENT_NODE) { bind((Element) n); } } } } | /**
* Binds each bindable element in the given element's subtree.
*/ | Binds each bindable element in the given element's subtree | bind | {
"repo_name": "git-moss/Push2Display",
"path": "lib/batik-1.8/sources/org/apache/batik/bridge/svg12/DefaultXBLManager.java",
"license": "lgpl-3.0",
"size": 70498
} | [
"org.apache.batik.anim.dom.BindableElement",
"org.apache.batik.dom.AbstractDocument",
"org.apache.batik.dom.xbl.XBLManager",
"org.w3c.dom.Element",
"org.w3c.dom.Node",
"org.w3c.dom.NodeList"
] | import org.apache.batik.anim.dom.BindableElement; import org.apache.batik.dom.AbstractDocument; import org.apache.batik.dom.xbl.XBLManager; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; | import org.apache.batik.anim.dom.*; import org.apache.batik.dom.*; import org.apache.batik.dom.xbl.*; import org.w3c.dom.*; | [
"org.apache.batik",
"org.w3c.dom"
] | org.apache.batik; org.w3c.dom; | 2,830,300 |
private String createUrl(String urlAsString,
Map<String, String> formatDefinition) {
final StringBuilder result = new StringBuilder(urlAsString.length());
final StringTokenizer tokens = new StringTokenizer(urlAsString, "{}");
boolean isVariable = (urlAsString.charAt(0) == '{');
int i = 0;
while (tokens.hasMoreTokens()) {
final String key = tokens.nextToken();
if (isVariable) {
String format = formatDefinition.get(key);
if (format == null) {
LOG.warn("In URL {} you use an undefined parameter {}",
urlAsString, key);
format = "";
}
result.append('{').append(i).append(format).append('}');
i++;
} else {
result.append(key);
}
isVariable = !isVariable;
}
return result.toString();
}
/**
* Creates the Paramgroup from the constant as a message, i.e. paramaters
* will be enriched with ={} | String function(String urlAsString, Map<String, String> formatDefinition) { final StringBuilder result = new StringBuilder(urlAsString.length()); final StringTokenizer tokens = new StringTokenizer(urlAsString, "{}"); boolean isVariable = (urlAsString.charAt(0) == '{'); int i = 0; while (tokens.hasMoreTokens()) { final String key = tokens.nextToken(); if (isVariable) { String format = formatDefinition.get(key); if (format == null) { LOG.warn(STR, urlAsString, key); format = ""; } result.append('{').append(i).append(format).append('}'); i++; } else { result.append(key); } isVariable = !isVariable; } return result.toString(); } /** * Creates the Paramgroup from the constant as a message, i.e. paramaters * will be enriched with ={} | /**
* Creates the URL from the constant as a message, i.e. named parameters
* like {user_id} will be replaced by {0}.
*
* @param urlAsString the url.
* @param formatDefinition the format definitions.
* @return the URL as parameterized message.
*/ | Creates the URL from the constant as a message, i.e. named parameters like {user_id} will be replaced by {0} | createUrl | {
"repo_name": "opensource21/fuwesta",
"path": "fuwesta-core/src/main/java/de/ppi/fuwesta/spring/mvc/util/UrlDefinitionsToMessages.java",
"license": "apache-2.0",
"size": 16584
} | [
"java.util.Map",
"java.util.StringTokenizer"
] | import java.util.Map; import java.util.StringTokenizer; | import java.util.*; | [
"java.util"
] | java.util; | 1,156,046 |
public List<String> getCellSpecimenTypePVList()
{
final Map specimenTypeMap = AppUtility.getSpecimenTypeMap();
final List<NameValueBean> aList = (List<NameValueBean>) specimenTypeMap.get("Cell");
return this.toStrList(aList);
}
| List<String> function() { final Map specimenTypeMap = AppUtility.getSpecimenTypeMap(); final List<NameValueBean> aList = (List<NameValueBean>) specimenTypeMap.get("Cell"); return this.toStrList(aList); } | /**
* Gets the cell specimen type pv list.
*
* @return List.
*/ | Gets the cell specimen type pv list | getCellSpecimenTypePVList | {
"repo_name": "NCIP/catissue-core",
"path": "software/caTissue/modules/core/src/main/java/edu/wustl/catissuecore/flex/FlexInterface.java",
"license": "bsd-3-clause",
"size": 66403
} | [
"edu.wustl.catissuecore.util.global.AppUtility",
"edu.wustl.common.beans.NameValueBean",
"java.util.List",
"java.util.Map"
] | import edu.wustl.catissuecore.util.global.AppUtility; import edu.wustl.common.beans.NameValueBean; import java.util.List; import java.util.Map; | import edu.wustl.catissuecore.util.global.*; import edu.wustl.common.beans.*; import java.util.*; | [
"edu.wustl.catissuecore",
"edu.wustl.common",
"java.util"
] | edu.wustl.catissuecore; edu.wustl.common; java.util; | 1,772,543 |
//-----------------------------------------------------------------------
public static List<Locale> localeLookupList(final Locale locale) {
return localeLookupList(locale, locale);
} | static List<Locale> function(final Locale locale) { return localeLookupList(locale, locale); } | /**
* <p>Obtains the list of locales to search through when performing
* a locale search.</p>
*
* <pre>
* localeLookupList(Locale("fr", "CA", "xxx"))
* = [Locale("fr", "CA", "xxx"), Locale("fr", "CA"), Locale("fr")]
* </pre>
*
* @param locale the locale to start from
* @return the unmodifiable list of Locale objects, 0 being locale, not null
*/ | Obtains the list of locales to search through when performing a locale search. <code> localeLookupList(Locale("fr", "CA", "xxx")) = [Locale("fr", "CA", "xxx"), Locale("fr", "CA"), Locale("fr")] </code> | localeLookupList | {
"repo_name": "prime-framework/prime-mvc",
"path": "src/main/java/org/apache/commons/lang3/LocaleUtils.java",
"license": "apache-2.0",
"size": 14140
} | [
"java.util.List",
"java.util.Locale"
] | import java.util.List; import java.util.Locale; | import java.util.*; | [
"java.util"
] | java.util; | 2,229,161 |
RequestBodyUriSpec method(HttpMethod method); | RequestBodyUriSpec method(HttpMethod method); | /**
* Start building a request for the given {@code HttpMethod}.
* @return a spec for specifying the target URL
*/ | Start building a request for the given HttpMethod | method | {
"repo_name": "spring-projects/spring-framework",
"path": "spring-webflux/src/main/java/org/springframework/web/reactive/function/client/WebClient.java",
"license": "apache-2.0",
"size": 33699
} | [
"org.springframework.http.HttpMethod"
] | import org.springframework.http.HttpMethod; | import org.springframework.http.*; | [
"org.springframework.http"
] | org.springframework.http; | 86,046 |
@Override
public void receivePacket(Packet inPacket,String inInterfaceName) throws LowLinkException{
if(sz!=1){
Ethernet_packet tempPacket = (Ethernet_packet)inPacket;
Enumeration it;
boolean intFound = false;
String nic = "";
try{
Hashtable inInt = (Hashtable) IntCaches.get(inInterfaceName);
inInt.put(tempPacket.getSourceMACAddress(), "1");
Ethernet_packet copyPacket = new Ethernet_packet(tempPacket.getData(), tempPacket.getDestinationMACAddress(),tempPacket.getSourceMACAddress());
it = NetworkInterfacetable.elements();
while(it.hasMoreElements()){
NetworkInterface tempInterface = (NetworkInterface)it.nextElement();
nic = tempInterface.getName();
Hashtable outInt = (Hashtable) IntCaches.get(nic);
if(outInt.get(tempPacket.getDestinationMACAddress()) != null){
intFound = true;
try{
tempInterface.sendPacket(copyPacket);
}catch(NullPointerException e){
System.out.println("WirelessAP.java: " + e.toString());
}
}
}
it = NetworkInterfacetable.elements();
while(it.hasMoreElements() && !intFound){
//Test to see if the current Interface is the Interface that sent in the packet
// if it is skip that interface
NetworkInterface tempInterface = (NetworkInterface)it.nextElement();
if(!(tempInterface.getName().equals(inInterfaceName) && !inInterfaceName.contains("wrl"))){
try{
tempInterface.sendPacket(copyPacket);
}catch(NullPointerException e){
System.out.println("WirelessAP.java: " + e.toString());
}
}
}
}catch(Throwable th)
{
if(th.toString().contains("Packet lost due to physical link problems!")){
throw new LowLinkException(th.toString());
}else{
sz=1;
System.out.println(th.toString());
throw new LowLinkException("WirelessAP buffer overflow (packet loop flood?).");
}
}
}
}
| void function(Packet inPacket,String inInterfaceName) throws LowLinkException{ if(sz!=1){ Ethernet_packet tempPacket = (Ethernet_packet)inPacket; Enumeration it; boolean intFound = false; String nic = STR1STRWirelessAP.java: STRwrlSTRWirelessAP.java: STRPacket lost due to physical link problems!STRWirelessAP buffer overflow (packet loop flood?)."); } } } } | /**
* This method will receive a packet from any of the connected links, then copy
* the Packet and distribute a copy to each of the other connected links.
* @author bevan_calliess
* @param inPacket - The packet to be transported
* @param inInterfaceName - The name of the interface that received the packet eg: eth0
*/ | This method will receive a packet from any of the connected links, then copy the Packet and distribute a copy to each of the other connected links | receivePacket | {
"repo_name": "Darkkey/javaNetSim",
"path": "src/core/WirelessAP.java",
"license": "bsd-3-clause",
"size": 8091
} | [
"java.util.Enumeration"
] | import java.util.Enumeration; | import java.util.*; | [
"java.util"
] | java.util; | 1,326,770 |
public TomcatManagerResponse reload( String path )
throws TomcatManagerException, IOException
{
return invoke( "/reload?path=" + URLEncoder.encode( path, charset ) );
} | TomcatManagerResponse function( String path ) throws TomcatManagerException, IOException { return invoke( STR + URLEncoder.encode( path, charset ) ); } | /**
* Reloads the webapp at the specified context path.
*
* @param path the webapp context path to reload
* @return the Tomcat manager response
* @throws TomcatManagerException if the Tomcat manager request fails
* @throws IOException if an i/o error occurs
*/ | Reloads the webapp at the specified context path | reload | {
"repo_name": "karthikjaps/Tomcat",
"path": "common-tomcat-maven-plugin/src/main/java/org/apache/tomcat/maven/common/deployer/TomcatManager.java",
"license": "apache-2.0",
"size": 34707
} | [
"java.io.IOException",
"java.net.URLEncoder"
] | import java.io.IOException; import java.net.URLEncoder; | import java.io.*; import java.net.*; | [
"java.io",
"java.net"
] | java.io; java.net; | 2,511,709 |
public static <V extends Writable> V createVertexValue(Configuration conf) {
Class<V> vertexValueClass = getVertexValueClass(conf);
try {
return vertexValueClass.newInstance();
} catch (InstantiationException e) {
throw new IllegalArgumentException("createVertexValue: Failed to instantiate", e);
} catch (IllegalAccessException e) {
throw new IllegalArgumentException("createVertexValue: Illegally accessed", e);
}
} | static <V extends Writable> V function(Configuration conf) { Class<V> vertexValueClass = getVertexValueClass(conf); try { return vertexValueClass.newInstance(); } catch (InstantiationException e) { throw new IllegalArgumentException(STR, e); } catch (IllegalAccessException e) { throw new IllegalArgumentException(STR, e); } } | /**
* Create a user vertex value
*
* @param conf
* Configuration to check
* @return Instantiated user vertex value
*/ | Create a user vertex value | createVertexValue | {
"repo_name": "sigmod/asterixdb-analytics",
"path": "pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java",
"license": "apache-2.0",
"size": 39926
} | [
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.io.Writable"
] | import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Writable; | import org.apache.hadoop.conf.*; import org.apache.hadoop.io.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 1,979,082 |
public static int getServiceStartMode(String serviceName) throws IOException {
if (!SystemUtils.IS_OS_WINDOWS) {
return NOT_INSTALLED;
}
Process p = Runtime.getRuntime().exec("sc qc " + serviceName);
try ( BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()));){
String line = reader.readLine();
while (line != null) {
if (line.trim().startsWith("START_TYPE")) {
if (line.trim()
.substring(line.trim().indexOf(":") + 1,
line.trim().indexOf(":") + 4).trim()
.equals("2")) {
if (line.contains("DELAYED")) {
return AUTO_START_DELAYED;
} else {
return AUTO_START;
}
} else if (line
.trim()
.substring(line.trim().indexOf(":") + 1,
line.trim().indexOf(":") + 4).trim()
.equals("3")) {
return DEMAND_START;
} else if (line
.trim()
.substring(line.trim().indexOf(":") + 1,
line.trim().indexOf(":") + 4).trim()
.equals("4")) {
return DISABLED;
}
}
line = reader.readLine();
}
return NOT_INSTALLED;
}
} | static int function(String serviceName) throws IOException { if (!SystemUtils.IS_OS_WINDOWS) { return NOT_INSTALLED; } Process p = Runtime.getRuntime().exec(STR + serviceName); try ( BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()));){ String line = reader.readLine(); while (line != null) { if (line.trim().startsWith(STR)) { if (line.trim() .substring(line.trim().indexOf(":") + 1, line.trim().indexOf(":") + 4).trim() .equals("2")) { if (line.contains(STR)) { return AUTO_START_DELAYED; } else { return AUTO_START; } } else if (line .trim() .substring(line.trim().indexOf(":") + 1, line.trim().indexOf(":") + 4).trim() .equals("3")) { return DEMAND_START; } else if (line .trim() .substring(line.trim().indexOf(":") + 1, line.trim().indexOf(":") + 4).trim() .equals("4")) { return DISABLED; } } line = reader.readLine(); } return NOT_INSTALLED; } } | /**
* Returns the start mode status of the passed service
*
* @param serviceName the service name
* @return the start mode of the service
* @throws IOException
*/ | Returns the start mode status of the passed service | getServiceStartMode | {
"repo_name": "kawansoft/aceql-http-gui",
"path": "src/main/java/com/kawansoft/aceql/gui/service/ServiceUtil.java",
"license": "apache-2.0",
"size": 11737
} | [
"java.io.BufferedReader",
"java.io.IOException",
"java.io.InputStreamReader",
"org.apache.commons.lang3.SystemUtils"
] | import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import org.apache.commons.lang3.SystemUtils; | import java.io.*; import org.apache.commons.lang3.*; | [
"java.io",
"org.apache.commons"
] | java.io; org.apache.commons; | 1,555,272 |
public void mouseScroll(MouseWheelEvent e){
location=new Vector2(e.getX(),e.getY());
mouseScroll+=e.getWheelRotation();
} | void function(MouseWheelEvent e){ location=new Vector2(e.getX(),e.getY()); mouseScroll+=e.getWheelRotation(); } | /**
* Sets the number of rotations the mouse wheel has made
* @param e the mouse Wheel event
*/ | Sets the number of rotations the mouse wheel has made | mouseScroll | {
"repo_name": "Nekel-Seyew/SweeneyGameDevelopmentEnvironment",
"path": "SGDE/src/Utilities/Mouse.java",
"license": "bsd-3-clause",
"size": 9544
} | [
"java.awt.event.MouseWheelEvent"
] | import java.awt.event.MouseWheelEvent; | import java.awt.event.*; | [
"java.awt"
] | java.awt; | 664,589 |
@SuppressWarnings("unchecked")
public <T extends Capability> Set<T> intersectLocalAndRemoteCapabilities(Class<T> capClass) {
Set<T> caps = new TreeSet<T>();
for(Capability cap : listLocalCapabilities(capClass))
if(remoteCapabilities.contains(cap))
caps.add((T)cap);
return caps;
} | @SuppressWarnings(STR) <T extends Capability> Set<T> function(Class<T> capClass) { Set<T> caps = new TreeSet<T>(); for(Capability cap : listLocalCapabilities(capClass)) if(remoteCapabilities.contains(cap)) caps.add((T)cap); return caps; } | /**
* build an intersection of the locally configured capabilities and the capabilities passed in from the peer.
*
* @return
*/ | build an intersection of the locally configured capabilities and the capabilities passed in from the peer | intersectLocalAndRemoteCapabilities | {
"repo_name": "bnitin/bgp-ls",
"path": "src/main/java/org/topology/bgp_ls/netty/fsm/CapabilitesNegotiator.java",
"license": "apache-2.0",
"size": 6006
} | [
"java.util.Set",
"java.util.TreeSet",
"org.topology.bgp_ls.net.capabilities.Capability"
] | import java.util.Set; import java.util.TreeSet; import org.topology.bgp_ls.net.capabilities.Capability; | import java.util.*; import org.topology.bgp_ls.net.capabilities.*; | [
"java.util",
"org.topology.bgp_ls"
] | java.util; org.topology.bgp_ls; | 2,290,225 |
@Test
public void filterMappings() {
final Tomcat85ContainerAdapter adapter = new Tomcat85ContainerAdapter();
FilterMap map = new FilterMap();
map.addServletName("psi-probe");
map.addURLPattern("/psi-probe");
assertEquals(2, adapter.getFilterMappings(map, "dispatcherMap", "filterClass").size());
} | void function() { final Tomcat85ContainerAdapter adapter = new Tomcat85ContainerAdapter(); FilterMap map = new FilterMap(); map.addServletName(STR); map.addURLPattern(STR); assertEquals(2, adapter.getFilterMappings(map, STR, STR).size()); } | /**
* Filter mappings.
*/ | Filter mappings | filterMappings | {
"repo_name": "dougwm/psi-probe",
"path": "tomcat85adapter/src/test/java/psiprobe/Tomcat85ContainerAdapterTest.java",
"license": "gpl-2.0",
"size": 6377
} | [
"org.apache.tomcat.util.descriptor.web.FilterMap",
"org.junit.Assert"
] | import org.apache.tomcat.util.descriptor.web.FilterMap; import org.junit.Assert; | import org.apache.tomcat.util.descriptor.web.*; import org.junit.*; | [
"org.apache.tomcat",
"org.junit"
] | org.apache.tomcat; org.junit; | 1,436,309 |
@ParameterizedTest
@ValueSource(
strings = {
"hmac-md5",
"hmac-md5.sig-alg.reg.int.",
"hmac-sha1",
"hmac-sha224",
"hmac-sha256",
"hmac-sha256.",
"hmac-sha384",
"hmac-sha512",
// Java names
"HmacMD5",
"HmacSHA256"
})
void TSIG_query_stringalg(String alg) throws IOException {
TSIG key = new TSIG(alg, "example.", "12345678");
Name qname = Name.fromString("www.example.");
Record rec = Record.newRecord(qname, Type.A, DClass.IN);
Message msg = Message.newQuery(rec);
msg.setTSIG(key, Rcode.NOERROR, null);
byte[] bytes = msg.toWire(512);
assertEquals(1, bytes[11]);
Message parsed = new Message(bytes);
int result = key.verify(parsed, bytes, null);
assertEquals(Rcode.NOERROR, result);
assertTrue(parsed.isSigned());
} | @ValueSource( strings = { STR, STR, STR, STR, STR, STR, STR, STR, STR, STR }) void TSIG_query_stringalg(String alg) throws IOException { TSIG key = new TSIG(alg, STR, STR); Name qname = Name.fromString(STR); Record rec = Record.newRecord(qname, Type.A, DClass.IN); Message msg = Message.newQuery(rec); msg.setTSIG(key, Rcode.NOERROR, null); byte[] bytes = msg.toWire(512); assertEquals(1, bytes[11]); Message parsed = new Message(bytes); int result = key.verify(parsed, bytes, null); assertEquals(Rcode.NOERROR, result); assertTrue(parsed.isSigned()); } | /**
* Check all of the string algorithm names defined in the javadoc. Confirm that java names are also
* allowed, even though undocumented. This is to preserve backwards compatibility.
*/ | Check all of the string algorithm names defined in the javadoc. Confirm that java names are also allowed, even though undocumented. This is to preserve backwards compatibility | TSIG_query_stringalg | {
"repo_name": "dnsjava/dnsjava",
"path": "src/test/java/org/xbill/DNS/TSIGTest.java",
"license": "bsd-3-clause",
"size": 7798
} | [
"java.io.IOException",
"org.junit.jupiter.api.Assertions",
"org.junit.jupiter.params.provider.ValueSource"
] | import java.io.IOException; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.params.provider.ValueSource; | import java.io.*; import org.junit.jupiter.api.*; import org.junit.jupiter.params.provider.*; | [
"java.io",
"org.junit.jupiter"
] | java.io; org.junit.jupiter; | 244,132 |
private void cbUserGroupActionPerformed(final java.awt.event.ActionEvent evt) { //GEN-FIRST:event_cbUserGroupActionPerformed
EventQueue.invokeLater(new Runnable() { | void function(final java.awt.event.ActionEvent evt) { EventQueue.invokeLater(new Runnable() { | /**
* DOCUMENT ME!
*
* @param evt DOCUMENT ME!
*/ | DOCUMENT ME | cbUserGroupActionPerformed | {
"repo_name": "cismet/watergis-client",
"path": "src/main/java/de/cismet/watergis/gui/dialog/DbUserDialog.java",
"license": "lgpl-3.0",
"size": 37742
} | [
"java.awt.EventQueue"
] | import java.awt.EventQueue; | import java.awt.*; | [
"java.awt"
] | java.awt; | 279,818 |
public static boolean revertOrContinue(String msg, String msgDetails) {
if (Window.CANCEL == Utility.popMessage(msg,
msgDetails + "\nDo you want to continue, or Abort the last action?",
MessageDialog.QUESTION, new String[] { "Continue", "Abort" })) {
return true; // for closing the window or hitting Undo
}
return false;
} | static boolean function(String msg, String msgDetails) { if (Window.CANCEL == Utility.popMessage(msg, msgDetails + STR, MessageDialog.QUESTION, new String[] { STR, "Abort" })) { return true; } return false; } | /**
* Revert or continue.
*
* @param msg
* the msg
* @param msgDetails
* the msg details
* @return true to revert, false to continue
*/ | Revert or continue | revertOrContinue | {
"repo_name": "apache/uima-uimaj",
"path": "uimaj-ep-configurator/src/main/java/org/apache/uima/taeconfigurator/editors/ui/AbstractSection.java",
"license": "apache-2.0",
"size": 79829
} | [
"org.eclipse.jface.dialogs.MessageDialog",
"org.eclipse.jface.window.Window"
] | import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.window.Window; | import org.eclipse.jface.dialogs.*; import org.eclipse.jface.window.*; | [
"org.eclipse.jface"
] | org.eclipse.jface; | 1,088,157 |
public List getFields() {
return fields;
} | List function() { return fields; } | /**
* Returns all the fields declared in the class.
*
* @return a list of <code>FieldInfo</code>.
* @see FieldInfo
*/ | Returns all the fields declared in the class | getFields | {
"repo_name": "s20121035/rk3288_android5.1_repo",
"path": "external/javassist/src/main/javassist/bytecode/ClassFile.java",
"license": "gpl-3.0",
"size": 26395
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 174,958 |
public static void putStageInfo(Configuration conf, StageInfo info) {
conf.set(KEY_STAGE_INFO, info.serialize());
} | static void function(Configuration conf, StageInfo info) { conf.set(KEY_STAGE_INFO, info.serialize()); } | /**
* Puts the {@link StageInfo} object into the Hadoop configuration.
* @param conf the target Hadoop configuration
* @param info the {@link StageInfo} object
*/ | Puts the <code>StageInfo</code> object into the Hadoop configuration | putStageInfo | {
"repo_name": "asakusafw/asakusafw-compiler",
"path": "bridge-project/runtime-hadoop/src/main/java/com/asakusafw/bridge/hadoop/ConfigurationEditor.java",
"license": "apache-2.0",
"size": 2380
} | [
"com.asakusafw.bridge.stage.StageInfo",
"org.apache.hadoop.conf.Configuration"
] | import com.asakusafw.bridge.stage.StageInfo; import org.apache.hadoop.conf.Configuration; | import com.asakusafw.bridge.stage.*; import org.apache.hadoop.conf.*; | [
"com.asakusafw.bridge",
"org.apache.hadoop"
] | com.asakusafw.bridge; org.apache.hadoop; | 2,033,047 |
@Test
public void shouldSendThreeDataPackets3() throws Exception {
InBandBytestreamSession session = new InBandBytestreamSession(connection, initBytestream,
initiatorJID);
// verify the data packets
protocol.addResponse(null, incrementingSequence);
protocol.addResponse(null, incrementingSequence);
protocol.addResponse(null, incrementingSequence);
byte[] controlData = new byte[(blockSize * 3) - 2];
OutputStream outputStream = session.getOutputStream();
int off = 0;
for (int i = 1; i <= 7; i++) {
outputStream.write(controlData, off, i);
off += i;
}
outputStream.flush();
protocol.verifyAll();
} | void function() throws Exception { InBandBytestreamSession session = new InBandBytestreamSession(connection, initBytestream, initiatorJID); protocol.addResponse(null, incrementingSequence); protocol.addResponse(null, incrementingSequence); protocol.addResponse(null, incrementingSequence); byte[] controlData = new byte[(blockSize * 3) - 2]; OutputStream outputStream = session.getOutputStream(); int off = 0; for (int i = 1; i <= 7; i++) { outputStream.write(controlData, off, i); off += i; } outputStream.flush(); protocol.verifyAll(); } | /**
* Test the output stream write(byte[], int, int) method.
*
* @throws Exception should not happen
*/ | Test the output stream write(byte[], int, int) method | shouldSendThreeDataPackets3 | {
"repo_name": "magnetsystems/message-smack",
"path": "smack-extensions/src/test/java/org/jivesoftware/smackx/bytestreams/ibb/InBandBytestreamSessionMessageTest.java",
"license": "apache-2.0",
"size": 12583
} | [
"java.io.OutputStream"
] | import java.io.OutputStream; | import java.io.*; | [
"java.io"
] | java.io; | 1,512,113 |
protected Node exitComplianceGroup(Production node)
throws ParseException {
return node;
} | Node function(Production node) throws ParseException { return node; } | /**
* Called when exiting a parse tree node.
*
* @param node the node being exited
*
* @return the node to add to the parse tree, or
* null if no parse tree should be created
*
* @throws ParseException if the node analysis discovered errors
*/ | Called when exiting a parse tree node | exitComplianceGroup | {
"repo_name": "richb-hanover/mibble-2.9.2",
"path": "src/java/net/percederberg/mibble/asn1/Asn1Analyzer.java",
"license": "gpl-2.0",
"size": 275483
} | [
"net.percederberg.grammatica.parser.Node",
"net.percederberg.grammatica.parser.ParseException",
"net.percederberg.grammatica.parser.Production"
] | import net.percederberg.grammatica.parser.Node; import net.percederberg.grammatica.parser.ParseException; import net.percederberg.grammatica.parser.Production; | import net.percederberg.grammatica.parser.*; | [
"net.percederberg.grammatica"
] | net.percederberg.grammatica; | 447,871 |
Set<String> getAllowedFeatureOfInterestTypesForOffering(String offering); | Set<String> getAllowedFeatureOfInterestTypesForOffering(String offering); | /**
* Returns the allowed featureOfInterest types for the specified offering.
*
* @param offering the offering
*
* @return the allowed featureOfInterest types
*/ | Returns the allowed featureOfInterest types for the specified offering | getAllowedFeatureOfInterestTypesForOffering | {
"repo_name": "52North/SOS",
"path": "core/api/src/main/java/org/n52/sos/cache/SosContentCache.java",
"license": "gpl-2.0",
"size": 25788
} | [
"java.util.Set"
] | import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 905,442 |
public void beginDelete202NonRetry400() throws CloudException, IOException {
beginDelete202NonRetry400WithServiceResponseAsync().toBlocking().single().getBody();
} | void function() throws CloudException, IOException { beginDelete202NonRetry400WithServiceResponseAsync().toBlocking().single().getBody(); } | /**
* Long running delete request, service returns a 202 with a location header.
*
* @throws CloudException exception thrown from REST call
* @throws IOException exception thrown from serialization/deserialization
*/ | Long running delete request, service returns a 202 with a location header | beginDelete202NonRetry400 | {
"repo_name": "tbombach/autorest",
"path": "src/generator/AutoRest.Java.Azure.Fluent.Tests/src/main/java/fixtures/lro/implementation/LROSADsInner.java",
"license": "mit",
"size": 293065
} | [
"com.microsoft.azure.CloudException",
"java.io.IOException"
] | import com.microsoft.azure.CloudException; import java.io.IOException; | import com.microsoft.azure.*; import java.io.*; | [
"com.microsoft.azure",
"java.io"
] | com.microsoft.azure; java.io; | 2,083,054 |
@Override
public final CommandResult call() {
int retryCount = 0;
final CommandResult result = new CommandResult();
boolean isCommandSent = false;
final String baseCommand = mCommand.getCommand();
while (result.getResult() == null && retryCount < MAX_REQUEST_RETRY && !mCancelled) {
try {
if (getSocket() == null || !getSocket().isConnected() ||
getSocket().isClosed()) {
result.setConnectionResult(innerConnect());
}
write();
isCommandSent = true;
result.setResult(read());
} catch (final EOFException ex0) {
handleFailure(result, ex0);
// Do not fail when the IDLE response has not been read (to improve connection
// failure robustness). Just send the "changed playlist" result to force the MPD
// status to be refreshed.
if (MPDCommand.MPD_CMD_IDLE.equals(baseCommand)) {
result.setResult(Collections.singletonList(
"changed: " + MPDStatusMonitor.IDLE_PLAYLIST));
}
} catch (final IOException e) {
handleFailure(result, e);
} catch (final MPDException ex1) {
// Avoid getting in an infinite loop if an error occurred in the password cmd
if (ex1.mErrorCode == MPDException.ACK_ERROR_PASSWORD ||
ex1.mErrorCode == MPDException.ACK_ERROR_PERMISSION) {
result.setException(ex1);
} else {
handleFailure(result, ex1);
}
}
if (!MPDCommand.isRetryable(baseCommand) && isCommandSent) {
break;
}
retryCount++;
}
if (!mCancelled) {
if (result.getResult() == null) {
logError(result, baseCommand, retryCount);
} else {
mIsConnected = true;
}
}
return result;
} | final CommandResult function() { int retryCount = 0; final CommandResult result = new CommandResult(); boolean isCommandSent = false; final String baseCommand = mCommand.getCommand(); while (result.getResult() == null && retryCount < MAX_REQUEST_RETRY && !mCancelled) { try { if (getSocket() == null !getSocket().isConnected() getSocket().isClosed()) { result.setConnectionResult(innerConnect()); } write(); isCommandSent = true; result.setResult(read()); } catch (final EOFException ex0) { handleFailure(result, ex0); if (MPDCommand.MPD_CMD_IDLE.equals(baseCommand)) { result.setResult(Collections.singletonList( STR + MPDStatusMonitor.IDLE_PLAYLIST)); } } catch (final IOException e) { handleFailure(result, e); } catch (final MPDException ex1) { if (ex1.mErrorCode == MPDException.ACK_ERROR_PASSWORD ex1.mErrorCode == MPDException.ACK_ERROR_PERMISSION) { result.setException(ex1); } else { handleFailure(result, ex1); } } if (!MPDCommand.isRetryable(baseCommand) && isCommandSent) { break; } retryCount++; } if (!mCancelled) { if (result.getResult() == null) { logError(result, baseCommand, retryCount); } else { mIsConnected = true; } } return result; } | /**
* This is the default class method.
*
* @return A {@code CommandResult} from the processed command.
*/ | This is the default class method | call | {
"repo_name": "jcnoir/dmix",
"path": "JMPDComm/src/main/java/org/a0z/mpd/connection/MPDConnection.java",
"license": "apache-2.0",
"size": 24038
} | [
"java.io.EOFException",
"java.io.IOException",
"java.util.Collections",
"org.a0z.mpd.MPDCommand",
"org.a0z.mpd.MPDStatusMonitor",
"org.a0z.mpd.exception.MPDException"
] | import java.io.EOFException; import java.io.IOException; import java.util.Collections; import org.a0z.mpd.MPDCommand; import org.a0z.mpd.MPDStatusMonitor; import org.a0z.mpd.exception.MPDException; | import java.io.*; import java.util.*; import org.a0z.mpd.*; import org.a0z.mpd.exception.*; | [
"java.io",
"java.util",
"org.a0z.mpd"
] | java.io; java.util; org.a0z.mpd; | 2,153,386 |
EReference getDocumentRoot_LinkEventDefinition(); | EReference getDocumentRoot_LinkEventDefinition(); | /**
* Returns the meta object for the containment reference '{@link org.eclipse.bpmn2.DocumentRoot#getLinkEventDefinition <em>Link Event Definition</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the containment reference '<em>Link Event Definition</em>'.
* @see org.eclipse.bpmn2.DocumentRoot#getLinkEventDefinition()
* @see #getDocumentRoot()
* @generated
*/ | Returns the meta object for the containment reference '<code>org.eclipse.bpmn2.DocumentRoot#getLinkEventDefinition Link Event Definition</code>'. | getDocumentRoot_LinkEventDefinition | {
"repo_name": "Rikkola/kie-wb-common",
"path": "kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-emf/src/main/java/org/eclipse/bpmn2/Bpmn2Package.java",
"license": "apache-2.0",
"size": 929298
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 2,124,016 |
public static List<InetSocketAddress> defaultAddressList() {
return DEFAULT_NAME_SERVER_LIST;
} | static List<InetSocketAddress> function() { return DEFAULT_NAME_SERVER_LIST; } | /**
* Returns the list of the system DNS server addresses. If it failed to retrieve the list of the system DNS server
* addresses from the environment, it will return {@code "8.8.8.8"} and {@code "8.8.4.4"}, the addresses of the
* Google public DNS servers.
*/ | Returns the list of the system DNS server addresses. If it failed to retrieve the list of the system DNS server addresses from the environment, it will return "8.8.8.8" and "8.8.4.4", the addresses of the Google public DNS servers | defaultAddressList | {
"repo_name": "golovnin/netty",
"path": "resolver-dns/src/main/java/io/netty/resolver/dns/DnsServerAddresses.java",
"license": "apache-2.0",
"size": 10576
} | [
"java.net.InetSocketAddress",
"java.util.List"
] | import java.net.InetSocketAddress; import java.util.List; | import java.net.*; import java.util.*; | [
"java.net",
"java.util"
] | java.net; java.util; | 2,903,774 |
public Map<String, Object> makeDefaultConnectionParams(String server, int port, String application) {
Map<String, Object> params = new ObjectMap<String, Object>();
params.put("app", application);
params.put("objectEncoding", Integer.valueOf(0));
params.put("fpad", Boolean.FALSE);
params.put("flashVer", "WIN 11,2,202,235");
params.put("audioCodecs", Integer.valueOf(3575));
params.put("videoFunction", Integer.valueOf(1));
params.put("pageUrl", null);
params.put("path", application);
params.put("capabilities", Integer.valueOf(15));
params.put("swfUrl", null);
params.put("videoCodecs", Integer.valueOf(252));
return params;
}
| Map<String, Object> function(String server, int port, String application) { Map<String, Object> params = new ObjectMap<String, Object>(); params.put("app", application); params.put(STR, Integer.valueOf(0)); params.put("fpad", Boolean.FALSE); params.put(STR, STR); params.put(STR, Integer.valueOf(3575)); params.put(STR, Integer.valueOf(1)); params.put(STR, null); params.put("path", application); params.put(STR, Integer.valueOf(15)); params.put(STR, null); params.put(STR, Integer.valueOf(252)); return params; } | /**
* Creates the default connection parameters collection. Many implementations of this handler will create a tcUrl if one is not found;
* it is created with the current server url.
*
* @param server the server location
* @param port the port for the protocol
* @param application the application name at the given server
* @return connection parameters map
*/ | Creates the default connection parameters collection. Many implementations of this handler will create a tcUrl if one is not found; it is created with the current server url | makeDefaultConnectionParams | {
"repo_name": "zofuthan/red5-client",
"path": "src/main/java/org/red5/client/net/rtmp/BaseRTMPClientHandler.java",
"license": "apache-2.0",
"size": 35769
} | [
"java.util.Map",
"org.red5.io.utils.ObjectMap"
] | import java.util.Map; import org.red5.io.utils.ObjectMap; | import java.util.*; import org.red5.io.utils.*; | [
"java.util",
"org.red5.io"
] | java.util; org.red5.io; | 2,486,285 |
@CheckForNull
public ResultAction<?> getLastAction() {
AbstractBuild<?, ?> lastBuild = getLastFinishedBuild();
if (lastBuild == null) {
return null;
}
else {
return getResultAction(lastBuild);
}
}
| ResultAction<?> function() { AbstractBuild<?, ?> lastBuild = getLastFinishedBuild(); if (lastBuild == null) { return null; } else { return getResultAction(lastBuild); } } | /**
* Returns the last valid result action.
*
* @return the last valid result action, or <code>null</code> if no such
* action is found
*/ | Returns the last valid result action | getLastAction | {
"repo_name": "recena/analysis-core-plugin",
"path": "src/main/java/hudson/plugins/analysis/core/AbstractProjectAction.java",
"license": "mit",
"size": 15161
} | [
"hudson.model.AbstractBuild"
] | import hudson.model.AbstractBuild; | import hudson.model.*; | [
"hudson.model"
] | hudson.model; | 1,630,454 |
void onSnackbarShown(Snackbar snackbar); | void onSnackbarShown(Snackbar snackbar); | /**
* Indicates that the Snackbar was shown (made visible).
*
* @param snackbar The Snackbar.
*/ | Indicates that the Snackbar was shown (made visible) | onSnackbarShown | {
"repo_name": "andrewlord1990/SnackbarBuilder",
"path": "snackbarbuilder/src/main/java/com/github/andrewlord1990/snackbarbuilder/callback/SnackbarShowCallback.java",
"license": "apache-2.0",
"size": 1002
} | [
"android.support.design.widget.Snackbar"
] | import android.support.design.widget.Snackbar; | import android.support.design.widget.*; | [
"android.support"
] | android.support; | 1,316,431 |
public void registerInput(Model globalInput) {
this.inputGraph=globalInput;
}
| void function(Model globalInput) { this.inputGraph=globalInput; } | /**
* sets the input to the import process
* @param globalInput
*/ | sets the input to the import process | registerInput | {
"repo_name": "metarelate/terminology-server",
"path": "moduleTmCore/src/main/java/net/metarelate/terminology/modelBuilders/TerminologyModelBuilder.java",
"license": "gpl-3.0",
"size": 21639
} | [
"com.hp.hpl.jena.rdf.model.Model"
] | import com.hp.hpl.jena.rdf.model.Model; | import com.hp.hpl.jena.rdf.model.*; | [
"com.hp.hpl"
] | com.hp.hpl; | 1,153,170 |
@Override
public Iterator<Group> getGroups() {
return groups.iterator();
} | Iterator<Group> function() { return groups.iterator(); } | /**
* Return the set of {@link Group}s to which this user belongs.
*/ | Return the set of <code>Group</code>s to which this user belongs | getGroups | {
"repo_name": "apache/tomcat",
"path": "java/org/apache/catalina/users/GenericUser.java",
"license": "apache-2.0",
"size": 6766
} | [
"java.util.Iterator",
"org.apache.catalina.Group"
] | import java.util.Iterator; import org.apache.catalina.Group; | import java.util.*; import org.apache.catalina.*; | [
"java.util",
"org.apache.catalina"
] | java.util; org.apache.catalina; | 939,139 |
default void checkCanExecuteProcedure(SystemSecurityContext systemSecurityContext, CatalogSchemaRoutineName procedure)
{
denyExecuteProcedure(procedure.toString());
} | default void checkCanExecuteProcedure(SystemSecurityContext systemSecurityContext, CatalogSchemaRoutineName procedure) { denyExecuteProcedure(procedure.toString()); } | /**
* Check if identity is allowed to execute the specified procedure
*
* @throws AccessDeniedException if not allowed
*/ | Check if identity is allowed to execute the specified procedure | checkCanExecuteProcedure | {
"repo_name": "hgschmie/presto",
"path": "presto-spi/src/main/java/io/prestosql/spi/security/SystemAccessControl.java",
"license": "apache-2.0",
"size": 21168
} | [
"io.prestosql.spi.connector.CatalogSchemaRoutineName",
"io.prestosql.spi.security.AccessDeniedException"
] | import io.prestosql.spi.connector.CatalogSchemaRoutineName; import io.prestosql.spi.security.AccessDeniedException; | import io.prestosql.spi.connector.*; import io.prestosql.spi.security.*; | [
"io.prestosql.spi"
] | io.prestosql.spi; | 2,564,724 |
public void pushScreen(String screenName, CallbackContext callbackContext) {
DataLayer dataLayer = getDataLayer();
dataLayer.pushEvent("ScreenView", DataLayer.mapOf("ScreenName", screenName));
System.out.println("[TAG_MANAGER] pushScreen: " + screenName);
callbackContext.success("pushScreen: " + screenName);
} | void function(String screenName, CallbackContext callbackContext) { DataLayer dataLayer = getDataLayer(); dataLayer.pushEvent(STR, DataLayer.mapOf(STR, screenName)); System.out.println(STR + screenName); callbackContext.success(STR + screenName); } | /**
* Push an "ScreenView" event with the given screen name.
*/ | Push an "ScreenView" event with the given screen name | pushScreen | {
"repo_name": "ramirogm/google-analytics-plugin",
"path": "android/UniversalAnalyticsPlugin.java",
"license": "mit",
"size": 22283
} | [
"com.google.android.gms.tagmanager.DataLayer",
"org.apache.cordova.CallbackContext"
] | import com.google.android.gms.tagmanager.DataLayer; import org.apache.cordova.CallbackContext; | import com.google.android.gms.tagmanager.*; import org.apache.cordova.*; | [
"com.google.android",
"org.apache.cordova"
] | com.google.android; org.apache.cordova; | 2,882,855 |
public void init(IHandler handler)
{
// Remember Codebase
m_codeBase = handler.getCodeBase();
super.init(handler);
} | void function(IHandler handler) { m_codeBase = handler.getCodeBase(); super.init(handler); } | /**
* Initialize the codebase, used for image-loading
*
* @param handler
*/ | Initialize the codebase, used for image-loading | init | {
"repo_name": "Doag/Forms",
"path": "demos/demo0014/AccordionButton.java",
"license": "mit",
"size": 15047
} | [
"oracle.forms.handler.IHandler"
] | import oracle.forms.handler.IHandler; | import oracle.forms.handler.*; | [
"oracle.forms.handler"
] | oracle.forms.handler; | 773,221 |
private boolean casContainsTypes(CAS aCAS, Type[] aTypes) {
for (int i = 0; i < aTypes.length; i++) {
Collection c = aCAS.getIndexRepository().getIndexedFSs(aTypes[i]);
if (c.isEmpty())
return false;
}
return true;
}
}
static private class ComponentInfo {
String key;
Type[][] inputTypesByCapability;
} | boolean function(CAS aCAS, Type[] aTypes) { for (int i = 0; i < aTypes.length; i++) { Collection c = aCAS.getIndexRepository().getIndexedFSs(aTypes[i]); if (c.isEmpty()) return false; } return true; } } static private class ComponentInfo { String key; Type[][] inputTypesByCapability; } | /**
* Checks if the CAS contains at least one instance of each of the specified types.
*
* @param aCAS
* the CAS to check
* @param aTypes
* array of types to look for
*
* @return true iff <code>aCAS</code> contains at least one instance of each type in
* <code>aTypes</code>
*/ | Checks if the CAS contains at least one instance of each of the specified types | casContainsTypes | {
"repo_name": "apache/uima-uimaj",
"path": "uimaj-examples/src/main/java/org/apache/uima/examples/flow/WhiteboardFlowController2.java",
"license": "apache-2.0",
"size": 8066
} | [
"java.util.Collection",
"org.apache.uima.cas.Type"
] | import java.util.Collection; import org.apache.uima.cas.Type; | import java.util.*; import org.apache.uima.cas.*; | [
"java.util",
"org.apache.uima"
] | java.util; org.apache.uima; | 1,033,718 |
static FSNamesystem loadFromDisk(Configuration conf) throws IOException {
checkConfiguration(conf);
FSImage fsImage = new FSImage(conf,
FSNamesystem.getNamespaceDirs(conf),
FSNamesystem.getNamespaceEditsDirs(conf));
FSNamesystem namesystem = new FSNamesystem(conf, fsImage, false);
StartupOption startOpt = NameNode.getStartupOption(conf);
if (startOpt == StartupOption.RECOVER) {
namesystem.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
}
long loadStart = monotonicNow();
try {
namesystem.loadFSImage(startOpt);
} catch (IOException ioe) {
LOG.warn("Encountered exception loading fsimage", ioe);
fsImage.close();
throw ioe;
}
long timeTakenToLoadFSImage = monotonicNow() - loadStart;
LOG.info("Finished loading FSImage in " + timeTakenToLoadFSImage + " msecs");
NameNodeMetrics nnMetrics = NameNode.getNameNodeMetrics();
if (nnMetrics != null) {
nnMetrics.setFsImageLoadTime((int) timeTakenToLoadFSImage);
}
return namesystem;
}
FSNamesystem(Configuration conf, FSImage fsImage) throws IOException {
this(conf, fsImage, false);
}
FSNamesystem(Configuration conf, FSImage fsImage, boolean ignoreRetryCache)
throws IOException {
provider = DFSUtil.createKeyProviderCryptoExtension(conf);
if (provider == null) {
LOG.info("No KeyProvider found.");
} else {
LOG.info("Found KeyProvider: " + provider.toString());
}
if (conf.getBoolean(DFS_NAMENODE_AUDIT_LOG_ASYNC_KEY,
DFS_NAMENODE_AUDIT_LOG_ASYNC_DEFAULT)) {
LOG.info("Enabling async auditlog");
enableAsyncAuditLog();
}
boolean fair = conf.getBoolean("dfs.namenode.fslock.fair", true);
LOG.info("fsLock is fair:" + fair);
fsLock = new FSNamesystemLock(fair);
cond = fsLock.writeLock().newCondition();
cpLock = new ReentrantLock();
this.fsImage = fsImage;
try {
resourceRecheckInterval = conf.getLong(
DFS_NAMENODE_RESOURCE_CHECK_INTERVAL_KEY,
DFS_NAMENODE_RESOURCE_CHECK_INTERVAL_DEFAULT);
this.blockManager = new BlockManager(this, conf);
this.datanodeStatistics = blockManager.getDatanodeManager().getDatanodeStatistics();
this.blockIdManager = new BlockIdManager(blockManager);
this.fsOwner = UserGroupInformation.getCurrentUser();
this.supergroup = conf.get(DFS_PERMISSIONS_SUPERUSERGROUP_KEY,
DFS_PERMISSIONS_SUPERUSERGROUP_DEFAULT);
this.isPermissionEnabled = conf.getBoolean(DFS_PERMISSIONS_ENABLED_KEY,
DFS_PERMISSIONS_ENABLED_DEFAULT);
LOG.info("fsOwner = " + fsOwner);
LOG.info("supergroup = " + supergroup);
LOG.info("isPermissionEnabled = " + isPermissionEnabled);
// block allocation has to be persisted in HA using a shared edits directory
// so that the standby has up-to-date namespace information
nameserviceId = DFSUtil.getNamenodeNameServiceId(conf);
this.haEnabled = HAUtil.isHAEnabled(conf, nameserviceId);
// Sanity check the HA-related config.
if (nameserviceId != null) {
LOG.info("Determined nameservice ID: " + nameserviceId);
}
LOG.info("HA Enabled: " + haEnabled);
if (!haEnabled && HAUtil.usesSharedEditsDir(conf)) {
LOG.warn("Configured NNs:\n" + DFSUtil.nnAddressesAsString(conf));
throw new IOException("Invalid configuration: a shared edits dir " +
"must not be specified if HA is not enabled.");
}
// Get the checksum type from config
String checksumTypeStr = conf.get(DFS_CHECKSUM_TYPE_KEY, DFS_CHECKSUM_TYPE_DEFAULT);
DataChecksum.Type checksumType;
try {
checksumType = DataChecksum.Type.valueOf(checksumTypeStr);
} catch (IllegalArgumentException iae) {
throw new IOException("Invalid checksum type in "
+ DFS_CHECKSUM_TYPE_KEY + ": " + checksumTypeStr);
}
this.serverDefaults = new FsServerDefaults(
conf.getLongBytes(DFS_BLOCK_SIZE_KEY, DFS_BLOCK_SIZE_DEFAULT),
conf.getInt(DFS_BYTES_PER_CHECKSUM_KEY, DFS_BYTES_PER_CHECKSUM_DEFAULT),
conf.getInt(DFS_CLIENT_WRITE_PACKET_SIZE_KEY, DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT),
(short) conf.getInt(DFS_REPLICATION_KEY, DFS_REPLICATION_DEFAULT),
conf.getInt(IO_FILE_BUFFER_SIZE_KEY, IO_FILE_BUFFER_SIZE_DEFAULT),
conf.getBoolean(DFS_ENCRYPT_DATA_TRANSFER_KEY, DFS_ENCRYPT_DATA_TRANSFER_DEFAULT),
conf.getLong(FS_TRASH_INTERVAL_KEY, FS_TRASH_INTERVAL_DEFAULT),
checksumType);
this.maxFsObjects = conf.getLong(DFS_NAMENODE_MAX_OBJECTS_KEY,
DFS_NAMENODE_MAX_OBJECTS_DEFAULT);
this.minBlockSize = conf.getLong(DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_KEY,
DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_DEFAULT);
this.maxBlocksPerFile = conf.getLong(DFSConfigKeys.DFS_NAMENODE_MAX_BLOCKS_PER_FILE_KEY,
DFSConfigKeys.DFS_NAMENODE_MAX_BLOCKS_PER_FILE_DEFAULT);
this.accessTimePrecision = conf.getLong(DFS_NAMENODE_ACCESSTIME_PRECISION_KEY,
DFS_NAMENODE_ACCESSTIME_PRECISION_DEFAULT);
this.supportAppends = conf.getBoolean(DFS_SUPPORT_APPEND_KEY, DFS_SUPPORT_APPEND_DEFAULT);
LOG.info("Append Enabled: " + supportAppends);
this.dtpReplaceDatanodeOnFailure = ReplaceDatanodeOnFailure.get(conf);
this.standbyShouldCheckpoint = conf.getBoolean(
DFS_HA_STANDBY_CHECKPOINTS_KEY, DFS_HA_STANDBY_CHECKPOINTS_DEFAULT);
// # edit autoroll threshold is a multiple of the checkpoint threshold
this.editLogRollerThreshold = (long)
(conf.getFloat(
DFS_NAMENODE_EDIT_LOG_AUTOROLL_MULTIPLIER_THRESHOLD,
DFS_NAMENODE_EDIT_LOG_AUTOROLL_MULTIPLIER_THRESHOLD_DEFAULT) *
conf.getLong(
DFS_NAMENODE_CHECKPOINT_TXNS_KEY,
DFS_NAMENODE_CHECKPOINT_TXNS_DEFAULT));
this.editLogRollerInterval = conf.getInt(
DFS_NAMENODE_EDIT_LOG_AUTOROLL_CHECK_INTERVAL_MS,
DFS_NAMENODE_EDIT_LOG_AUTOROLL_CHECK_INTERVAL_MS_DEFAULT);
this.lazyPersistFileScrubIntervalSec = conf.getInt(
DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC,
DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC_DEFAULT);
if (this.lazyPersistFileScrubIntervalSec == 0) {
throw new IllegalArgumentException(
DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC + " must be non-zero.");
}
// For testing purposes, allow the DT secret manager to be started regardless
// of whether security is enabled.
alwaysUseDelegationTokensForTests = conf.getBoolean(
DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY,
DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_DEFAULT);
this.dtSecretManager = createDelegationTokenSecretManager(conf);
this.dir = new FSDirectory(this, conf);
this.snapshotManager = new SnapshotManager(dir);
this.cacheManager = new CacheManager(this, conf, blockManager);
this.safeMode = new SafeModeInfo(conf);
this.topConf = new TopConf(conf);
this.auditLoggers = initAuditLoggers(conf);
this.isDefaultAuditLogger = auditLoggers.size() == 1 &&
auditLoggers.get(0) instanceof DefaultAuditLogger;
this.retryCache = ignoreRetryCache ? null : initRetryCache(conf);
Class<? extends INodeAttributeProvider> klass = conf.getClass(
DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_KEY,
null, INodeAttributeProvider.class);
if (klass != null) {
inodeAttributeProvider = ReflectionUtils.newInstance(klass, conf);
LOG.info("Using INode attribute provider: " + klass.getName());
}
} catch(IOException e) {
LOG.error(getClass().getSimpleName() + " initialization failed.", e);
close();
throw e;
} catch (RuntimeException re) {
LOG.error(getClass().getSimpleName() + " initialization failed.", re);
close();
throw re;
}
} | static FSNamesystem loadFromDisk(Configuration conf) throws IOException { checkConfiguration(conf); FSImage fsImage = new FSImage(conf, FSNamesystem.getNamespaceDirs(conf), FSNamesystem.getNamespaceEditsDirs(conf)); FSNamesystem namesystem = new FSNamesystem(conf, fsImage, false); StartupOption startOpt = NameNode.getStartupOption(conf); if (startOpt == StartupOption.RECOVER) { namesystem.setSafeMode(SafeModeAction.SAFEMODE_ENTER); } long loadStart = monotonicNow(); try { namesystem.loadFSImage(startOpt); } catch (IOException ioe) { LOG.warn(STR, ioe); fsImage.close(); throw ioe; } long timeTakenToLoadFSImage = monotonicNow() - loadStart; LOG.info(STR + timeTakenToLoadFSImage + STR); NameNodeMetrics nnMetrics = NameNode.getNameNodeMetrics(); if (nnMetrics != null) { nnMetrics.setFsImageLoadTime((int) timeTakenToLoadFSImage); } return namesystem; } FSNamesystem(Configuration conf, FSImage fsImage) throws IOException { this(conf, fsImage, false); } FSNamesystem(Configuration conf, FSImage fsImage, boolean ignoreRetryCache) throws IOException { provider = DFSUtil.createKeyProviderCryptoExtension(conf); if (provider == null) { LOG.info(STR); } else { LOG.info(STR + provider.toString()); } if (conf.getBoolean(DFS_NAMENODE_AUDIT_LOG_ASYNC_KEY, DFS_NAMENODE_AUDIT_LOG_ASYNC_DEFAULT)) { LOG.info(STR); enableAsyncAuditLog(); } boolean fair = conf.getBoolean(STR, true); LOG.info(STR + fair); fsLock = new FSNamesystemLock(fair); cond = fsLock.writeLock().newCondition(); cpLock = new ReentrantLock(); this.fsImage = fsImage; try { resourceRecheckInterval = conf.getLong( DFS_NAMENODE_RESOURCE_CHECK_INTERVAL_KEY, DFS_NAMENODE_RESOURCE_CHECK_INTERVAL_DEFAULT); this.blockManager = new BlockManager(this, conf); this.datanodeStatistics = blockManager.getDatanodeManager().getDatanodeStatistics(); this.blockIdManager = new BlockIdManager(blockManager); this.fsOwner = UserGroupInformation.getCurrentUser(); this.supergroup = conf.get(DFS_PERMISSIONS_SUPERUSERGROUP_KEY, DFS_PERMISSIONS_SUPERUSERGROUP_DEFAULT); this.isPermissionEnabled = conf.getBoolean(DFS_PERMISSIONS_ENABLED_KEY, DFS_PERMISSIONS_ENABLED_DEFAULT); LOG.info(STR + fsOwner); LOG.info(STR + supergroup); LOG.info(STR + isPermissionEnabled); nameserviceId = DFSUtil.getNamenodeNameServiceId(conf); this.haEnabled = HAUtil.isHAEnabled(conf, nameserviceId); if (nameserviceId != null) { LOG.info(STR + nameserviceId); } LOG.info(STR + haEnabled); if (!haEnabled && HAUtil.usesSharedEditsDir(conf)) { LOG.warn(STR + DFSUtil.nnAddressesAsString(conf)); throw new IOException(STR + STR); } String checksumTypeStr = conf.get(DFS_CHECKSUM_TYPE_KEY, DFS_CHECKSUM_TYPE_DEFAULT); DataChecksum.Type checksumType; try { checksumType = DataChecksum.Type.valueOf(checksumTypeStr); } catch (IllegalArgumentException iae) { throw new IOException(STR + DFS_CHECKSUM_TYPE_KEY + STR + checksumTypeStr); } this.serverDefaults = new FsServerDefaults( conf.getLongBytes(DFS_BLOCK_SIZE_KEY, DFS_BLOCK_SIZE_DEFAULT), conf.getInt(DFS_BYTES_PER_CHECKSUM_KEY, DFS_BYTES_PER_CHECKSUM_DEFAULT), conf.getInt(DFS_CLIENT_WRITE_PACKET_SIZE_KEY, DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT), (short) conf.getInt(DFS_REPLICATION_KEY, DFS_REPLICATION_DEFAULT), conf.getInt(IO_FILE_BUFFER_SIZE_KEY, IO_FILE_BUFFER_SIZE_DEFAULT), conf.getBoolean(DFS_ENCRYPT_DATA_TRANSFER_KEY, DFS_ENCRYPT_DATA_TRANSFER_DEFAULT), conf.getLong(FS_TRASH_INTERVAL_KEY, FS_TRASH_INTERVAL_DEFAULT), checksumType); this.maxFsObjects = conf.getLong(DFS_NAMENODE_MAX_OBJECTS_KEY, DFS_NAMENODE_MAX_OBJECTS_DEFAULT); this.minBlockSize = 
conf.getLong(DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_KEY, DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_DEFAULT); this.maxBlocksPerFile = conf.getLong(DFSConfigKeys.DFS_NAMENODE_MAX_BLOCKS_PER_FILE_KEY, DFSConfigKeys.DFS_NAMENODE_MAX_BLOCKS_PER_FILE_DEFAULT); this.accessTimePrecision = conf.getLong(DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, DFS_NAMENODE_ACCESSTIME_PRECISION_DEFAULT); this.supportAppends = conf.getBoolean(DFS_SUPPORT_APPEND_KEY, DFS_SUPPORT_APPEND_DEFAULT); LOG.info(STR + supportAppends); this.dtpReplaceDatanodeOnFailure = ReplaceDatanodeOnFailure.get(conf); this.standbyShouldCheckpoint = conf.getBoolean( DFS_HA_STANDBY_CHECKPOINTS_KEY, DFS_HA_STANDBY_CHECKPOINTS_DEFAULT); this.editLogRollerThreshold = (long) (conf.getFloat( DFS_NAMENODE_EDIT_LOG_AUTOROLL_MULTIPLIER_THRESHOLD, DFS_NAMENODE_EDIT_LOG_AUTOROLL_MULTIPLIER_THRESHOLD_DEFAULT) * conf.getLong( DFS_NAMENODE_CHECKPOINT_TXNS_KEY, DFS_NAMENODE_CHECKPOINT_TXNS_DEFAULT)); this.editLogRollerInterval = conf.getInt( DFS_NAMENODE_EDIT_LOG_AUTOROLL_CHECK_INTERVAL_MS, DFS_NAMENODE_EDIT_LOG_AUTOROLL_CHECK_INTERVAL_MS_DEFAULT); this.lazyPersistFileScrubIntervalSec = conf.getInt( DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC, DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC_DEFAULT); if (this.lazyPersistFileScrubIntervalSec == 0) { throw new IllegalArgumentException( DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC + STR); } alwaysUseDelegationTokensForTests = conf.getBoolean( DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_DEFAULT); this.dtSecretManager = createDelegationTokenSecretManager(conf); this.dir = new FSDirectory(this, conf); this.snapshotManager = new SnapshotManager(dir); this.cacheManager = new CacheManager(this, conf, blockManager); this.safeMode = new SafeModeInfo(conf); this.topConf = new TopConf(conf); this.auditLoggers = initAuditLoggers(conf); this.isDefaultAuditLogger = auditLoggers.size() == 1 && auditLoggers.get(0) instanceof DefaultAuditLogger; this.retryCache = ignoreRetryCache ? null : initRetryCache(conf); Class<? extends INodeAttributeProvider> klass = conf.getClass( DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_KEY, null, INodeAttributeProvider.class); if (klass != null) { inodeAttributeProvider = ReflectionUtils.newInstance(klass, conf); LOG.info(STR + klass.getName()); } } catch(IOException e) { LOG.error(getClass().getSimpleName() + STR, e); close(); throw e; } catch (RuntimeException re) { LOG.error(getClass().getSimpleName() + STR, re); close(); throw re; } } | /**
* Instantiates an FSNamesystem loaded from the image and edits
* directories specified in the passed Configuration.
*
* @param conf the Configuration which specifies the storage directories
* from which to load
* @return an FSNamesystem which contains the loaded namespace
* @throws IOException if loading fails
*/ | Instantiates an FSNamesystem loaded from the image and edits directories specified in the passed Configuration | loadFromDisk | {
"repo_name": "myeoje/PhillyYarn",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java",
"license": "apache-2.0",
"size": 298468
} | [
"java.io.IOException",
"java.util.concurrent.locks.ReentrantLock",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.FsServerDefaults",
"org.apache.hadoop.hdfs.DFSConfigKeys",
"org.apache.hadoop.hdfs.DFSUtil",
"org.apache.hadoop.hdfs.HAUtil",
"org.apache.hadoop.hdfs.protocol.HdfsConstants",
"org.apache.hadoop.hdfs.protocol.datatransfer.ReplaceDatanodeOnFailure",
"org.apache.hadoop.hdfs.server.blockmanagement.BlockIdManager",
"org.apache.hadoop.hdfs.server.blockmanagement.BlockManager",
"org.apache.hadoop.hdfs.server.common.HdfsServerConstants",
"org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics",
"org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotManager",
"org.apache.hadoop.hdfs.server.namenode.top.TopConf",
"org.apache.hadoop.security.UserGroupInformation",
"org.apache.hadoop.util.DataChecksum",
"org.apache.hadoop.util.ReflectionUtils",
"org.apache.hadoop.util.Time"
] | import java.io.IOException; import java.util.concurrent.locks.ReentrantLock; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.HAUtil; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.datatransfer.ReplaceDatanodeOnFailure; import org.apache.hadoop.hdfs.server.blockmanagement.BlockIdManager; import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics; import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotManager; import org.apache.hadoop.hdfs.server.namenode.top.TopConf; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.Time; | import java.io.*; import java.util.concurrent.locks.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.protocol.datatransfer.*; import org.apache.hadoop.hdfs.server.blockmanagement.*; import org.apache.hadoop.hdfs.server.common.*; import org.apache.hadoop.hdfs.server.namenode.metrics.*; import org.apache.hadoop.hdfs.server.namenode.snapshot.*; import org.apache.hadoop.hdfs.server.namenode.top.*; import org.apache.hadoop.security.*; import org.apache.hadoop.util.*; | [
"java.io",
"java.util",
"org.apache.hadoop"
] | java.io; java.util; org.apache.hadoop; | 1,602,804 |
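The loadFromDisk entry above documents loading an FSNamesystem from the fsimage and edits directories named in a Configuration. A minimal caller sketch, modeled on how the NameNode itself invokes it; the class name and package placement are illustrative assumptions (loadFromDisk is package-private), and it only works against storage directories that already hold a valid fsimage and edit logs:

    package org.apache.hadoop.hdfs.server.namenode; // required: loadFromDisk is package-private

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;

    public class LoadFromDiskSketch {
        public static void main(String[] args) throws IOException {
            // Picks up dfs.namenode.name.dir / dfs.namenode.edits.dir from the loaded *-site.xml files.
            Configuration conf = new Configuration();
            // Replays the on-disk fsimage and edit logs into an in-memory namespace, as in the row above.
            FSNamesystem namesystem = FSNamesystem.loadFromDisk(conf);
            try {
                System.out.println("Loaded namespace from " + conf.get("dfs.namenode.name.dir"));
            } finally {
                namesystem.close();
            }
        }
    }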
@Override
public void setAway() {
if (me.isAway()) {
final int choice = uiTools.showOptionDialog(swingMessages.getMessage("swing.away.comeBackPopup.message", me.getAwayMsg()),
swingMessages.getMessage("swing.away.comeBackPopup.title"),
swingMessages.getMessage("swing.button.yes"),
swingMessages.getMessage("swing.button.cancel"));
if (choice == JOptionPane.YES_OPTION) {
try {
controller.comeBack();
}
catch (final CommandException e) {
uiTools.showWarningMessage(e.getMessage(), swingMessages.getMessage("swing.away.warningPopup.generalError.title"));
}
}
}
else {
final String reason = uiTools.showInputDialog(swingMessages.getMessage("swing.away.goAwayPopup.message"),
swingMessages.getMessage("swing.away.goAwayPopup.title"),
null);
if (reason != null && reason.trim().length() > 0) {
if (controller.isWrote()) {
controller.changeWriting(me.getCode(), false);
mainP.getMsgTF().setText("");
}
try {
controller.goAway(reason);
}
catch (final CommandException e) {
uiTools.showWarningMessage(e.getMessage(), swingMessages.getMessage("swing.away.warningPopup.generalError.title"));
}
}
}
mainP.getMsgTF().requestFocusInWindow();
} | void function() { if (me.isAway()) { final int choice = uiTools.showOptionDialog(swingMessages.getMessage(STR, me.getAwayMsg()), swingMessages.getMessage(STR), swingMessages.getMessage(STR), swingMessages.getMessage(STR)); if (choice == JOptionPane.YES_OPTION) { try { controller.comeBack(); } catch (final CommandException e) { uiTools.showWarningMessage(e.getMessage(), swingMessages.getMessage(STR)); } } } else { final String reason = uiTools.showInputDialog(swingMessages.getMessage(STR), swingMessages.getMessage(STR), null); if (reason != null && reason.trim().length() > 0) { if (controller.isWrote()) { controller.changeWriting(me.getCode(), false); mainP.getMsgTF().setText(""); } try { controller.goAway(reason); } catch (final CommandException e) { uiTools.showWarningMessage(e.getMessage(), swingMessages.getMessage(STR)); } } } mainP.getMsgTF().requestFocusInWindow(); } | /**
* If the user is not away, asks for an away reason,
* and sets the user as away.
*
 * If the user is away, asks if the user wants to come back.
 */ | If the user is not away, asks for an away reason, and sets the user as away. If the user is away, asks if the user wants to come back | setAway | {
"repo_name": "yuchaosydney/kouchat",
"path": "src/main/java/net/usikkert/kouchat/ui/swing/SwingMediator.java",
"license": "gpl-3.0",
"size": 31412
} | [
"javax.swing.JOptionPane",
"net.usikkert.kouchat.misc.CommandException"
] | import javax.swing.JOptionPane; import net.usikkert.kouchat.misc.CommandException; | import javax.swing.*; import net.usikkert.kouchat.misc.*; | [
"javax.swing",
"net.usikkert.kouchat"
] | javax.swing; net.usikkert.kouchat; | 1,677,404 |
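The setAway entry above is built around two stock Swing dialogs: an input dialog that collects the away reason and a yes/cancel option dialog when the user wants to come back. A standalone sketch of that dialog pattern using plain JOptionPane calls (the messages, titles, and away flag here are illustrative, not KouChat's real strings or state):

    import javax.swing.JOptionPane;

    public class AwayDialogSketch {
        public static void main(String[] args) {
            boolean away = false; // pretend the user is currently not away

            if (!away) {
                // Going away: ask for a reason and ignore empty or cancelled input.
                String reason = JOptionPane.showInputDialog(null,
                        "Reason for going away:", "Away", JOptionPane.QUESTION_MESSAGE);
                if (reason != null && !reason.trim().isEmpty()) {
                    System.out.println("Going away: " + reason.trim());
                }
            } else {
                // Coming back: confirm before clearing the away state.
                int choice = JOptionPane.showConfirmDialog(null,
                        "You are away. Come back?", "Come back", JOptionPane.YES_NO_OPTION);
                if (choice == JOptionPane.YES_OPTION) {
                    System.out.println("Back from away");
                }
            }
        }
    }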