column                  type        length / value range
method                  string      13 to 441k
clean_method            string      7 to 313k
doc                     string      17 to 17.3k
comment                 string      3 to 1.42k
method_name             string      1 to 273
extra                   dict
imports                 sequence
imports_info            string      19 to 34.8k
cluster_imports_info    string      15 to 3.66k
libraries               sequence
libraries_info          string      6 to 661
id                      int64       0 to 2.92M
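Read together, the columns above describe one record per Java method. The sketch below shows how such a row could be modeled in Java; it is illustrative only: the MethodRecord class name, the Jackson-based fromJson helper, and the assumption that rows are available as JSON lines are my own, not part of the dataset's published tooling.

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.util.Map;

// Hypothetical POJO mirroring the twelve columns listed above.
public class MethodRecord {
    public String method;               // raw method source, comments included
    public String clean_method;         // same method with comments removed and string literals abstracted (STR)
    public String doc;                  // full Javadoc block attached to the method
    public String comment;              // one-line natural-language summary of the Javadoc
    public String method_name;          // bare method name
    public Map<String, Object> extra;   // repo metadata: repo_name, path, license, size
    public List<String> imports;        // fully qualified imported types
    public String imports_info;         // the import statements as written
    public String cluster_imports_info; // the same imports collapsed to wildcard (package-level) form
    public List<String> libraries;      // top-level library/package names
    public String libraries_info;       // the library names joined as text
    public long id;                     // numeric row id

    // Parses one JSON-lines row into a MethodRecord (assumes JSON storage of the rows).
    public static MethodRecord fromJson(String jsonLine) throws Exception {
        ObjectMapper mapper = new ObjectMapper()
                .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        return mapper.readValue(jsonLine, MethodRecord.class);
    }
}

Parsed this way, a row gives direct access to the paired fields, for example comparing record.method with record.clean_method, or grouping rows by the entries of record.libraries.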
int InferenceC(vectordouble v, ArrayList<Double> milista1, ArrayList<Integer> milista2){ Double aux01 = milista1.get(0); double grado = aux01.doubleValue(); Integer aux02 = milista2.get(0); int regla_disparada = aux02.intValue(); double max=0,aux; int re=-1; int n = 0; int[] nn = new int[0]; int r = 0; int[] nr = new int[0]; int b = 0; int[] nb = new int[0]; char[][] mb = new char[0][0]; int[][]nnn = new int[0][0]; double[][] mr = new double[0][0]; String regla; int var1, var2; for (int i=0; i<n_rule; i++){ ArrayList<Integer> lista4 = new ArrayList<Integer>(1); Integer aux4 = new Integer(b); lista4.add(aux4); ArrayList<int[]> lista5 = new ArrayList<int[]>(1); lista5.add(nb); ArrayList<char[][]> lista6 = new ArrayList<char[][]>(1); lista6.add(mb); rule[i].GetBinary(lista4,lista5,lista6); aux4 = lista4.get(0); b = aux4.intValue(); nb = lista5.get(0); mb = lista6.get(0); char[] s= new char[nb[0]+1]; for (int j=0; j<nb[0]; j++) s[j]=mb[0][j]; s[nb[0]]='\0'; regla = String.copyValueOf(s); ArrayList<Integer> lista7 = new ArrayList<Integer>(1); Integer aux1 = new Integer(n); lista7.add(aux1); ArrayList<int[]> lista8 = new ArrayList<int[]>(1); lista8.add(nn); ArrayList<int[][]> lista9 = new ArrayList<int[][]>(1); lista9.add(nnn); rule[i].GetInteger(lista7,lista8,lista9); aux1 = lista7.get(0); n = aux1.intValue(); nn = lista8.get(0); nnn = lista9.get(0); ArrayList<Integer> lista1 = new ArrayList<Integer>(1); Integer aux1b = new Integer(r); lista1.add(aux1b); ArrayList<int[]> lista2 = new ArrayList<int[]>(1); lista2.add(nr); ArrayList<double[][]> lista3 = new ArrayList<double[][]>(1); lista3.add(mr); rule[i].GetReal(lista1,lista2,lista3); aux1b = lista1.get(0); r = aux1b.intValue(); nr = lista2.get(0); mr = lista3.get(0); aux=domain.Adaptation(v,regla,mr[0],mr[0][nr[0]-1]); //Apply the relations //aux=domain[0].Adaptation(v,regla); aux = aux*peso[i]; if (aux>max){ max=aux; re=i; grado = aux; } else if (r!=0 && aux>0 && aux==max && peso[i]>peso[re]) { max=aux; re=i; grado = aux; } } regla_disparada=re; aux01 = Double.valueOf(grado); milista1.add(0, aux01); aux02 = Integer.valueOf(regla_disparada); milista2.add(0, aux02); if (re!=-1) { ArrayList<Integer> lista7 = new ArrayList<Integer>(1); Integer aux1 = new Integer(n); lista7.add(aux1); ArrayList<int[]> lista8 = new ArrayList<int[]>(1); lista8.add(nn); ArrayList<int[][]> lista9 = new ArrayList<int[][]>(1); lista9.add(nnn); rule[re].GetInteger(lista7,lista8,lista9); aux1 = lista7.get(0); n = aux1.intValue(); nn = lista8.get(0); nnn = lista9.get(0); return nnn[0][0]; } else return -1; }
int InferenceC(vectordouble v, ArrayList<Double> milista1, ArrayList<Integer> milista2){ Double aux01 = milista1.get(0); double grado = aux01.doubleValue(); Integer aux02 = milista2.get(0); int regla_disparada = aux02.intValue(); double max=0,aux; int re=-1; int n = 0; int[] nn = new int[0]; int r = 0; int[] nr = new int[0]; int b = 0; int[] nb = new int[0]; char[][] mb = new char[0][0]; int[][]nnn = new int[0][0]; double[][] mr = new double[0][0]; String regla; int var1, var2; for (int i=0; i<n_rule; i++){ ArrayList<Integer> lista4 = new ArrayList<Integer>(1); Integer aux4 = new Integer(b); lista4.add(aux4); ArrayList<int[]> lista5 = new ArrayList<int[]>(1); lista5.add(nb); ArrayList<char[][]> lista6 = new ArrayList<char[][]>(1); lista6.add(mb); rule[i].GetBinary(lista4,lista5,lista6); aux4 = lista4.get(0); b = aux4.intValue(); nb = lista5.get(0); mb = lista6.get(0); char[] s= new char[nb[0]+1]; for (int j=0; j<nb[0]; j++) s[j]=mb[0][j]; s[nb[0]]='\0'; regla = String.copyValueOf(s); ArrayList<Integer> lista7 = new ArrayList<Integer>(1); Integer aux1 = new Integer(n); lista7.add(aux1); ArrayList<int[]> lista8 = new ArrayList<int[]>(1); lista8.add(nn); ArrayList<int[][]> lista9 = new ArrayList<int[][]>(1); lista9.add(nnn); rule[i].GetInteger(lista7,lista8,lista9); aux1 = lista7.get(0); n = aux1.intValue(); nn = lista8.get(0); nnn = lista9.get(0); ArrayList<Integer> lista1 = new ArrayList<Integer>(1); Integer aux1b = new Integer(r); lista1.add(aux1b); ArrayList<int[]> lista2 = new ArrayList<int[]>(1); lista2.add(nr); ArrayList<double[][]> lista3 = new ArrayList<double[][]>(1); lista3.add(mr); rule[i].GetReal(lista1,lista2,lista3); aux1b = lista1.get(0); r = aux1b.intValue(); nr = lista2.get(0); mr = lista3.get(0); aux=domain.Adaptation(v,regla,mr[0],mr[0][nr[0]-1]); aux = aux*peso[i]; if (aux>max){ max=aux; re=i; grado = aux; } else if (r!=0 && aux>0 && aux==max && peso[i]>peso[re]) { max=aux; re=i; grado = aux; } } regla_disparada=re; aux01 = Double.valueOf(grado); milista1.add(0, aux01); aux02 = Integer.valueOf(regla_disparada); milista2.add(0, aux02); if (re!=-1) { ArrayList<Integer> lista7 = new ArrayList<Integer>(1); Integer aux1 = new Integer(n); lista7.add(aux1); ArrayList<int[]> lista8 = new ArrayList<int[]>(1); lista8.add(nn); ArrayList<int[][]> lista9 = new ArrayList<int[][]>(1); lista9.add(nnn); rule[re].GetInteger(lista7,lista8,lista9); aux1 = lista7.get(0); n = aux1.intValue(); nn = lista8.get(0); nnn = lista9.get(0); return nnn[0][0]; } else return -1; }
/** * <p> * Returns the position of the rule with the best adaptation degree of the example (v) in the ruleset and * also this adaptation degree * </p> * @param v vectordouble An example * @param milista1 ArrayList<Double> The adaptation degree * @param milista2 ArrayList<Integer> The rule that has been fired * @return int The position of the rule with the best adaptation degree */
Returns the position of the rule with the best adaptation degree of the example (v) in the ruleset and also this adaptation degree
InferenceC
{ "repo_name": "SCI2SUGR/KEEL", "path": "src/keel/Algorithms/Fuzzy_Rule_Learning/Genetic/ClassifierSLAVE/ruleset.java", "license": "gpl-3.0", "size": 35356 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
2,610,025
public RestTemplateBuilder customizers( RestTemplateCustomizer... restTemplateCustomizers) { Assert.notNull(restTemplateCustomizers, "RestTemplateCustomizers must not be null"); return customizers(Arrays.asList(restTemplateCustomizers)); }
RestTemplateBuilder function( RestTemplateCustomizer... restTemplateCustomizers) { Assert.notNull(restTemplateCustomizers, STR); return customizers(Arrays.asList(restTemplateCustomizers)); }
/** * Set the {@link RestTemplateCustomizer RestTemplateCustomizers} that should be * applied to the {@link RestTemplate}. Customizers are applied in the order that they * were added after builder configuration has been applied. Setting this value will * replace any previously configured customizers. * @param restTemplateCustomizers the customizers to set * @return a new builder instance * @see #additionalCustomizers(RestTemplateCustomizer...) */
Set the <code>RestTemplateCustomizer RestTemplateCustomizers</code> that should be applied to the <code>RestTemplate</code>. Customizers are applied in the order that they were added after builder configuration has been applied. Setting this value will replace any previously configured customizers
customizers
{ "repo_name": "KiviMao/kivi", "path": "Java.Source/spring-boot/spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/client/RestTemplateBuilder.java", "license": "apache-2.0", "size": 26968 }
[ "java.util.Arrays", "org.springframework.util.Assert" ]
import java.util.Arrays; import org.springframework.util.Assert;
import java.util.*; import org.springframework.util.*;
[ "java.util", "org.springframework.util" ]
java.util; org.springframework.util;
1,384,329
assertNull(IssueUriMatcher.getIssue(Uri.parse(""))); }
assertNull(IssueUriMatcher.getIssue(Uri.parse(""))); }
/** * Verify empty uri */
Verify empty uri
testEmptyUri
{ "repo_name": "DeLaSalleUniversity-Manila/forkhub-Janelaaa", "path": "app/src/androidTest/java/com/janela/mobile/tests/issue/IssueUriMatcherTest.java", "license": "apache-2.0", "size": 3483 }
[ "android.net.Uri", "com.janela.mobile.core.issue.IssueUriMatcher" ]
import android.net.Uri; import com.janela.mobile.core.issue.IssueUriMatcher;
import android.net.*; import com.janela.mobile.core.issue.*;
[ "android.net", "com.janela.mobile" ]
android.net; com.janela.mobile;
1,283,287
@Test public void testSerialization() { XYItemEntity e1 = new XYItemEntity(new Rectangle2D.Double(1.0, 2.0, 3.0, 4.0), new TimeSeriesCollection(), 1, 9, "ToolTip", "URL"); XYItemEntity e2 = (XYItemEntity) TestUtilities.serialised(e1); assertEquals(e1, e2); }
void function() { XYItemEntity e1 = new XYItemEntity(new Rectangle2D.Double(1.0, 2.0, 3.0, 4.0), new TimeSeriesCollection(), 1, 9, STR, "URL"); XYItemEntity e2 = (XYItemEntity) TestUtilities.serialised(e1); assertEquals(e1, e2); }
/** * Serialize an instance, restore it, and check for equality. */
Serialize an instance, restore it, and check for equality
testSerialization
{ "repo_name": "GitoMat/jfreechart", "path": "src/test/java/org/jfree/chart/entity/XYItemEntityTest.java", "license": "lgpl-2.1", "size": 4027 }
[ "java.awt.geom.Rectangle2D", "org.jfree.chart.TestUtilities", "org.jfree.data.time.TimeSeriesCollection", "org.junit.Assert" ]
import java.awt.geom.Rectangle2D; import org.jfree.chart.TestUtilities; import org.jfree.data.time.TimeSeriesCollection; import org.junit.Assert;
import java.awt.geom.*; import org.jfree.chart.*; import org.jfree.data.time.*; import org.junit.*;
[ "java.awt", "org.jfree.chart", "org.jfree.data", "org.junit" ]
java.awt; org.jfree.chart; org.jfree.data; org.junit;
2,283,629
public List<Phase> getPhases() { return phasePool; }
List<Phase> function() { return phasePool; }
/** * Method getPhases. * * @return List<Phase> */
Method getPhases
getPhases
{ "repo_name": "nobio/pfm-simulator", "path": "src/de/nobio/pfmsim/runtime/Simulation.java", "license": "mit", "size": 8395 }
[ "de.nobio.pfmsim.project.Phase", "java.util.List" ]
import de.nobio.pfmsim.project.Phase; import java.util.List;
import de.nobio.pfmsim.project.*; import java.util.*;
[ "de.nobio.pfmsim", "java.util" ]
de.nobio.pfmsim; java.util;
1,985,159
@Test(expected = IllegalArgumentException.class) public void test_address_with_only_scheme_address() throws Exception { final String value = this.scheme; try { InterledgerAddress.builder().value(this.scheme).build(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), is(String.format(EXPECTED_ERROR_MESSAGE, value))); throw e; } }
@Test(expected = IllegalArgumentException.class) void function() throws Exception { final String value = this.scheme; try { InterledgerAddress.builder().value(this.scheme).build(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), is(String.format(EXPECTED_ERROR_MESSAGE, value))); throw e; } }
/** * Assert that something like "g" is invalid. */
Assert that something like "g" is invalid
test_address_with_only_scheme_address
{ "repo_name": "interledger/java-ilp-core", "path": "src/test/java/org/interledger/InterledgerAddressSchemeTest.java", "license": "apache-2.0", "size": 4312 }
[ "org.hamcrest.CoreMatchers", "org.hamcrest.MatcherAssert", "org.junit.Test" ]
import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import org.junit.Test;
import org.hamcrest.*; import org.junit.*;
[ "org.hamcrest", "org.junit" ]
org.hamcrest; org.junit;
1,905,177
public void mouseMoved(MouseEvent e) { resetTransform(e); }
void function(MouseEvent e) { resetTransform(e); }
/** * Invoked when the mouse button has been moved on a component * (with no buttons down). */
Invoked when the mouse button has been moved on a component (with no buttons down)
mouseMoved
{ "repo_name": "sflyphotobooks/crp-batik", "path": "sources/org/apache/batik/swing/gvt/AbstractResetTransformInteractor.java", "license": "apache-2.0", "size": 3886 }
[ "java.awt.event.MouseEvent" ]
import java.awt.event.MouseEvent;
import java.awt.event.*;
[ "java.awt" ]
java.awt;
2,077,650
@Override public View getView(int position, View convertView, ViewGroup parent) { for (ListAdapter piece : getPieces()) { int size=piece.getCount(); if (position < size) { return(piece.getView(position, convertView, parent)); } position-=size; } return(null); }
View function(int position, View convertView, ViewGroup parent) { for (ListAdapter piece : getPieces()) { int size=piece.getCount(); if (position < size) { return(piece.getView(position, convertView, parent)); } position-=size; } return(null); }
/** * Get a View that displays the data at the specified * position in the data set. * * @param position * Position of the item whose data we want * @param convertView * View to recycle, if not null * @param parent * ViewGroup containing the returned View */
Get a View that displays the data at the specified position in the data set
getView
{ "repo_name": "commonsguy/cwac-merge", "path": "merge/src/main/java/com/commonsware/cwac/merge/MergeAdapter.java", "license": "apache-2.0", "size": 11114 }
[ "android.view.View", "android.view.ViewGroup", "android.widget.ListAdapter" ]
import android.view.View; import android.view.ViewGroup; import android.widget.ListAdapter;
import android.view.*; import android.widget.*;
[ "android.view", "android.widget" ]
android.view; android.widget;
2,490,987
@Test public void testAsList() { final List<ImmutableBitSet> list = getSortedList(); // create a set of integers in and not in the lists final Set<Integer> integers = new HashSet<>(); for (ImmutableBitSet set : list) { for (Integer integer : set) { integers.add(integer); integers.add(integer + 1); integers.add(integer + 10); } } for (ImmutableBitSet bitSet : list) { final List<Integer> list1 = bitSet.toList(); final List<Integer> listView = bitSet.asList(); final Set<Integer> setView = bitSet.asSet(); assertThat(list1.size(), equalTo(bitSet.cardinality())); assertThat(listView.size(), equalTo(bitSet.cardinality())); assertThat(setView.size(), equalTo(bitSet.cardinality())); assertThat(list1.toString(), equalTo(listView.toString())); assertThat(list1.toString(), equalTo(setView.toString())); assertTrue(list1.equals(listView)); assertThat(list1.hashCode(), equalTo(listView.hashCode())); final Set<Integer> set = new HashSet<>(list1); assertThat(setView.hashCode(), is(set.hashCode())); assertThat(setView, equalTo(set)); for (Integer integer : integers) { final boolean b = list1.contains(integer); assertThat(listView.contains(integer), is(b)); assertThat(setView.contains(integer), is(b)); } } }
@Test void function() { final List<ImmutableBitSet> list = getSortedList(); final Set<Integer> integers = new HashSet<>(); for (ImmutableBitSet set : list) { for (Integer integer : set) { integers.add(integer); integers.add(integer + 1); integers.add(integer + 10); } } for (ImmutableBitSet bitSet : list) { final List<Integer> list1 = bitSet.toList(); final List<Integer> listView = bitSet.asList(); final Set<Integer> setView = bitSet.asSet(); assertThat(list1.size(), equalTo(bitSet.cardinality())); assertThat(listView.size(), equalTo(bitSet.cardinality())); assertThat(setView.size(), equalTo(bitSet.cardinality())); assertThat(list1.toString(), equalTo(listView.toString())); assertThat(list1.toString(), equalTo(setView.toString())); assertTrue(list1.equals(listView)); assertThat(list1.hashCode(), equalTo(listView.hashCode())); final Set<Integer> set = new HashSet<>(list1); assertThat(setView.hashCode(), is(set.hashCode())); assertThat(setView, equalTo(set)); for (Integer integer : integers) { final boolean b = list1.contains(integer); assertThat(listView.contains(integer), is(b)); assertThat(setView.contains(integer), is(b)); } } }
/** * Tests the methods * {@link org.apache.calcite.util.ImmutableBitSet#toList}, and * {@link org.apache.calcite.util.ImmutableBitSet#asList} and * {@link org.apache.calcite.util.ImmutableBitSet#asSet}. */
Tests the methods <code>org.apache.calcite.util.ImmutableBitSet#toList</code>, and <code>org.apache.calcite.util.ImmutableBitSet#asList</code> and <code>org.apache.calcite.util.ImmutableBitSet#asSet</code>
testAsList
{ "repo_name": "b-slim/calcite", "path": "core/src/test/java/org/apache/calcite/util/ImmutableBitSetTest.java", "license": "apache-2.0", "size": 21652 }
[ "java.util.HashSet", "java.util.List", "java.util.Set", "org.hamcrest.CoreMatchers", "org.junit.Assert", "org.junit.Test" ]
import java.util.HashSet; import java.util.List; import java.util.Set; import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Test;
import java.util.*; import org.hamcrest.*; import org.junit.*;
[ "java.util", "org.hamcrest", "org.junit" ]
java.util; org.hamcrest; org.junit;
2,174,186
@Override public void validateSearchParameters(Map<String, String> fieldValues) { super.validateSearchParameters(fieldValues); final String lowerBoundMilestoneExpectedCompletionDate = fieldValues.get(KRADConstants.LOOKUP_RANGE_LOWER_BOUND_PROPERTY_PREFIX+ArPropertyConstants.MilestoneFields.MILESTONE_EXPECTED_COMPLETION_DATE); validateDateField(lowerBoundMilestoneExpectedCompletionDate, KRADConstants.LOOKUP_RANGE_LOWER_BOUND_PROPERTY_PREFIX+ArPropertyConstants.MilestoneFields.MILESTONE_EXPECTED_COMPLETION_DATE, getDateTimeService()); final String upperBoundMilestoneExpectedCompletionDate = fieldValues.get(ArPropertyConstants.MilestoneFields.MILESTONE_EXPECTED_COMPLETION_DATE); validateDateField(upperBoundMilestoneExpectedCompletionDate, ArPropertyConstants.MilestoneFields.MILESTONE_EXPECTED_COMPLETION_DATE, getDateTimeService()); }
void function(Map<String, String> fieldValues) { super.validateSearchParameters(fieldValues); final String lowerBoundMilestoneExpectedCompletionDate = fieldValues.get(KRADConstants.LOOKUP_RANGE_LOWER_BOUND_PROPERTY_PREFIX+ArPropertyConstants.MilestoneFields.MILESTONE_EXPECTED_COMPLETION_DATE); validateDateField(lowerBoundMilestoneExpectedCompletionDate, KRADConstants.LOOKUP_RANGE_LOWER_BOUND_PROPERTY_PREFIX+ArPropertyConstants.MilestoneFields.MILESTONE_EXPECTED_COMPLETION_DATE, getDateTimeService()); final String upperBoundMilestoneExpectedCompletionDate = fieldValues.get(ArPropertyConstants.MilestoneFields.MILESTONE_EXPECTED_COMPLETION_DATE); validateDateField(upperBoundMilestoneExpectedCompletionDate, ArPropertyConstants.MilestoneFields.MILESTONE_EXPECTED_COMPLETION_DATE, getDateTimeService()); }
/** * Validate the milestone expected completion date field as a date * @see org.kuali.rice.kns.lookup.AbstractLookupableHelperServiceImpl#validateSearchParameters(java.util.Map) */
Validate the milestone expected completion date field as a date
validateSearchParameters
{ "repo_name": "ua-eas/kfs-devops-automation-fork", "path": "kfs-ar/src/main/java/org/kuali/kfs/module/ar/businessobject/lookup/ContractsGrantsMilestoneReportLookupableHelperServiceImpl.java", "license": "agpl-3.0", "size": 13209 }
[ "java.util.Map", "org.kuali.kfs.module.ar.ArPropertyConstants", "org.kuali.rice.krad.util.KRADConstants" ]
import java.util.Map; import org.kuali.kfs.module.ar.ArPropertyConstants; import org.kuali.rice.krad.util.KRADConstants;
import java.util.*; import org.kuali.kfs.module.ar.*; import org.kuali.rice.krad.util.*;
[ "java.util", "org.kuali.kfs", "org.kuali.rice" ]
java.util; org.kuali.kfs; org.kuali.rice;
1,939,145
@Test public void testSimpleCompleteConfiguration() throws Exception { CppConfigurationLoader loader = loaderWithOptionalTool(""); CppConfiguration toolchain = create(loader, "--cpu=cpu"); assertEquals("toolchain-identifier", toolchain.getToolchainIdentifier()); assertEquals("host-system-name", toolchain.getHostSystemName()); assertEquals("compiler", toolchain.getCompiler()); assertEquals("target-libc", toolchain.getTargetLibc()); assertEquals("piii", toolchain.getTargetCpu()); assertEquals("target-system-name", toolchain.getTargetGnuSystemName()); assertEquals(getToolPath("/path-to-ar"), toolchain.getToolPathFragment(Tool.AR)); assertEquals("abi-version", toolchain.getAbi()); assertEquals("abi-libc-version", toolchain.getAbiGlibcVersion()); assertTrue(toolchain.supportsGoldLinker()); assertFalse(toolchain.supportsStartEndLib()); assertFalse(toolchain.supportsInterfaceSharedObjects()); assertFalse(toolchain.supportsEmbeddedRuntimes()); assertFalse(toolchain.toolchainNeedsPic()); assertTrue(toolchain.supportsFission()); assertEquals( ImmutableList.of(getToolPath("/system-include-dir")), toolchain.getBuiltInIncludeDirectories()); assertNull(toolchain.getSysroot()); assertEquals(Arrays.asList("c", "fastbuild"), toolchain.getCompilerOptions(NO_FEATURES)); assertEquals(Arrays.<String>asList(), toolchain.getCOptions()); assertEquals(Arrays.asList("cxx", "cxx-fastbuild"), toolchain.getCxxOptions(NO_FEATURES)); assertEquals(Arrays.asList("unfiltered"), toolchain.getUnfilteredCompilerOptions(NO_FEATURES)); assertEquals(Arrays.<String>asList(), toolchain.getLinkOptions()); assertEquals( Arrays.asList("linker", "-Wl,-S", "linker-fastbuild", "fully static"), toolchain.getFullyStaticLinkOptions(NO_FEATURES, false)); assertEquals( Arrays.asList("linker", "-Wl,-S", "linker-fastbuild", "dynamic"), toolchain.getDynamicLinkOptions(NO_FEATURES, false)); assertEquals( Arrays.asList("linker", "-Wl,-S", "linker-fastbuild", "mostly static", "solinker"), toolchain.getFullyStaticLinkOptions(NO_FEATURES, true)); assertEquals( Arrays.asList("linker", "-Wl,-S", "linker-fastbuild", "dynamic", "solinker"), toolchain.getDynamicLinkOptions(NO_FEATURES, true)); assertEquals(Arrays.asList("objcopy"), toolchain.getObjCopyOptionsForEmbedding()); assertEquals(Arrays.<String>asList(), toolchain.getLdOptionsForEmbedding()); assertEquals(Arrays.asList("rcsD"), toolchain.getArFlags()); assertThat(toolchain.getAdditionalMakeVariables().entrySet()) .containsExactlyElementsIn( ImmutableMap.of( "SOME_MAKE_VARIABLE", "make-variable-value", "STACK_FRAME_UNLIMITED", "", "CC_FLAGS", "") .entrySet()); assertEquals(getToolPath("/path-to-ld"), toolchain.getLdExecutable()); assertEquals(getToolPath("/path-to-dwp"), toolchain.getToolPathFragment(Tool.DWP)); }
void function() throws Exception { CppConfigurationLoader loader = loaderWithOptionalTool(STR--cpu=cpuSTRtoolchain-identifierSTRhost-system-nameSTRcompilerSTRtarget-libcSTRpiiiSTRtarget-system-nameSTR/path-to-arSTRabi-versionSTRabi-libc-versionSTR/system-include-dirSTRcSTRfastbuildSTRcxxSTRcxx-fastbuildSTRunfilteredSTRlinkerSTR-Wl,-SSTRlinker-fastbuildSTRfully staticSTRlinkerSTR-Wl,-SSTRlinker-fastbuildSTRdynamicSTRlinkerSTR-Wl,-SSTRlinker-fastbuildSTRmostly staticSTRsolinkerSTRlinkerSTR-Wl,-SSTRlinker-fastbuildSTRdynamicSTRsolinkerSTRobjcopySTRrcsDSTRSOME_MAKE_VARIABLESTRmake-variable-valueSTRSTACK_FRAME_UNLIMITEDSTRSTRCC_FLAGSSTRSTR/path-to-ldSTR/path-to-dwp"), toolchain.getToolPathFragment(Tool.DWP)); }
/** * Checks that we do not accidentally change the proto format in incompatible * ways. Do not modify the configuration file in this test, except if you are * absolutely certain that it is backwards-compatible. */
Checks that we do not accidentally change the proto format in incompatible ways. Do not modify the configuration file in this test, except if you are absolutely certain that it is backwards-compatible
testSimpleCompleteConfiguration
{ "repo_name": "hermione521/bazel", "path": "src/test/java/com/google/devtools/build/lib/rules/cpp/CrosstoolConfigurationLoaderTest.java", "license": "apache-2.0", "size": 48479 }
[ "com.google.devtools.build.lib.rules.cpp.CppConfiguration" ]
import com.google.devtools.build.lib.rules.cpp.CppConfiguration;
import com.google.devtools.build.lib.rules.cpp.*;
[ "com.google.devtools" ]
com.google.devtools;
811,277
public com.google.cloud.dialogflow.v2.DetectIntentResponse detectIntent(com.google.cloud.dialogflow.v2.DetectIntentRequest request) { return blockingUnaryCall( getChannel(), getDetectIntentMethodHelper(), getCallOptions(), request); } } public static final class SessionsFutureStub extends io.grpc.stub.AbstractStub<SessionsFutureStub> { private SessionsFutureStub(io.grpc.Channel channel) { super(channel); } private SessionsFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); }
com.google.cloud.dialogflow.v2.DetectIntentResponse function(com.google.cloud.dialogflow.v2.DetectIntentRequest request) { return blockingUnaryCall( getChannel(), getDetectIntentMethodHelper(), getCallOptions(), request); } } public static final class SessionsFutureStub extends io.grpc.stub.AbstractStub<SessionsFutureStub> { private SessionsFutureStub(io.grpc.Channel channel) { super(channel); } private SessionsFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); }
/** * <pre> * Processes a natural language query and returns structured, actionable data * as a result. This method is not idempotent, because it may cause contexts * and session entity types to be updated, which in turn might affect * results of future queries. * </pre> */
<code> Processes a natural language query and returns structured, actionable data as a result. This method is not idempotent, because it may cause contexts and session entity types to be updated, which in turn might affect results of future queries. </code>
detectIntent
{ "repo_name": "pongad/api-client-staging", "path": "generated/java/grpc-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/SessionsGrpc.java", "license": "bsd-3-clause", "size": 18643 }
[ "io.grpc.stub.ClientCalls" ]
import io.grpc.stub.ClientCalls;
import io.grpc.stub.*;
[ "io.grpc.stub" ]
io.grpc.stub;
853,755
public void doHide_view_student_view(RunData data) { SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid()); state.setAttribute(VIEW_ASSIGNMENT_HIDE_STUDENT_VIEW_FLAG, Boolean.valueOf(true)); } // doHide_view_student_view
void function(RunData data) { SessionState state = ((JetspeedRunData) data).getPortletSessionState(((JetspeedRunData) data).getJs_peid()); state.setAttribute(VIEW_ASSIGNMENT_HIDE_STUDENT_VIEW_FLAG, Boolean.valueOf(true)); }
/** * Action is to hide the student view in the view assignment page */
Action is to hide the student view in the view assignment page
doHide_view_student_view
{ "repo_name": "harfalm/Sakai-10.1", "path": "assignment/assignment-tool/tool/src/java/org/sakaiproject/assignment/tool/AssignmentAction.java", "license": "apache-2.0", "size": 605178 }
[ "org.sakaiproject.cheftool.JetspeedRunData", "org.sakaiproject.cheftool.RunData", "org.sakaiproject.event.api.SessionState" ]
import org.sakaiproject.cheftool.JetspeedRunData; import org.sakaiproject.cheftool.RunData; import org.sakaiproject.event.api.SessionState;
import org.sakaiproject.cheftool.*; import org.sakaiproject.event.api.*;
[ "org.sakaiproject.cheftool", "org.sakaiproject.event" ]
org.sakaiproject.cheftool; org.sakaiproject.event;
1,498,561
@Override public String toString() { StringBuffer buff = new StringBuffer(); buff.append("[ "); if (name != null) { buff.append(name); buff.append(": "); } if (isBackward) { for (int i = 0; i < head.length; i++) { buff.append(PrintUtil.print(head[i])); buff.append(" "); } buff.append("<- "); for (int i = 0; i < body.length; i++) { buff.append(PrintUtil.print(body[i])); buff.append(" "); } } else { for (int i = 0; i < body.length; i++) { buff.append(PrintUtil.print(body[i])); buff.append(" "); } buff.append("-> "); for (int i = 0; i < head.length; i++) { buff.append(PrintUtil.print(head[i])); buff.append(" "); } } buff.append("]"); return buff.toString(); }
String function() { StringBuffer buff = new StringBuffer(); buff.append(STR); if (name != null) { buff.append(name); buff.append(STR); } if (isBackward) { for (int i = 0; i < head.length; i++) { buff.append(PrintUtil.print(head[i])); buff.append(" "); } buff.append(STR); for (int i = 0; i < body.length; i++) { buff.append(PrintUtil.print(body[i])); buff.append(" "); } } else { for (int i = 0; i < body.length; i++) { buff.append(PrintUtil.print(body[i])); buff.append(" "); } buff.append(STR); for (int i = 0; i < head.length; i++) { buff.append(PrintUtil.print(head[i])); buff.append(" "); } } buff.append("]"); return buff.toString(); }
/** * Printable string describing the rule */
Printable string describing the rule
toString
{ "repo_name": "jacekkopecky/parkjam", "path": "src/com/hp/hpl/jena/reasoner/rulesys/Rule.java", "license": "apache-2.0", "size": 41749 }
[ "com.hp.hpl.jena.util.PrintUtil" ]
import com.hp.hpl.jena.util.PrintUtil;
import com.hp.hpl.jena.util.*;
[ "com.hp.hpl" ]
com.hp.hpl;
1,316,259
Observable<ArrayList<Repo>> getUserStarredRepos(String username);
Observable<ArrayList<Repo>> getUserStarredRepos(String username);
/** * Get user's starred repositories. * @return */
Get user's starred repositories
getUserStarredRepos
{ "repo_name": "mingjunli/GithubApp", "path": "app/src/main/java/com/anly/githubapp/data/api/RepoApi.java", "license": "apache-2.0", "size": 4056 }
[ "com.anly.githubapp.data.model.Repo", "java.util.ArrayList" ]
import com.anly.githubapp.data.model.Repo; import java.util.ArrayList;
import com.anly.githubapp.data.model.*; import java.util.*;
[ "com.anly.githubapp", "java.util" ]
com.anly.githubapp; java.util;
2,155,152
Collection<ActivityRecoveryDAO> getActivityRecoveries();
Collection<ActivityRecoveryDAO> getActivityRecoveries();
/** * Returns all activity recovery objects for this process instance. */
Returns all activity recovery objects for this process instance
getActivityRecoveries
{ "repo_name": "isurusuranga/wso2-ode", "path": "bpel-dao/src/main/java/org/apache/ode/bpel/dao/ProcessInstanceDAO.java", "license": "apache-2.0", "size": 7897 }
[ "java.util.Collection" ]
import java.util.Collection;
import java.util.*;
[ "java.util" ]
java.util;
623,242
private static List<String> determineOperations(String[] args) { List<String> operations = new ArrayList<String>(); for (String arg : args) { if (!arg.startsWith("-")) { operations.add(arg); } } return operations; }
static List<String> function(String[] args) { List<String> operations = new ArrayList<String>(); for (String arg : args) { if (!arg.startsWith("-")) { operations.add(arg); } } return operations; }
/** * Determine the operations Flyway should execute. * * @param args The command-line arguments passed in. * @return The operations. An empty list if none. */
Determine the operations Flyway should execute
determineOperations
{ "repo_name": "jmahonin/flyway", "path": "flyway-commandline/src/main/java/org/flywaydb/commandline/Main.java", "license": "apache-2.0", "size": 17107 }
[ "java.util.ArrayList", "java.util.List" ]
import java.util.ArrayList; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,084,471
String param1=""; String param2=""; SearchRequestPopupFragment fragment = new SearchRequestPopupFragment(); Bundle args = new Bundle(); args.putString(ARG_PARAM1, param1); args.putString(ARG_PARAM2, param2); fragment.setArguments(args); return fragment; }
String param1=STR"; SearchRequestPopupFragment fragment = new SearchRequestPopupFragment(); Bundle args = new Bundle(); args.putString(ARG_PARAM1, param1); args.putString(ARG_PARAM2, param2); fragment.setArguments(args); return fragment; }
/** * Use this factory method to create a new instance of * this fragment using the provided parameters. * * @return A new instance of fragment SearchRequestPopupFragment. */
Use this factory method to create a new instance of this fragment using the provided parameters
newInstance
{ "repo_name": "fekracomputers/IslamicLibraryAndroid", "path": "app/src/main/java/com/fekracomputers/islamiclibrary/search/view/SearchRequestPopupFragment.java", "license": "gpl-3.0", "size": 5168 }
[ "android.os.Bundle" ]
import android.os.Bundle;
import android.os.*;
[ "android.os" ]
android.os;
1,869,270
private void validateAndConfigureLockType(String lockTypeConfigured) { Configuration configuration = new Configuration(true); String defaultFs = configuration.get("fs.defaultFS"); if (null != defaultFs && (defaultFs.startsWith(CarbonCommonConstants.HDFSURL_PREFIX) || defaultFs.startsWith(CarbonCommonConstants.VIEWFSURL_PREFIX) || defaultFs .startsWith(CarbonCommonConstants.ALLUXIOURL_PREFIX) || defaultFs .startsWith(CarbonCommonConstants.S3A_PREFIX)) && !CarbonCommonConstants.CARBON_LOCK_TYPE_HDFS.equalsIgnoreCase(lockTypeConfigured)) { LOGGER.warn("The value \"" + lockTypeConfigured + "\" configured for key " + LOCK_TYPE + " is invalid for current file system. " + "Use the default value " + CarbonCommonConstants.CARBON_LOCK_TYPE_HDFS + " instead."); carbonProperties.setProperty(LOCK_TYPE, CarbonCommonConstants.CARBON_LOCK_TYPE_HDFS); } else if (null != defaultFs && defaultFs.startsWith(CarbonCommonConstants.LOCAL_FILE_PREFIX) && !CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL.equalsIgnoreCase(lockTypeConfigured)) { carbonProperties.setProperty(LOCK_TYPE, CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL); LOGGER.warn("The value \"" + lockTypeConfigured + "\" configured for key " + LOCK_TYPE + " is invalid for current file system. " + "Use the default value " + CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL + " instead."); } }
void function(String lockTypeConfigured) { Configuration configuration = new Configuration(true); String defaultFs = configuration.get(STR); if (null != defaultFs && (defaultFs.startsWith(CarbonCommonConstants.HDFSURL_PREFIX) defaultFs.startsWith(CarbonCommonConstants.VIEWFSURL_PREFIX) defaultFs .startsWith(CarbonCommonConstants.ALLUXIOURL_PREFIX) defaultFs .startsWith(CarbonCommonConstants.S3A_PREFIX)) && !CarbonCommonConstants.CARBON_LOCK_TYPE_HDFS.equalsIgnoreCase(lockTypeConfigured)) { LOGGER.warn(STRSTR\STR + LOCK_TYPE + STR + STR + CarbonCommonConstants.CARBON_LOCK_TYPE_HDFS + STR); carbonProperties.setProperty(LOCK_TYPE, CarbonCommonConstants.CARBON_LOCK_TYPE_HDFS); } else if (null != defaultFs && defaultFs.startsWith(CarbonCommonConstants.LOCAL_FILE_PREFIX) && !CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL.equalsIgnoreCase(lockTypeConfigured)) { carbonProperties.setProperty(LOCK_TYPE, CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL); LOGGER.warn(STRSTR\STR + LOCK_TYPE + STR + STR + CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL + STR); } }
/** * The method decides and sets the lock type based on the configured system type * * @param lockTypeConfigured */
The method decides and sets the lock type based on the configured system type
validateAndConfigureLockType
{ "repo_name": "jatin9896/incubator-carbondata", "path": "core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java", "license": "apache-2.0", "size": 68650 }
[ "org.apache.carbondata.core.constants.CarbonCommonConstants", "org.apache.hadoop.conf.Configuration" ]
import org.apache.carbondata.core.constants.CarbonCommonConstants; import org.apache.hadoop.conf.Configuration;
import org.apache.carbondata.core.constants.*; import org.apache.hadoop.conf.*;
[ "org.apache.carbondata", "org.apache.hadoop" ]
org.apache.carbondata; org.apache.hadoop;
2,496,629
public BytesReference doc() { return doc; }
BytesReference function() { return doc; }
/** * Returns the artificial document for which term vectors are requested. */
Returns the artificial document for which term vectors are requested
doc
{ "repo_name": "camilojd/elasticsearch", "path": "core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java", "license": "apache-2.0", "size": 24370 }
[ "org.elasticsearch.common.bytes.BytesReference" ]
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.*;
[ "org.elasticsearch.common" ]
org.elasticsearch.common;
2,614,050
public static ReduceSinkDesc getReduceSinkDesc( ArrayList<ExprNodeDesc> keyCols, ArrayList<ExprNodeDesc> valueCols, List<String> outputColumnNames, boolean includeKey, int tag, int numPartitionFields, int numReducers, AcidUtils.Operation writeType) throws SemanticException { return getReduceSinkDesc(keyCols, keyCols.size(), valueCols, new ArrayList<List<Integer>>(), includeKey ? outputColumnNames.subList(0, keyCols.size()) : new ArrayList<String>(), includeKey ? outputColumnNames.subList(keyCols.size(), outputColumnNames.size()) : outputColumnNames, includeKey, tag, numPartitionFields, numReducers, writeType); }
static ReduceSinkDesc function( ArrayList<ExprNodeDesc> keyCols, ArrayList<ExprNodeDesc> valueCols, List<String> outputColumnNames, boolean includeKey, int tag, int numPartitionFields, int numReducers, AcidUtils.Operation writeType) throws SemanticException { return getReduceSinkDesc(keyCols, keyCols.size(), valueCols, new ArrayList<List<Integer>>(), includeKey ? outputColumnNames.subList(0, keyCols.size()) : new ArrayList<String>(), includeKey ? outputColumnNames.subList(keyCols.size(), outputColumnNames.size()) : outputColumnNames, includeKey, tag, numPartitionFields, numReducers, writeType); }
/** * Create the reduce sink descriptor. * * @param keyCols * The columns to be stored in the key * @param valueCols * The columns to be stored in the value * @param outputColumnNames * The output columns names * @param tag * The tag for this reducesink * @param numPartitionFields * The first numPartitionFields of keyCols will be partition columns. * If numPartitionFields=-1, then partition randomly. * @param numReducers * The number of reducers, set to -1 for automatic inference based on * input data size. * @param writeType Whether this is an Acid write, and if so whether it is insert, update, * or delete. * @return The reduceSinkDesc object. */
Create the reduce sink descriptor
getReduceSinkDesc
{ "repo_name": "wangbin83-gmail-com/hive-1.1.0-cdh5.4.8", "path": "ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java", "license": "apache-2.0", "size": 39511 }
[ "java.util.ArrayList", "java.util.List", "org.apache.hadoop.hive.ql.io.AcidUtils", "org.apache.hadoop.hive.ql.parse.SemanticException" ]
import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.parse.SemanticException;
import java.util.*; import org.apache.hadoop.hive.ql.io.*; import org.apache.hadoop.hive.ql.parse.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
2,188,220
@Test public void testSmallDataFit() { Map<Integer, double[]> data = new HashMap<>(); data.put(0, new double[] {-1.0915526, 1.81983527, -0.91409478, 0.70890712, -24.55724107}); data.put(1, new double[] {-0.61072904, 0.37545517, 0.21705352, 0.09516495, -26.57226867}); data.put(2, new double[] {0.05485406, 0.88219898, -0.80584547, 0.94668307, 61.80919728}); data.put(3, new double[] {-0.24835094, -0.34000053, -1.69984651, -1.45902635, -161.65525991}); data.put(4, new double[] {0.63675392, 0.31675535, 0.38837437, -1.1221971, -14.46432611}); data.put(5, new double[] {0.14194017, 2.18158997, -0.28397346, -0.62090588, -3.2122197}); data.put(6, new double[] {-0.53487507, 1.4454797, 0.21570443, -0.54161422, -46.5469012}); data.put(7, new double[] {-1.58812173, -0.73216803, -2.15670676, -1.03195988, -247.23559889}); data.put(8, new double[] {0.20702671, 0.92864654, 0.32721202, -0.09047503, 31.61484949}); data.put(9, new double[] {-0.37890345, -0.04846179, -0.84122753, -1.14667474, -124.92598583}); LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer(); LinearRegressionModel mdl = trainer.fit( data, parts, (k, v) -> Arrays.copyOfRange(v, 0, v.length - 1), (k, v) -> v[4] ); assertArrayEquals( new double[]{72.26948107, 15.95144674, 24.07403921, 66.73038781}, mdl.getWeights().getStorage().data(), 1e-6 ); assertEquals(2.8421709430404007e-14, mdl.getIntercept(), 1e-6); }
void function() { Map<Integer, double[]> data = new HashMap<>(); data.put(0, new double[] {-1.0915526, 1.81983527, -0.91409478, 0.70890712, -24.55724107}); data.put(1, new double[] {-0.61072904, 0.37545517, 0.21705352, 0.09516495, -26.57226867}); data.put(2, new double[] {0.05485406, 0.88219898, -0.80584547, 0.94668307, 61.80919728}); data.put(3, new double[] {-0.24835094, -0.34000053, -1.69984651, -1.45902635, -161.65525991}); data.put(4, new double[] {0.63675392, 0.31675535, 0.38837437, -1.1221971, -14.46432611}); data.put(5, new double[] {0.14194017, 2.18158997, -0.28397346, -0.62090588, -3.2122197}); data.put(6, new double[] {-0.53487507, 1.4454797, 0.21570443, -0.54161422, -46.5469012}); data.put(7, new double[] {-1.58812173, -0.73216803, -2.15670676, -1.03195988, -247.23559889}); data.put(8, new double[] {0.20702671, 0.92864654, 0.32721202, -0.09047503, 31.61484949}); data.put(9, new double[] {-0.37890345, -0.04846179, -0.84122753, -1.14667474, -124.92598583}); LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer(); LinearRegressionModel mdl = trainer.fit( data, parts, (k, v) -> Arrays.copyOfRange(v, 0, v.length - 1), (k, v) -> v[4] ); assertArrayEquals( new double[]{72.26948107, 15.95144674, 24.07403921, 66.73038781}, mdl.getWeights().getStorage().data(), 1e-6 ); assertEquals(2.8421709430404007e-14, mdl.getIntercept(), 1e-6); }
/** * Tests {@code fit()} method on a simple small dataset. */
Tests fit() method on a simple small dataset
testSmallDataFit
{ "repo_name": "irudyak/ignite", "path": "modules/ml/src/test/java/org/apache/ignite/ml/regressions/linear/LinearRegressionLSQRTrainerTest.java", "license": "apache-2.0", "size": 4497 }
[ "java.util.Arrays", "java.util.HashMap", "java.util.Map", "org.junit.Assert" ]
import java.util.Arrays; import java.util.HashMap; import java.util.Map; import org.junit.Assert;
import java.util.*; import org.junit.*;
[ "java.util", "org.junit" ]
java.util; org.junit;
2,360,286
public static Status createStatus(Object item, List status, List children) { return new LocalStatus(item, status, children); }
static Status function(Object item, List status, List children) { return new LocalStatus(item, status, children); }
/** * Creates a new <code>IStatus</code> for the given item. * * @param item The item that has a status to be displayed * @param status The list of status * @param children The children <code>IStatus</code> * @return A new <code>IStatus</code> */
Creates a new <code>IStatus</code> for the given item
createStatus
{ "repo_name": "RallySoftware/eclipselink.runtime", "path": "utils/eclipselink.utils.workbench/mappingsplugin/source/org/eclipse/persistence/tools/workbench/mappingsplugin/ui/common/StatusDialog.java", "license": "epl-1.0", "size": 21869 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,286,402
private static ArrayList<ItemInfo> getApplications(final Launcher context, ArrayList<ApplicationInfo> applications, boolean addAllApps) { ArrayList<ItemInfo> apps = new ArrayList<ItemInfo>(applications); // add notifications as a virtual app VirtualAppInfo notifications = new VirtualAppInfo(); notifications.setType(DatabaseHelper.VIRTUAL_NOTIFICATIONS_TYPE); notifications.setTitle(context.getString(R.string.notifications)); notifications.setDrawable(context.getResources().getDrawable(R.drawable.notifications)); apps.add(notifications); // add bookmarks as a virtual app VirtualAppInfo bookmarks = new VirtualAppInfo(); bookmarks.setType(DatabaseHelper.VIRTUAL_BROWSER_BOOKMARKS_TYPE); bookmarks.setTitle(context.getString(R.string.bookmarks)); bookmarks.setDrawable(context.getResources().getDrawable(R.drawable.bookmarks)); apps.add(bookmarks); // add browser history as a virtual app VirtualAppInfo browserHistory = new VirtualAppInfo(); browserHistory.setType(DatabaseHelper.VIRTUAL_BROWSER_HISTORY_TYPE); browserHistory.setTitle(context.getString(R.string.browser_history)); browserHistory.setDrawable(context.getResources().getDrawable(R.drawable.browser_history)); apps.add(browserHistory); if (addAllApps) { // add all apps as a virtual app VirtualAppInfo allApps = new VirtualAppInfo(); allApps.setType(DatabaseHelper.VIRTUAL_ALL_APPS_TYPE); allApps.setTitle(context.getString(R.string.all_apps)); allApps.setDrawable(context.getResources().getDrawable(R.drawable.all_apps)); apps.add(allApps); } // add spotlight web apps as a virtual app VirtualAppInfo spotlightWebApps = new VirtualAppInfo(); spotlightWebApps.setType(DatabaseHelper.VIRTUAL_SPOTLIGHT_WEB_APPS_TYPE); spotlightWebApps.setTitle(context.getString(R.string.spotlight_web_apps)); spotlightWebApps.setDrawable(context.getResources().getDrawable(R.drawable.spotlight)); apps.add(spotlightWebApps); // add live TV as a virtual app VirtualAppInfo liveTV = new VirtualAppInfo(); liveTV.setType(DatabaseHelper.VIRTUAL_LIVE_TV_TYPE); liveTV.setTitle(context.getString(R.string.live_tv)); liveTV.setDrawable(context.getResources().getDrawable(R.drawable.livetv)); apps.add(liveTV); Collections.sort(apps, new Comparator<ItemInfo>() {
static ArrayList<ItemInfo> function(final Launcher context, ArrayList<ApplicationInfo> applications, boolean addAllApps) { ArrayList<ItemInfo> apps = new ArrayList<ItemInfo>(applications); VirtualAppInfo notifications = new VirtualAppInfo(); notifications.setType(DatabaseHelper.VIRTUAL_NOTIFICATIONS_TYPE); notifications.setTitle(context.getString(R.string.notifications)); notifications.setDrawable(context.getResources().getDrawable(R.drawable.notifications)); apps.add(notifications); VirtualAppInfo bookmarks = new VirtualAppInfo(); bookmarks.setType(DatabaseHelper.VIRTUAL_BROWSER_BOOKMARKS_TYPE); bookmarks.setTitle(context.getString(R.string.bookmarks)); bookmarks.setDrawable(context.getResources().getDrawable(R.drawable.bookmarks)); apps.add(bookmarks); VirtualAppInfo browserHistory = new VirtualAppInfo(); browserHistory.setType(DatabaseHelper.VIRTUAL_BROWSER_HISTORY_TYPE); browserHistory.setTitle(context.getString(R.string.browser_history)); browserHistory.setDrawable(context.getResources().getDrawable(R.drawable.browser_history)); apps.add(browserHistory); if (addAllApps) { VirtualAppInfo allApps = new VirtualAppInfo(); allApps.setType(DatabaseHelper.VIRTUAL_ALL_APPS_TYPE); allApps.setTitle(context.getString(R.string.all_apps)); allApps.setDrawable(context.getResources().getDrawable(R.drawable.all_apps)); apps.add(allApps); } VirtualAppInfo spotlightWebApps = new VirtualAppInfo(); spotlightWebApps.setType(DatabaseHelper.VIRTUAL_SPOTLIGHT_WEB_APPS_TYPE); spotlightWebApps.setTitle(context.getString(R.string.spotlight_web_apps)); spotlightWebApps.setDrawable(context.getResources().getDrawable(R.drawable.spotlight)); apps.add(spotlightWebApps); VirtualAppInfo liveTV = new VirtualAppInfo(); liveTV.setType(DatabaseHelper.VIRTUAL_LIVE_TV_TYPE); liveTV.setTitle(context.getString(R.string.live_tv)); liveTV.setDrawable(context.getResources().getDrawable(R.drawable.livetv)); apps.add(liveTV); Collections.sort(apps, new Comparator<ItemInfo>() {
/** * Utility method to add virtual apps to the list of installed Android apps. * * @param context * @param applications * @param addAllApps * @return */
Utility method to add virtual apps to the list of installed Android apps
getApplications
{ "repo_name": "entertailion/Open-Launcher-for-GTV", "path": "src/com/entertailion/android/launcher/Dialogs.java", "license": "apache-2.0", "size": 47060 }
[ "com.entertailion.android.launcher.apps.ApplicationInfo", "com.entertailion.android.launcher.apps.VirtualAppInfo", "com.entertailion.android.launcher.database.DatabaseHelper", "com.entertailion.android.launcher.item.ItemInfo", "java.util.ArrayList", "java.util.Collections", "java.util.Comparator" ]
import com.entertailion.android.launcher.apps.ApplicationInfo; import com.entertailion.android.launcher.apps.VirtualAppInfo; import com.entertailion.android.launcher.database.DatabaseHelper; import com.entertailion.android.launcher.item.ItemInfo; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator;
import com.entertailion.android.launcher.apps.*; import com.entertailion.android.launcher.database.*; import com.entertailion.android.launcher.item.*; import java.util.*;
[ "com.entertailion.android", "java.util" ]
com.entertailion.android; java.util;
680,454
@AfterClass public static void tearDownAfterClass() throws Exception { SparkTestingUtils.recursivelyCleanDir("src/test/resources/testspark_json"); SparkTestingUtils.recursivelyCleanDir("src/test/resources/testspark_csv"); emf.close(); SparkTestingUtils.recursivelyCleanDir(System.getProperty("user.dir")+"/metastore_db"); emf = null; }
static void function() throws Exception { SparkTestingUtils.recursivelyCleanDir(STR); SparkTestingUtils.recursivelyCleanDir(STR); emf.close(); SparkTestingUtils.recursivelyCleanDir(System.getProperty(STR)+STR); emf = null; }
/** * Tear down after class. * * @throws Exception * the exception */
Tear down after class
tearDownAfterClass
{ "repo_name": "impetus-opensource/Kundera", "path": "src/kundera-spark/spark-core/src/test/java/com/impetus/client/spark/tests/SparkHDFSClientTest.java", "license": "apache-2.0", "size": 4264 }
[ "com.impetus.client.spark.utils.SparkTestingUtils" ]
import com.impetus.client.spark.utils.SparkTestingUtils;
import com.impetus.client.spark.utils.*;
[ "com.impetus.client" ]
com.impetus.client;
1,444,286
@Override public FactoryMetaDto createFactory(@NotNull final Map<String, String> factoryParameters) throws ApiException { // no need to check null value of url parameter as accept() method has performed the check final GithubUrl githubUrl = githubUrlParser.parse(factoryParameters.get(URL_PARAMETER_NAME)); // create factory from the following location if location exists, else create default factory return urlFactoryBuilder .createFactoryFromDevfile( githubUrl, new GithubFileContentProvider(githubUrl, urlFetcher), extractOverrideParams(factoryParameters)) .orElseGet(() -> newDto(FactoryDto.class).withV(CURRENT_VERSION).withSource("repo")) .acceptVisitor(new GithubFactoryVisitor(githubUrl)); } private class GithubFactoryVisitor implements FactoryVisitor { private final GithubUrl githubUrl; private GithubFactoryVisitor(GithubUrl githubUrl) { this.githubUrl = githubUrl; }
FactoryMetaDto function(@NotNull final Map<String, String> factoryParameters) throws ApiException { final GithubUrl githubUrl = githubUrlParser.parse(factoryParameters.get(URL_PARAMETER_NAME)); return urlFactoryBuilder .createFactoryFromDevfile( githubUrl, new GithubFileContentProvider(githubUrl, urlFetcher), extractOverrideParams(factoryParameters)) .orElseGet(() -> newDto(FactoryDto.class).withV(CURRENT_VERSION).withSource("repo")) .acceptVisitor(new GithubFactoryVisitor(githubUrl)); } private class GithubFactoryVisitor implements FactoryVisitor { private final GithubUrl githubUrl; private GithubFactoryVisitor(GithubUrl githubUrl) { this.githubUrl = githubUrl; }
/** * Create factory object based on provided parameters * * @param factoryParameters map containing factory data parameters provided through URL * @throws BadRequestException when data are invalid */
Create factory object based on provided parameters
createFactory
{ "repo_name": "codenvy/che", "path": "wsmaster/che-core-api-factory-github/src/main/java/org/eclipse/che/api/factory/server/github/GithubFactoryParametersResolver.java", "license": "epl-1.0", "size": 5692 }
[ "java.util.Map", "javax.validation.constraints.NotNull", "org.eclipse.che.api.core.ApiException", "org.eclipse.che.api.factory.shared.dto.FactoryDto", "org.eclipse.che.api.factory.shared.dto.FactoryMetaDto", "org.eclipse.che.api.factory.shared.dto.FactoryVisitor", "org.eclipse.che.dto.server.DtoFactory" ]
import java.util.Map; import javax.validation.constraints.NotNull; import org.eclipse.che.api.core.ApiException; import org.eclipse.che.api.factory.shared.dto.FactoryDto; import org.eclipse.che.api.factory.shared.dto.FactoryMetaDto; import org.eclipse.che.api.factory.shared.dto.FactoryVisitor; import org.eclipse.che.dto.server.DtoFactory;
import java.util.*; import javax.validation.constraints.*; import org.eclipse.che.api.core.*; import org.eclipse.che.api.factory.shared.dto.*; import org.eclipse.che.dto.server.*;
[ "java.util", "javax.validation", "org.eclipse.che" ]
java.util; javax.validation; org.eclipse.che;
2,166,219
byte[] encode(byte[] data); /** * Decode for ssl encrypted data * * @param data * @return Only decrypted app data, the handshake data will write back to remote by {@link SslHandshakeHandler}
byte[] encode(byte[] data); /** * Decode for ssl encrypted data * * @param data * @return Only decrypted app data, the handshake data will write back to remote by {@link SslHandshakeHandler}
/** * Encode data to ssl encrypted data. * * @param data * @return Encrypted app data */
Encode data to ssl encrypted data
encode
{ "repo_name": "mindwind/craft-atom", "path": "craft-atom-protocol-ssl/src/main/java/io/craft/atom/protocol/ssl/api/SslCodec.java", "license": "mit", "size": 598 }
[ "io.craft.atom.protocol.ssl.spi.SslHandshakeHandler" ]
import io.craft.atom.protocol.ssl.spi.SslHandshakeHandler;
import io.craft.atom.protocol.ssl.spi.*;
[ "io.craft.atom" ]
io.craft.atom;
2,747,804
public JPanel getJContentPane() { if (jContentPane == null) { jContentPane = new JPanel(); jContentPane.setLayout(new BorderLayout()); jContentPane.add(getControl(), java.awt.BorderLayout.EAST); jContentPane.add(getStatusPanel(), java.awt.BorderLayout.SOUTH); } return jContentPane; }
JPanel function() { if (jContentPane == null) { jContentPane = new JPanel(); jContentPane.setLayout(new BorderLayout()); jContentPane.add(getControl(), java.awt.BorderLayout.EAST); jContentPane.add(getStatusPanel(), java.awt.BorderLayout.SOUTH); } return jContentPane; }
/** * This method initializes jContentPane * * @return javax.swing.JPanel */
This method initializes jContentPane
getJContentPane
{ "repo_name": "noushadali/uiunit-core", "path": "src/java/com/cordys/cm/uiunit/framework/gui/OldGUI.java", "license": "apache-2.0", "size": 6503 }
[ "java.awt.BorderLayout", "javax.swing.JPanel" ]
import java.awt.BorderLayout; import javax.swing.JPanel;
import java.awt.*; import javax.swing.*;
[ "java.awt", "javax.swing" ]
java.awt; javax.swing;
408,205
public static Map<String,Object> toMap(Object object) { return gson.fromJson(gson.toJson(object),Map.class); }
static Map<String,Object> function(Object object) { return gson.fromJson(gson.toJson(object),Map.class); }
/** * Returns the JSON object as a plain java object by the specified type. * * @param map * a Map representation of an object. * @param type * the type of the desired object. * @return a plain java object by the specified type. */
Returns the JSON object as a plain java object by the specified type
toMap
{ "repo_name": "craciunbogdangeorge/GsonUtils", "path": "src/main/java/com/apecs/util/GsonUtils.java", "license": "apache-2.0", "size": 5313 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,017,560
private void writeJsonObjectBegin(BaseContext context) throws TException { getCurrentContext().write(); if (getCurrentContext().isMapKey()) { pushWriter(new ByteArrayOutputStream()); } pushContext(context); try { getCurrentWriter().writeStartObject(); } catch (IOException ex) { throw new TException(ex); } }
void function(BaseContext context) throws TException { getCurrentContext().write(); if (getCurrentContext().isMapKey()) { pushWriter(new ByteArrayOutputStream()); } pushContext(context); try { getCurrentWriter().writeStartObject(); } catch (IOException ex) { throw new TException(ex); } }
/** * Helper to write out the beginning of a Thrift type (either struct or map), * both of which are written as JsonObjects. */
Helper to write out the beginning of a Thrift type (either struct or map), both of which are written as JsonObjects
writeJsonObjectBegin
{ "repo_name": "anuraaga/armeria", "path": "thrift/src/main/java/com/linecorp/armeria/common/thrift/text/TTextProtocol.java", "license": "apache-2.0", "size": 26195 }
[ "java.io.ByteArrayOutputStream", "java.io.IOException", "org.apache.thrift.TException" ]
import java.io.ByteArrayOutputStream; import java.io.IOException; import org.apache.thrift.TException;
import java.io.*; import org.apache.thrift.*;
[ "java.io", "org.apache.thrift" ]
java.io; org.apache.thrift;
2,462,333
public Set<Frame> findFrameByNbr(Integer nbr) throws DataAccessException;
Set<Frame> function(Integer nbr) throws DataAccessException;
/** * JPQL Query - findFrameByNbr * */
JPQL Query - findFrameByNbr
findFrameByNbr
{ "repo_name": "didoux/Spring-BowlingDB", "path": "generated/bowling/dao/FrameDAO.java", "license": "gpl-2.0", "size": 2654 }
[ "java.util.Set", "org.springframework.dao.DataAccessException" ]
import java.util.Set; import org.springframework.dao.DataAccessException;
import java.util.*; import org.springframework.dao.*;
[ "java.util", "org.springframework.dao" ]
java.util; org.springframework.dao;
1,134,635
public void setDenseMatrixCudaPointer(Pointer densePtr) throws DMLRuntimeException { if (this.jcudaDenseMatrixPtr != null) { throw new DMLRuntimeException("jcudaDenseMatrixPtr was already allocated for " + this + ", this will cause a memory leak on the GPU"); } this.jcudaDenseMatrixPtr = densePtr; this.isSparse = false; if (getJcudaSparseMatrixPtr() != null) { getJcudaSparseMatrixPtr().deallocate(); jcudaSparseMatrixPtr = null; } getGPUContext().recordBlockUsage(this); }
void function(Pointer densePtr) throws DMLRuntimeException { if (this.jcudaDenseMatrixPtr != null) { throw new DMLRuntimeException(STR + this + STR); } this.jcudaDenseMatrixPtr = densePtr; this.isSparse = false; if (getJcudaSparseMatrixPtr() != null) { getJcudaSparseMatrixPtr().deallocate(); jcudaSparseMatrixPtr = null; } getGPUContext().recordBlockUsage(this); }
/** * Convenience method to directly set the dense matrix pointer on GPU * * @param densePtr dense pointer * @throws DMLRuntimeException ? */
Convenience method to directly set the dense matrix pointer on GPU
setDenseMatrixCudaPointer
{ "repo_name": "asurve/arvind-sysml2", "path": "src/main/java/org/apache/sysml/runtime/instructions/gpu/context/GPUObject.java", "license": "apache-2.0", "size": 35640 }
[ "org.apache.sysml.runtime.DMLRuntimeException" ]
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.*;
[ "org.apache.sysml" ]
org.apache.sysml;
753,352
public boolean boundsIntersect (Rectangle r) { return Colliders.intersects(bounds(), r); }
boolean function (Rectangle r) { return Colliders.intersects(bounds(), r); }
/** Returns true if this game object's bounds intersect with the given rectangle. * * @param r the rectangle to intersect. * @return true if the bounds intersect. */
Returns true if this game object's bounds intersect with the given rectangle
boundsIntersect
{ "repo_name": "ryoenji/libgdx", "path": "demos/very-angry-robots/very-angry-robots/src/com/badlydrawngames/veryangryrobots/mobiles/GameObject.java", "license": "apache-2.0", "size": 5100 }
[ "com.badlogic.gdx.math.Rectangle", "com.badlydrawngames.general.Colliders" ]
import com.badlogic.gdx.math.Rectangle; import com.badlydrawngames.general.Colliders;
import com.badlogic.gdx.math.*; import com.badlydrawngames.general.*;
[ "com.badlogic.gdx", "com.badlydrawngames.general" ]
com.badlogic.gdx; com.badlydrawngames.general;
1,725,476
protected FileRequest createFileRequest(int typ) { // DEBUG if ( DEBUG) Debug.println("CachedNetworkFile.createFileRequest() fullName=" + getFullName() + ", state=" + getFileState()); // Create a file load or save request return new SingleFileRequest(typ, getFileId(), getStreamId(), m_cacheFile.getInfo(), getFullName(), getFileState()); }
FileRequest function(int typ) { if ( DEBUG) Debug.println(STR + getFullName() + STR + getFileState()); return new SingleFileRequest(typ, getFileId(), getStreamId(), m_cacheFile.getInfo(), getFullName(), getFileState()); }
/** * Create a file load or save request. This method may be overridden to allow extending of the * SingleFileRequest class. * * @param typ int * @return FileRequest */
Create a file load or save request. This method may be overridden to allow extending of the SingleFileRequest class
createFileRequest
{ "repo_name": "loftuxab/community-edition-old", "path": "projects/alfresco-jlan/source/java/org/alfresco/jlan/server/filesys/db/CachedNetworkFile.java", "license": "lgpl-3.0", "size": 13742 }
[ "org.alfresco.jlan.debug.Debug", "org.alfresco.jlan.server.filesys.loader.FileRequest", "org.alfresco.jlan.server.filesys.loader.SingleFileRequest" ]
import org.alfresco.jlan.debug.Debug; import org.alfresco.jlan.server.filesys.loader.FileRequest; import org.alfresco.jlan.server.filesys.loader.SingleFileRequest;
import org.alfresco.jlan.debug.*; import org.alfresco.jlan.server.filesys.loader.*;
[ "org.alfresco.jlan" ]
org.alfresco.jlan;
833,657
public List<AzureFirewallIpConfiguration> ipConfigurations() { return this.innerProperties() == null ? null : this.innerProperties().ipConfigurations(); }
List<AzureFirewallIpConfiguration> function() { return this.innerProperties() == null ? null : this.innerProperties().ipConfigurations(); }
/** * Get the ipConfigurations property: IP configuration of the Azure Firewall resource. * * @return the ipConfigurations value. */
Get the ipConfigurations property: IP configuration of the Azure Firewall resource
ipConfigurations
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/fluent/models/AzureFirewallInner.java", "license": "mit", "size": 14546 }
[ "com.azure.resourcemanager.network.models.AzureFirewallIpConfiguration", "java.util.List" ]
import com.azure.resourcemanager.network.models.AzureFirewallIpConfiguration; import java.util.List;
import com.azure.resourcemanager.network.models.*; import java.util.*;
[ "com.azure.resourcemanager", "java.util" ]
com.azure.resourcemanager; java.util;
2,263,422
protected ResponseMessage sendLoginMessage() throws IOException, MessageTooLargeException { ResponseMessage temp = this.sendMessage(new Login(entry)); return temp; }
ResponseMessage function() throws IOException, MessageTooLargeException { ResponseMessage temp = this.sendMessage(new Login(entry)); return temp; }
/** * Sends a Login message to the server; may be overridden by subclasses that * need to add additional information to the Login message. * * @throws MessageTooLargeException * */
Sends a Login message to the server; may be overridden by subclasses that need to add additional information to the Login message
sendLoginMessage
{ "repo_name": "ecologylab/ecologylabAuthentication", "path": "src/ecologylab/authentication/nio/NIODatagramAuthClient.java", "license": "lgpl-3.0", "size": 7693 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
722,522
@SimpleFunction(description = "Draw a line on the screen.") public void DrawLine(int color, int x1, int y1, int x2, int y2) { String functionName = "DrawLine"; if (color != 0 && color != 1) { form.dispatchErrorOccurredEvent(this, functionName, ErrorMessages.ERROR_EV3_ILLEGAL_ARGUMENT, functionName); return; } byte[] command = Ev3BinaryParser.encodeDirectCommand(Ev3Constants.Opcode.UI_DRAW, false, 0, 0, "cccccc", Ev3Constants.UIDrawSubcode.LINE, (byte) color, (short) x1, (short) y1, (short) x2, (short) y2); sendCommand(functionName, command, false); command = Ev3BinaryParser.encodeDirectCommand(Ev3Constants.Opcode.UI_DRAW, false, 0, 0, "c", Ev3Constants.UIDrawSubcode.UPDATE); sendCommand(functionName, command, false); }
@SimpleFunction(description = STR) void function(int color, int x1, int y1, int x2, int y2) { String functionName = STR; if (color != 0 && color != 1) { form.dispatchErrorOccurredEvent(this, functionName, ErrorMessages.ERROR_EV3_ILLEGAL_ARGUMENT, functionName); return; } byte[] command = Ev3BinaryParser.encodeDirectCommand(Ev3Constants.Opcode.UI_DRAW, false, 0, 0, STR, Ev3Constants.UIDrawSubcode.LINE, (byte) color, (short) x1, (short) y1, (short) x2, (short) y2); sendCommand(functionName, command, false); command = Ev3BinaryParser.encodeDirectCommand(Ev3Constants.Opcode.UI_DRAW, false, 0, 0, "c", Ev3Constants.UIDrawSubcode.UPDATE); sendCommand(functionName, command, false); }
/** * Draw a line on the screen. */
Draw a line on the screen
DrawLine
{ "repo_name": "warren922/appinventor-sources", "path": "appinventor/components/src/com/google/appinventor/components/runtime/Ev3UI.java", "license": "apache-2.0", "size": 11562 }
[ "com.google.appinventor.components.annotations.SimpleFunction", "com.google.appinventor.components.runtime.util.ErrorMessages", "com.google.appinventor.components.runtime.util.Ev3BinaryParser", "com.google.appinventor.components.runtime.util.Ev3Constants" ]
import com.google.appinventor.components.annotations.SimpleFunction; import com.google.appinventor.components.runtime.util.ErrorMessages; import com.google.appinventor.components.runtime.util.Ev3BinaryParser; import com.google.appinventor.components.runtime.util.Ev3Constants;
import com.google.appinventor.components.annotations.*; import com.google.appinventor.components.runtime.util.*;
[ "com.google.appinventor" ]
com.google.appinventor;
1,587,336
String[] doConvert(RowData rowdata, RowMetaData metadata) throws PropertyVetoException, RowDataException { // Validate the metadata parameter. if (metadata == null) throw new NullPointerException("metadata"); // do the conversion. Vector forms = convertRowData(rowdata, metadata); // Return the list of form as String array. String[] data = new String[forms.size()]; for (int i=0; i< data.length; i++) data[i] = ((HTMLTable)forms.elementAt(i)).getTag(); return data; }
String[] doConvert(RowData rowdata, RowMetaData metadata) throws PropertyVetoException, RowDataException { if (metadata == null) throw new NullPointerException(STR); Vector forms = convertRowData(rowdata, metadata); String[] data = new String[forms.size()]; for (int i=0; i< data.length; i++) data[i] = ((HTMLTable)forms.elementAt(i)).getTag(); return data; }
/** * Converts the specified <i>rowdata</i> to an array of HTML strings. * * @param rowdata The row data. * @param metadata The meta data. * @return An array of HTML Strings. * @exception PropertyVetoException If a change is vetoed. * @exception RowDataException If a row data error occurs. **/
Converts the specified rowdata to an array of HTML strings
doConvert
{ "repo_name": "devjunix/libjt400-java", "path": "src/com/ibm/as400/util/servlet/HTMLFormConverter.java", "license": "epl-1.0", "size": 33101 }
[ "com.ibm.as400.util.html.HTMLTable", "java.beans.PropertyVetoException", "java.util.Vector" ]
import com.ibm.as400.util.html.HTMLTable; import java.beans.PropertyVetoException; import java.util.Vector;
import com.ibm.as400.util.html.*; import java.beans.*; import java.util.*;
[ "com.ibm.as400", "java.beans", "java.util" ]
com.ibm.as400; java.beans; java.util;
1,090,300
public static boolean saveToStream(OutputStream stream, byte[] data) { try { stream.write(data, 0, data.length); return true; } catch (IOException e) { e.printStackTrace(); return false; } finally { try { stream.close(); } catch (IOException e) { } } }
static boolean function(OutputStream stream, byte[] data) { try { stream.write(data, 0, data.length); return true; } catch (IOException e) { e.printStackTrace(); return false; } finally { try { stream.close(); } catch (IOException e) { } } }
/** * Saves bytes to a stream. * * @param stream * The stream to write the bytes to. * @param data * The data to save. * @return True, if successful. */
Saves bytes to a stream
saveToStream
{ "repo_name": "lottie-c/spl_tests_new", "path": "src/java/cz/cuni/mff/spl/utils/StreamUtils.java", "license": "bsd-3-clause", "size": 5174 }
[ "java.io.IOException", "java.io.OutputStream" ]
import java.io.IOException; import java.io.OutputStream;
import java.io.*;
[ "java.io" ]
java.io;
790,284
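A short usage sketch for the stream-writing helper above, assuming it lives in the StreamUtils class named in the file path; the output file name is a placeholder. Note that the helper closes the stream itself.
import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;

public class SaveToStreamDemo {
    public static void main(String[] args) throws Exception {
        byte[] data = "hello world".getBytes(StandardCharsets.UTF_8);
        // No try-with-resources needed: saveToStream closes the stream in its finally block.
        boolean ok = StreamUtils.saveToStream(new FileOutputStream("out.bin"), data);
        System.out.println(ok ? "written" : "write failed");
    }
}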
public void setContainerAnnotationGraphicsHLAPI( AnnotationGraphicsHLAPI elem){ if(elem!=null) item.setContainerAnnotationGraphics((AnnotationGraphics)elem.getContainedItem()); } //setters/remover for lists.
void function( AnnotationGraphicsHLAPI elem){ if(elem!=null) item.setContainerAnnotationGraphics((AnnotationGraphics)elem.getContainedItem()); }
/** * set ContainerAnnotationGraphics */
set ContainerAnnotationGraphics
setContainerAnnotationGraphicsHLAPI
{ "repo_name": "lhillah/pnmlframework", "path": "pnmlFw-PTNet/src/fr/lip6/move/pnml/ptnet/hlapi/FontHLAPI.java", "license": "epl-1.0", "size": 9267 }
[ "fr.lip6.move.pnml.ptnet.AnnotationGraphics" ]
import fr.lip6.move.pnml.ptnet.AnnotationGraphics;
import fr.lip6.move.pnml.ptnet.*;
[ "fr.lip6.move" ]
fr.lip6.move;
594,343
public void addIndicies(final int index) { if (indicies == null) { indicies = new TreeSet<>(); } indicies.add(index); }
void function(final int index) { if (indicies == null) { indicies = new TreeSet<>(); } indicies.add(index); }
/** * Adds start indices * * @param index */
Adds start indices
addIndicies
{ "repo_name": "renespeck/FOX", "path": "src/main/java/org/aksw/fox/data/Entity.java", "license": "gpl-2.0", "size": 3191 }
[ "java.util.TreeSet" ]
import java.util.TreeSet;
import java.util.*;
[ "java.util" ]
java.util;
937,773
@Test public void testDisableLedegerReplication() throws Exception { final LedgerUnderreplicationManager replicaMgr = lmf1 .newLedgerUnderreplicationManager(); // simulate few urLedgers before disabling final Long ledgerA = 0xfeadeefdacL; final String missingReplica = "localhost:3181"; // disabling replication replicaMgr.disableLedgerReplication(); LOG.info("Disabled Ledeger Replication"); try { replicaMgr.markLedgerUnderreplicated(ledgerA, missingReplica); } catch (UnavailableException e) { LOG.debug("Unexpected exception while marking urLedger", e); fail("Unexpected exception while marking urLedger" + e.getMessage()); } Future<Long> fA = getLedgerToReplicate(replicaMgr); try { fA.get(1, TimeUnit.SECONDS); fail("Shouldn't be able to find a ledger to replicate"); } catch (TimeoutException te) { // expected behaviour, as the replication is disabled isLedgerReplicationDisabled = false; } assertTrue("Ledger replication is not disabled!", !isLedgerReplicationDisabled); }
void function() throws Exception { final LedgerUnderreplicationManager replicaMgr = lmf1 .newLedgerUnderreplicationManager(); final Long ledgerA = 0xfeadeefdacL; final String missingReplica = STR; replicaMgr.disableLedgerReplication(); LOG.info(STR); try { replicaMgr.markLedgerUnderreplicated(ledgerA, missingReplica); } catch (UnavailableException e) { LOG.debug(STR, e); fail(STR + e.getMessage()); } Future<Long> fA = getLedgerToReplicate(replicaMgr); try { fA.get(1, TimeUnit.SECONDS); fail(STR); } catch (TimeoutException te) { isLedgerReplicationDisabled = false; } assertTrue(STR, !isLedgerReplicationDisabled); }
/** * Test disabling the ledger re-replication. After disabling, it will not be * able to getLedgerToRereplicate(). These calls will enter into infinite * waiting until the rereplication process is enabled. */
Test disabling the ledger re-replication. After disabling, it will not be able to getLedgerToRereplicate(). These calls will enter into infinite waiting until the rereplication process is enabled
testDisableLedegerReplication
{ "repo_name": "ivankelly/bookkeeper", "path": "bookkeeper-server/src/test/java/org/apache/bookkeeper/replication/TestLedgerUnderreplicationManager.java", "license": "apache-2.0", "size": 31224 }
[ "java.util.concurrent.Future", "java.util.concurrent.TimeUnit", "java.util.concurrent.TimeoutException", "org.apache.bookkeeper.meta.LedgerUnderreplicationManager", "org.apache.bookkeeper.replication.ReplicationException", "org.junit.Assert" ]
import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.apache.bookkeeper.meta.LedgerUnderreplicationManager; import org.apache.bookkeeper.replication.ReplicationException; import org.junit.Assert;
import java.util.concurrent.*; import org.apache.bookkeeper.meta.*; import org.apache.bookkeeper.replication.*; import org.junit.*;
[ "java.util", "org.apache.bookkeeper", "org.junit" ]
java.util; org.apache.bookkeeper; org.junit;
1,680,528
Future<RouteGetResponse> getAsync(String resourceGroupName, String routeTableName, String routeName);
Future<RouteGetResponse> getAsync(String resourceGroupName, String routeTableName, String routeName);
/** * The Get route operation retrieves information about the specified route * from the route table. * * @param resourceGroupName Required. The name of the resource group. * @param routeTableName Required. The name of the route table. * @param routeName Required. The name of the route. * @return Response for GetRoute Api service call */
The Get route operation retrieves information about the specified route from the route table
getAsync
{ "repo_name": "southworkscom/azure-sdk-for-java", "path": "resource-management/azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/RouteOperations.java", "license": "apache-2.0", "size": 12161 }
[ "com.microsoft.azure.management.network.models.RouteGetResponse", "java.util.concurrent.Future" ]
import com.microsoft.azure.management.network.models.RouteGetResponse; import java.util.concurrent.Future;
import com.microsoft.azure.management.network.models.*; import java.util.concurrent.*;
[ "com.microsoft.azure", "java.util" ]
com.microsoft.azure; java.util;
2,386,508
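A hedged sketch of how the asynchronous route lookup declared above might be called; the RouteOperations instance and the resource names are placeholders.
import com.microsoft.azure.management.network.RouteOperations;
import com.microsoft.azure.management.network.models.RouteGetResponse;
import java.util.concurrent.Future;

public class RouteLookupDemo {
    static RouteGetResponse fetchRoute(RouteOperations routeOperations) throws Exception {
        // The names below are illustrative; the Future is resolved with get().
        Future<RouteGetResponse> pending =
                routeOperations.getAsync("myResourceGroup", "myRouteTable", "myRoute");
        return pending.get();
    }
}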
@Path("nodes") @POST @Consumes(MediaType.APPLICATION_JSON) public void registerNode(Map<String, String> formParams) { auth.requireManage(); String node = formParams.get("node"); if (node == null) { throw new BadRequestException("Node not found in params"); } if (logger.isDebugEnabled()) logger.debug("Register node: " + node); client.registerNode(node, Time.currentTime()); adminEvent.operation(OperationType.ACTION).resourcePath(uriInfo).success(); }
@Path("nodes") @Consumes(MediaType.APPLICATION_JSON) void function(Map<String, String> formParams) { auth.requireManage(); String node = formParams.get("node"); if (node == null) { throw new BadRequestException(STR); } if (logger.isDebugEnabled()) logger.debug(STR + node); client.registerNode(node, Time.currentTime()); adminEvent.operation(OperationType.ACTION).resourcePath(uriInfo).success(); }
/** * Manually register a cluster node to this client - usually it's not needed to call this directly, as the adapter should handle it * by sending a registration request to Keycloak * * @param formParams */
Manually register a cluster node to this client - usually it's not needed to call this directly, as the adapter should handle it by sending a registration request to Keycloak
registerNode
{ "repo_name": "j-bore/keycloak", "path": "services/src/main/java/org/keycloak/services/resources/admin/ClientResource.java", "license": "apache-2.0", "size": 15304 }
[ "java.util.Map", "javax.ws.rs.Consumes", "javax.ws.rs.Path", "javax.ws.rs.core.MediaType", "org.jboss.resteasy.spi.BadRequestException", "org.keycloak.events.admin.OperationType", "org.keycloak.util.Time" ]
import java.util.Map; import javax.ws.rs.Consumes; import javax.ws.rs.Path; import javax.ws.rs.core.MediaType; import org.jboss.resteasy.spi.BadRequestException; import org.keycloak.events.admin.OperationType; import org.keycloak.util.Time;
import java.util.*; import javax.ws.rs.*; import javax.ws.rs.core.*; import org.jboss.resteasy.spi.*; import org.keycloak.events.admin.*; import org.keycloak.util.*;
[ "java.util", "javax.ws", "org.jboss.resteasy", "org.keycloak.events", "org.keycloak.util" ]
java.util; javax.ws; org.jboss.resteasy; org.keycloak.events; org.keycloak.util;
2,128,402
public static boolean isMouseUpOnce(MouseButton button) { return MouseImpl.isMouseUpOnce(button); }
static boolean function(MouseButton button) { return MouseImpl.isMouseUpOnce(button); }
/** * Determines if a mouse button is released. * @param button which button to check * @return whether the button is up starting at this frame */
Determines if a mouse button is released
isMouseUpOnce
{ "repo_name": "AaronFriesen/Trydent", "path": "src/main/java/edu/gatech/cs2340/trydent/Mouse.java", "license": "mit", "size": 1454 }
[ "edu.gatech.cs2340.trydent.internal.MouseImpl" ]
import edu.gatech.cs2340.trydent.internal.MouseImpl;
import edu.gatech.cs2340.trydent.internal.*;
[ "edu.gatech.cs2340" ]
edu.gatech.cs2340;
1,554,091
@Test public void checkLabel() { // Initially it should be unset. assertNull(series.getLabel()); // We should be able to set it. series.setLabel("label"); assertEquals("label", series.getLabel()); // We should be able to unset it. series.setLabel(null); assertNull(series.getLabel()); return; }
void function() { assertNull(series.getLabel()); series.setLabel("label"); assertEquals("label", series.getLabel()); series.setLabel(null); assertNull(series.getLabel()); return; }
/** * Checks that the label can be set. */
Checks that the label can be set
checkLabel
{ "repo_name": "gorindn/ice", "path": "tests/org.eclipse.ice.viz.service.test/src/org/eclipse/ice/viz/service/test/AbstractSeriesTester.java", "license": "epl-1.0", "size": 3530 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
2,792,147
public void afterIndexRowRequalification(Boolean success, CompactCompositeIndexKey ccKey, ExecRow row, Activation activation);
void function(Boolean success, CompactCompositeIndexKey ccKey, ExecRow row, Activation activation);
/** * Called after a row is qualified or disqualified in the requalification phase (in * the non-txn case) by SortedMap2IndexScanController * * @param success * true means requalification succeeded, false means it failed and * null means that it was skipped with success since no change in * value was detected */
Called after a row is qualified or disqualified in the requalification phase (in the non-txn case) by SortedMap2IndexScanController
afterIndexRowRequalification
{ "repo_name": "gemxd/gemfirexd-oss", "path": "gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/engine/GemFireXDQueryObserver.java", "license": "apache-2.0", "size": 36937 }
[ "com.pivotal.gemfirexd.internal.engine.store.CompactCompositeIndexKey", "com.pivotal.gemfirexd.internal.iapi.sql.Activation", "com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecRow" ]
import com.pivotal.gemfirexd.internal.engine.store.CompactCompositeIndexKey; import com.pivotal.gemfirexd.internal.iapi.sql.Activation; import com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecRow;
import com.pivotal.gemfirexd.internal.engine.store.*; import com.pivotal.gemfirexd.internal.iapi.sql.*; import com.pivotal.gemfirexd.internal.iapi.sql.execute.*;
[ "com.pivotal.gemfirexd" ]
com.pivotal.gemfirexd;
2,530,149
private String getUsername(Metadata metadata, PropertyList propertyList) throws SFTPException { // Check ActionProperties first Property username = PropertyHelper.getFromList(propertyList, PROPERTY_USERNAME); // Check MetadataProperties if (username == null) { username = PropertyHelper.getFromList(metadata.getPropertyList(), PROPERTY_USERNAME); } if (username == null || username.getType() != PropertyType.TEXT) { throw new SFTPException("No username provided for SFTP-Connection"); } return ((TextProperty) username).getValue().getValue(); }
String function(Metadata metadata, PropertyList propertyList) throws SFTPException { Property username = PropertyHelper.getFromList(propertyList, PROPERTY_USERNAME); if (username == null) { username = PropertyHelper.getFromList(metadata.getPropertyList(), PROPERTY_USERNAME); } if (username == null username.getType() != PropertyType.TEXT) { throw new SFTPException(STR); } return ((TextProperty) username).getValue().getValue(); }
/** * Retrieves the username from the property list. * * @param metadata * the metadata holding additional connection properties * @param propertyList * the property list holding the connection properties * * @return the SFTP username * * @throws SFTPException * when the properties do not hold a username */
Retrieves the username from the property list
getUsername
{ "repo_name": "NABUCCO/org.nabucco.testautomation.engine.proxy.process", "path": "org.nabucco.testautomation.engine.proxy.process/src/main/org/nabucco/testautomation/engine/proxy/process/command/sftp/ConnectCommand.java", "license": "epl-1.0", "size": 6470 }
[ "org.nabucco.testautomation.engine.proxy.process.exeption.SFTPException", "org.nabucco.testautomation.property.facade.datatype.PropertyList", "org.nabucco.testautomation.property.facade.datatype.TextProperty", "org.nabucco.testautomation.property.facade.datatype.base.Property", "org.nabucco.testautomation.property.facade.datatype.base.PropertyType", "org.nabucco.testautomation.property.facade.datatype.util.PropertyHelper", "org.nabucco.testautomation.script.facade.datatype.metadata.Metadata" ]
import org.nabucco.testautomation.engine.proxy.process.exeption.SFTPException; import org.nabucco.testautomation.property.facade.datatype.PropertyList; import org.nabucco.testautomation.property.facade.datatype.TextProperty; import org.nabucco.testautomation.property.facade.datatype.base.Property; import org.nabucco.testautomation.property.facade.datatype.base.PropertyType; import org.nabucco.testautomation.property.facade.datatype.util.PropertyHelper; import org.nabucco.testautomation.script.facade.datatype.metadata.Metadata;
import org.nabucco.testautomation.engine.proxy.process.exeption.*; import org.nabucco.testautomation.property.facade.datatype.*; import org.nabucco.testautomation.property.facade.datatype.base.*; import org.nabucco.testautomation.property.facade.datatype.util.*; import org.nabucco.testautomation.script.facade.datatype.metadata.*;
[ "org.nabucco.testautomation" ]
org.nabucco.testautomation;
2,032,673
public void seekStationAsync(float frequency, boolean isUp) { mFmServiceHandler.removeMessages(FmListener.MSGID_SEEK_FINISHED); final int bundleSize = 2; Bundle bundle = new Bundle(bundleSize); bundle.putFloat(FM_FREQUENCY, frequency); bundle.putBoolean(OPTION, isUp); Message msg = mFmServiceHandler.obtainMessage(FmListener.MSGID_SEEK_FINISHED); msg.setData(bundle); mFmServiceHandler.sendMessage(msg); }
void function(float frequency, boolean isUp) { mFmServiceHandler.removeMessages(FmListener.MSGID_SEEK_FINISHED); final int bundleSize = 2; Bundle bundle = new Bundle(bundleSize); bundle.putFloat(FM_FREQUENCY, frequency); bundle.putBoolean(OPTION, isUp); Message msg = mFmServiceHandler.obtainMessage(FmListener.MSGID_SEEK_FINISHED); msg.setData(bundle); mFmServiceHandler.sendMessage(msg); }
/** * Seek station according to frequency and direction * * @param frequency start frequency (100KHZ, 87.5) * @param isUp direction (true, next station; false, previous station) * * @return the frequency after seek */
Seek station according to frequency and direction
seekStationAsync
{ "repo_name": "KobeMing/FMRadio", "path": "src/com/android/fmradio/FmService.java", "license": "apache-2.0", "size": 97210 }
[ "android.os.Bundle", "android.os.Message" ]
import android.os.Bundle; import android.os.Message;
import android.os.*;
[ "android.os" ]
android.os;
353,832
public Alert getAlert(BigInteger alertId) throws IOException, TokenExpiredException { String requestUrl = RESOURCE + "/" + alertId.toString(); ArgusResponse response = getClient().executeHttpRequest(ArgusHttpClient.RequestType.GET, requestUrl, null); assertValidResponse(response, requestUrl); return fromJson(response.getResult(), Alert.class); }
Alert function(BigInteger alertId) throws IOException, TokenExpiredException { String requestUrl = RESOURCE + "/" + alertId.toString(); ArgusResponse response = getClient().executeHttpRequest(ArgusHttpClient.RequestType.GET, requestUrl, null); assertValidResponse(response, requestUrl); return fromJson(response.getResult(), Alert.class); }
/** * Returns the alert for the given ID. * * @param alertId The alert ID. * * @return The alert for the given ID. May be null. * * @throws IOException If the server cannot be reached. * @throws TokenExpiredException If the token sent along with the request has expired */
Returns the alert for the given ID
getAlert
{ "repo_name": "SalesforceEng/Argus", "path": "ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/AlertService.java", "license": "bsd-3-clause", "size": 23177 }
[ "com.salesforce.dva.argus.sdk.ArgusHttpClient", "com.salesforce.dva.argus.sdk.entity.Alert", "com.salesforce.dva.argus.sdk.exceptions.TokenExpiredException", "java.io.IOException", "java.math.BigInteger" ]
import com.salesforce.dva.argus.sdk.ArgusHttpClient; import com.salesforce.dva.argus.sdk.entity.Alert; import com.salesforce.dva.argus.sdk.exceptions.TokenExpiredException; import java.io.IOException; import java.math.BigInteger;
import com.salesforce.dva.argus.sdk.*; import com.salesforce.dva.argus.sdk.entity.*; import com.salesforce.dva.argus.sdk.exceptions.*; import java.io.*; import java.math.*;
[ "com.salesforce.dva", "java.io", "java.math" ]
com.salesforce.dva; java.io; java.math;
1,489,345
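A brief usage sketch for the alert lookup above; how the AlertService instance is obtained and the alert ID are illustrative.
import com.salesforce.dva.argus.sdk.AlertService;
import com.salesforce.dva.argus.sdk.entity.Alert;
import java.math.BigInteger;

public class GetAlertDemo {
    static void printAlert(AlertService alertService) throws Exception {
        // Per the Javadoc above, the result may be null if no alert exists for this ID.
        Alert alert = alertService.getAlert(BigInteger.valueOf(12345));
        System.out.println(alert != null ? alert : "alert not found");
    }
}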
public synchronized void createPrincipal(String principal, String password) throws Exception { String orgName= conf.getProperty(ORG_NAME); String orgDomain = conf.getProperty(ORG_DOMAIN); String baseDn = "ou=users,dc=" + orgName.toLowerCase(Locale.ENGLISH) + ",dc=" + orgDomain.toLowerCase(Locale.ENGLISH); String content = "dn: uid=" + principal + "," + baseDn + "\n" + "objectClass: top\n" + "objectClass: person\n" + "objectClass: inetOrgPerson\n" + "objectClass: krb5principal\n" + "objectClass: krb5kdcentry\n" + "cn: " + principal + "\n" + "sn: " + principal + "\n" + "uid: " + principal + "\n" + "userPassword: " + password + "\n" + "krb5PrincipalName: " + principal + "@" + getRealm() + "\n" + "krb5KeyVersionNumber: 0"; for (LdifEntry ldifEntry : new LdifReader(new StringReader(content))) { ds.getAdminSession().add(new DefaultEntry(ds.getSchemaManager(), ldifEntry.getEntry())); } }
synchronized void function(String principal, String password) throws Exception { String orgName= conf.getProperty(ORG_NAME); String orgDomain = conf.getProperty(ORG_DOMAIN); String baseDn = STR + orgName.toLowerCase(Locale.ENGLISH) + ",dc=" + orgDomain.toLowerCase(Locale.ENGLISH); String content = STR + principal + "," + baseDn + "\n" + STR + STR + STR + STR + STR + STR + principal + "\n" + STR + principal + "\n" + STR + principal + "\n" + STR + password + "\n" + STR + principal + "@" + getRealm() + "\n" + STR; for (LdifEntry ldifEntry : new LdifReader(new StringReader(content))) { ds.getAdminSession().add(new DefaultEntry(ds.getSchemaManager(), ldifEntry.getEntry())); } }
/** * Creates a principal in the KDC with the specified user and password. * * @param principal principal name, do not include the domain. * @param password password. * @throws Exception thrown if the principal could not be created. */
Creates a principal in the KDC with the specified user and password
createPrincipal
{ "repo_name": "aliyun-beta/aliyun-oss-hadoop-fs", "path": "hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java", "license": "apache-2.0", "size": 23226 }
[ "java.io.StringReader", "java.util.Locale", "org.apache.directory.api.ldap.model.entry.DefaultEntry", "org.apache.directory.api.ldap.model.ldif.LdifEntry", "org.apache.directory.api.ldap.model.ldif.LdifReader" ]
import java.io.StringReader; import java.util.Locale; import org.apache.directory.api.ldap.model.entry.DefaultEntry; import org.apache.directory.api.ldap.model.ldif.LdifEntry; import org.apache.directory.api.ldap.model.ldif.LdifReader;
import java.io.*; import java.util.*; import org.apache.directory.api.ldap.model.entry.*; import org.apache.directory.api.ldap.model.ldif.*;
[ "java.io", "java.util", "org.apache.directory" ]
java.io; java.util; org.apache.directory;
560,354
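A hedged sketch of driving the principal-creation helper above from a test; the MiniKdc lifecycle shown here follows its usual createConf/start/stop pattern, and the work directory and credentials are placeholders.
import java.io.File;
import java.util.Properties;
import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcDemo {
    public static void main(String[] args) throws Exception {
        Properties conf = MiniKdc.createConf();                 // default ORG_NAME / ORG_DOMAIN
        MiniKdc kdc = new MiniKdc(conf, new File("target/kdc-workdir"));
        kdc.start();
        try {
            // Principal name without the realm; the realm is appended internally.
            kdc.createPrincipal("alice", "alice-password");
        } finally {
            kdc.stop();
        }
    }
}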
public static MapType findMapType(TypeMirror typeMirror, EnunciateJacksonContext context) { if (!(typeMirror instanceof DeclaredType)) { return null; } DeclaredType declaredType = (DeclaredType) typeMirror; TypeElement element = (TypeElement) declaredType.asElement(); if (element == null) { return null; } else { String fqn = element.getQualifiedName().toString(); @SuppressWarnings ( "unchecked" ) Map<String, MapType> mapTypes = (Map<String, MapType>) context.getContext().getProperty(PROPERTY_MAP_TYPES); if (mapTypes == null) { mapTypes = new HashMap<String, MapType>(); context.getContext().setProperty(PROPERTY_MAP_TYPES, mapTypes); } MapType mapType = mapTypes.get(fqn); if (mapType != null) { return mapType; } else { DeclaredType declaredMapType = findMapTypeDeclaration(declaredType, context); if (declaredMapType == null) { return null; } MapType newMapType = new MapType(declaredType, context.getContext().getProcessingEnvironment()); mapTypes.put(fqn, newMapType); TypeMirror keyType = null; TypeMirror valueType = null; List<? extends TypeMirror> typeArgs = declaredMapType.getTypeArguments(); if ((typeArgs != null) && (typeArgs.size() == 2)) { Iterator<? extends TypeMirror> argIt = typeArgs.iterator(); keyType = argIt.next(); valueType = argIt.next(); } if ((keyType == null) || (valueType == null)) { TypeMirror objectType = TypeMirrorUtils.objectType(context.getContext().getProcessingEnvironment()); keyType = objectType; valueType = objectType; } TypeMirror mapKeyType = findMapType(keyType, context); newMapType.keyType = mapKeyType == null ? keyType : mapKeyType; TypeMirror mapValueType = findMapType(valueType, context); newMapType.valueType = mapValueType == null ? valueType : mapValueType; return newMapType; } } }
static MapType function(TypeMirror typeMirror, EnunciateJacksonContext context) { if (!(typeMirror instanceof DeclaredType)) { return null; } DeclaredType declaredType = (DeclaredType) typeMirror; TypeElement element = (TypeElement) declaredType.asElement(); if (element == null) { return null; } else { String fqn = element.getQualifiedName().toString(); @SuppressWarnings ( STR ) Map<String, MapType> mapTypes = (Map<String, MapType>) context.getContext().getProperty(PROPERTY_MAP_TYPES); if (mapTypes == null) { mapTypes = new HashMap<String, MapType>(); context.getContext().setProperty(PROPERTY_MAP_TYPES, mapTypes); } MapType mapType = mapTypes.get(fqn); if (mapType != null) { return mapType; } else { DeclaredType declaredMapType = findMapTypeDeclaration(declaredType, context); if (declaredMapType == null) { return null; } MapType newMapType = new MapType(declaredType, context.getContext().getProcessingEnvironment()); mapTypes.put(fqn, newMapType); TypeMirror keyType = null; TypeMirror valueType = null; List<? extends TypeMirror> typeArgs = declaredMapType.getTypeArguments(); if ((typeArgs != null) && (typeArgs.size() == 2)) { Iterator<? extends TypeMirror> argIt = typeArgs.iterator(); keyType = argIt.next(); valueType = argIt.next(); } if ((keyType == null) (valueType == null)) { TypeMirror objectType = TypeMirrorUtils.objectType(context.getContext().getProcessingEnvironment()); keyType = objectType; valueType = objectType; } TypeMirror mapKeyType = findMapType(keyType, context); newMapType.keyType = mapKeyType == null ? keyType : mapKeyType; TypeMirror mapValueType = findMapType(valueType, context); newMapType.valueType = mapValueType == null ? valueType : mapValueType; return newMapType; } } }
/** * Finds the map type for the specified type mirror, if it exists. * * @param typeMirror The type mirror. * @param context The context * @return The map type or null. */
Finds the map type for the specified type mirror, if it exists
findMapType
{ "repo_name": "uniqueid001/enunciate", "path": "jackson/src/main/java/com/webcohesion/enunciate/modules/jackson/model/util/MapType.java", "license": "apache-2.0", "size": 5934 }
[ "com.webcohesion.enunciate.javac.decorations.type.TypeMirrorUtils", "com.webcohesion.enunciate.modules.jackson.EnunciateJacksonContext", "java.util.HashMap", "java.util.Iterator", "java.util.List", "java.util.Map", "javax.lang.model.element.TypeElement", "javax.lang.model.type.DeclaredType", "javax.lang.model.type.TypeMirror" ]
import com.webcohesion.enunciate.javac.decorations.type.TypeMirrorUtils; import com.webcohesion.enunciate.modules.jackson.EnunciateJacksonContext; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.lang.model.element.TypeElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeMirror;
import com.webcohesion.enunciate.javac.decorations.type.*; import com.webcohesion.enunciate.modules.jackson.*; import java.util.*; import javax.lang.model.element.*; import javax.lang.model.type.*;
[ "com.webcohesion.enunciate", "java.util", "javax.lang" ]
com.webcohesion.enunciate; java.util; javax.lang;
613,567
public void addMemberToSystem(ObjectName objectName, MemberMXBean proxy, FederationComponent newState) { if (objectName.equals(thisMemberName)) { ObjectName distrObjectName = MBeanJMXAdapter.getDistributedSystemName(); DistributedSystemMXBean systemMBean = new DistributedSystemMBean(this); service.registerInternalMBean(systemMBean, distrObjectName); this.systemLevelNotifEmitter = (DistributedSystemMBean) service.getDistributedSystemMXBean(); this.distListener = new DistributedSystemNotifListener(); } if (mapOfMembers != null) { Objects.requireNonNull(objectName); Objects.requireNonNull(proxy); mapOfMembers.put(objectName, proxy); memberSetSize = mapOfMembers.values().size(); } updateMember(objectName, newState, null); try { mbeanServer.addNotificationListener(objectName, distListener, null, null); } catch (InstanceNotFoundException e) { if (logger.isDebugEnabled()) { logger.debug(e.getMessage()); } logger.info("{} Instance Not Found in Platform MBean Server", objectName); } }
void function(ObjectName objectName, MemberMXBean proxy, FederationComponent newState) { if (objectName.equals(thisMemberName)) { ObjectName distrObjectName = MBeanJMXAdapter.getDistributedSystemName(); DistributedSystemMXBean systemMBean = new DistributedSystemMBean(this); service.registerInternalMBean(systemMBean, distrObjectName); this.systemLevelNotifEmitter = (DistributedSystemMBean) service.getDistributedSystemMXBean(); this.distListener = new DistributedSystemNotifListener(); } if (mapOfMembers != null) { Objects.requireNonNull(objectName); Objects.requireNonNull(proxy); mapOfMembers.put(objectName, proxy); memberSetSize = mapOfMembers.values().size(); } updateMember(objectName, newState, null); try { mbeanServer.addNotificationListener(objectName, distListener, null, null); } catch (InstanceNotFoundException e) { if (logger.isDebugEnabled()) { logger.debug(e.getMessage()); } logger.info(STR, objectName); } }
/** * Add a proxy to the map to be used by bridge. * * @param objectName object name of the proxy * @param proxy actual proxy instance */
Add a proxy to the map to be used by bridge
addMemberToSystem
{ "repo_name": "masaki-yamakawa/geode", "path": "geode-core/src/main/java/org/apache/geode/management/internal/beans/DistributedSystemBridge.java", "license": "apache-2.0", "size": 53962 }
[ "java.util.Objects", "javax.management.InstanceNotFoundException", "javax.management.ObjectName", "org.apache.geode.management.DistributedSystemMXBean", "org.apache.geode.management.MemberMXBean", "org.apache.geode.management.internal.FederationComponent", "org.apache.geode.management.internal.MBeanJMXAdapter" ]
import java.util.Objects; import javax.management.InstanceNotFoundException; import javax.management.ObjectName; import org.apache.geode.management.DistributedSystemMXBean; import org.apache.geode.management.MemberMXBean; import org.apache.geode.management.internal.FederationComponent; import org.apache.geode.management.internal.MBeanJMXAdapter;
import java.util.*; import javax.management.*; import org.apache.geode.management.*; import org.apache.geode.management.internal.*;
[ "java.util", "javax.management", "org.apache.geode" ]
java.util; javax.management; org.apache.geode;
657,587
public void setHARole(Role role);
void function(Role role);
/** * Set switch's HA role to role. The haRoleReplyReceived indicates * if a reply was received from the switch (error replies excluded). * * If role is null, the switch should close the channel connection. * * @param role * @param haRoleReplyReceived */
Set switch's HA role to role. The haRoleReplyReceived indicates if a reply was received from the switch (error replies excluded). If role is null, the switch should close the channel connection
setHARole
{ "repo_name": "alsmadi/CSCI-6617", "path": "src/main/java/net/floodlightcontroller/core/IOFSwitch.java", "license": "apache-2.0", "size": 22216 }
[ "net.floodlightcontroller.core.IFloodlightProviderService" ]
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.*;
[ "net.floodlightcontroller.core" ]
net.floodlightcontroller.core;
1,383,178
@Test public void testReadProtectedPDFHasMetaData() throws Exception { try (InputStream is = XMPUtilTest.class.getResourceAsStream("/pdfs/write-protected.pdf")) { Assert.assertTrue(XMPUtil.hasMetadata(is, xmpPreferences)); } }
void function() throws Exception { try (InputStream is = XMPUtilTest.class.getResourceAsStream(STR)) { Assert.assertTrue(XMPUtil.hasMetadata(is, xmpPreferences)); } }
/** * Tests whether an edit-protected PDF can be read */
Tests whether an edit-protected PDF can be read
testReadProtectedPDFHasMetaData
{ "repo_name": "ambro2/jabref", "path": "src/test/java/net/sf/jabref/logic/xmp/XMPUtilTest.java", "license": "gpl-2.0", "size": 63473 }
[ "java.io.InputStream", "org.junit.Assert" ]
import java.io.InputStream; import org.junit.Assert;
import java.io.*; import org.junit.*;
[ "java.io", "org.junit" ]
java.io; org.junit;
2,477,784
void startLocalityGroup(Text cf);
void startLocalityGroup(Text cf);
/** * Start a new LocalityGroup. This method is used when the RFile seeks to the next LocalityGroup. * * @param cf * Text object of the column family of the first entry in the locality group */
Start a new LocalityGroup. This method is used when the RFile seeks to the next LocalityGroup
startLocalityGroup
{ "repo_name": "milleruntime/accumulo", "path": "core/src/main/java/org/apache/accumulo/core/file/rfile/MetricsGatherer.java", "license": "apache-2.0", "size": 2921 }
[ "org.apache.hadoop.io.Text" ]
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
1,900,782
public void setOutput(int i, Denotator d);
void function(int i, Denotator d);
/** * Stores the output denotator <code>d</code> for output connector number <code>i</code>. * This is usually called at the end of the run() method to store * the result of the computation. */
Stores the output denotator <code>d</code> for output connector number <code>i</code>. This is usually called at the end of the run() method to store the result of the computation
setOutput
{ "repo_name": "wells369/Rubato", "path": "java/src/org/rubato/base/Rubette.java", "license": "gpl-2.0", "size": 7842 }
[ "org.rubato.math.yoneda.Denotator" ]
import org.rubato.math.yoneda.Denotator;
import org.rubato.math.yoneda.*;
[ "org.rubato.math" ]
org.rubato.math;
722,218
default void checkCanRenameColumn(ConnectorTransactionHandle transactionHandle, Identity identity, SchemaTableName tableName) { denyRenameColumn(tableName.toString()); }
default void checkCanRenameColumn(ConnectorTransactionHandle transactionHandle, Identity identity, SchemaTableName tableName) { denyRenameColumn(tableName.toString()); }
/** * Check if identity is allowed to rename a column in the specified table in this catalog. * * @throws com.facebook.presto.spi.security.AccessDeniedException if not allowed */
Check if identity is allowed to rename a column in the specified table in this catalog
checkCanRenameColumn
{ "repo_name": "Teradata/presto", "path": "presto-spi/src/main/java/com/facebook/presto/spi/connector/ConnectorAccessControl.java", "license": "apache-2.0", "size": 12513 }
[ "com.facebook.presto.spi.SchemaTableName", "com.facebook.presto.spi.security.AccessDeniedException", "com.facebook.presto.spi.security.Identity" ]
import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.security.AccessDeniedException; import com.facebook.presto.spi.security.Identity;
import com.facebook.presto.spi.*; import com.facebook.presto.spi.security.*;
[ "com.facebook.presto" ]
com.facebook.presto;
2,904,249
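A hedged sketch of a connector overriding the deny-by-default check above; it assumes the other ConnectorAccessControl methods also have default implementations (as in this SPI version), and the schema name is illustrative.
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.connector.ConnectorAccessControl;
import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
import com.facebook.presto.spi.security.Identity;
import static com.facebook.presto.spi.security.AccessDeniedException.denyRenameColumn;

public class SandboxAccessControl implements ConnectorAccessControl {
    @Override
    public void checkCanRenameColumn(ConnectorTransactionHandle transactionHandle, Identity identity, SchemaTableName tableName) {
        // Allow column renames only inside a hypothetical "sandbox" schema; deny everywhere else.
        if (!"sandbox".equals(tableName.getSchemaName())) {
            denyRenameColumn(tableName.toString());
        }
    }
}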
public void createPartControl(Composite parent) { // Create controls stackPanel = new Composite(parent, SWT.NONE); stack = new StackLayout(); stackPanel.setLayout(stack); topPanel = new Composite(stackPanel, SWT.NONE); // Filter panel Control filterControl = filterPanel.createControl(topPanel); // Main panel mainPanel = new Composite(stackPanel, SWT.NONE); createMainControl(mainPanel); // Layout stack.topControl = mainPanel; GridLayout layout = new GridLayout(1, false); layout.horizontalSpacing = 0; layout.verticalSpacing = 0; layout.marginHeight = 0; layout.marginWidth = 0; topPanel.setLayout(layout); mainPanel.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 2, 1)); filterControl.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false)); // Toolbar createActions(); fillToolBar(getViewSite().getActionBars() .getToolBarManager()); }
void function(Composite parent) { stackPanel = new Composite(parent, SWT.NONE); stack = new StackLayout(); stackPanel.setLayout(stack); topPanel = new Composite(stackPanel, SWT.NONE); Control filterControl = filterPanel.createControl(topPanel); mainPanel = new Composite(stackPanel, SWT.NONE); createMainControl(mainPanel); stack.topControl = mainPanel; GridLayout layout = new GridLayout(1, false); layout.horizontalSpacing = 0; layout.verticalSpacing = 0; layout.marginHeight = 0; layout.marginWidth = 0; topPanel.setLayout(layout); mainPanel.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 2, 1)); filterControl.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false)); createActions(); fillToolBar(getViewSite().getActionBars() .getToolBarManager()); }
/** * Implements {@link ViewPart#createPartControl(Composite)} */
Implements <code>ViewPart#createPartControl(Composite)</code>
createPartControl
{ "repo_name": "psoreide/bnd", "path": "bndtools.core/src/bndtools/views/repository/FilteredViewPart.java", "license": "apache-2.0", "size": 3961 }
[ "org.eclipse.swt.custom.StackLayout", "org.eclipse.swt.layout.GridData", "org.eclipse.swt.layout.GridLayout", "org.eclipse.swt.widgets.Composite", "org.eclipse.swt.widgets.Control" ]
import org.eclipse.swt.custom.StackLayout; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.custom.*; import org.eclipse.swt.layout.*; import org.eclipse.swt.widgets.*;
[ "org.eclipse.swt" ]
org.eclipse.swt;
429,747
@GET @Path("/livy/sessions/appId/{sessionId}") @Produces(MediaType.TEXT_PLAIN) public Response getLivySessionAppId(@PathParam("sessionId") int sessionId) throws AppException { LivyMsg.Session session = livyService.getLivySession(sessionId); if (session == null) { return new JsonResponse(Response.Status.NOT_FOUND, "Session '" + sessionId + "' not found.").build(); } String projName = hdfsUsersController.getProjectName(session.getProxyUser()); if (!this.project.getName().equals(projName)) { throw new AppException(Status.BAD_REQUEST.getStatusCode(), "You can't stop sessions in another project."); } List<YarnApplicationstate> appStates = appStateBean.findByAppname("livy-session-" + sessionId); if (appStates == null || appStates.isEmpty()) { return new JsonResponse(Response.Status.NOT_FOUND, "Session '" + sessionId + "' not running.").build(); } return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(appStates.get(0).getApplicationid()). build(); }
@Path(STR) @Produces(MediaType.TEXT_PLAIN) Response function(@PathParam(STR) int sessionId) throws AppException { LivyMsg.Session session = livyService.getLivySession(sessionId); if (session == null) { return new JsonResponse(Response.Status.NOT_FOUND, STR + sessionId + STR).build(); } String projName = hdfsUsersController.getProjectName(session.getProxyUser()); if (!this.project.getName().equals(projName)) { throw new AppException(Status.BAD_REQUEST.getStatusCode(), STR); } List<YarnApplicationstate> appStates = appStateBean.findByAppname(STR + sessionId); if (appStates == null appStates.isEmpty()) { return new JsonResponse(Response.Status.NOT_FOUND, STR + sessionId + STR).build(); } return noCacheResponse.getNoCacheResponseBuilder(Response.Status.OK).entity(appStates.get(0).getApplicationid()). build(); }
/** * Get livy session Yarn AppId * * @param sessionId * @return * @throws AppException */
Get livy session Yarn AppId
getLivySessionAppId
{ "repo_name": "FilotasSiskos/hopsworks", "path": "hopsworks-api/src/main/java/io/hops/hopsworks/api/zeppelin/rest/InterpreterRestApi.java", "license": "agpl-3.0", "size": 22244 }
[ "io.hops.hopsworks.api.zeppelin.server.JsonResponse", "io.hops.hopsworks.api.zeppelin.util.LivyMsg", "io.hops.hopsworks.common.dao.jobhistory.YarnApplicationstate", "io.hops.hopsworks.common.exception.AppException", "java.util.List", "javax.ws.rs.Path", "javax.ws.rs.PathParam", "javax.ws.rs.Produces", "javax.ws.rs.core.MediaType", "javax.ws.rs.core.Response" ]
import io.hops.hopsworks.api.zeppelin.server.JsonResponse; import io.hops.hopsworks.api.zeppelin.util.LivyMsg; import io.hops.hopsworks.common.dao.jobhistory.YarnApplicationstate; import io.hops.hopsworks.common.exception.AppException; import java.util.List; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response;
import io.hops.hopsworks.api.zeppelin.server.*; import io.hops.hopsworks.api.zeppelin.util.*; import io.hops.hopsworks.common.dao.jobhistory.*; import io.hops.hopsworks.common.exception.*; import java.util.*; import javax.ws.rs.*; import javax.ws.rs.core.*;
[ "io.hops.hopsworks", "java.util", "javax.ws" ]
io.hops.hopsworks; java.util; javax.ws;
2,494,078
public ImmutableList<ImmutableSet<String>> getMandatoryProvidersList() { return mandatoryProvidersList; }
ImmutableList<ImmutableSet<String>> function() { return mandatoryProvidersList; }
/** * Returns the list of sets of mandatory Skylark providers. */
Returns the list of sets of mandatory Skylark providers
getMandatoryProvidersList
{ "repo_name": "mikelalcon/bazel", "path": "src/main/java/com/google/devtools/build/lib/packages/Attribute.java", "license": "apache-2.0", "size": 64223 }
[ "com.google.common.collect.ImmutableList", "com.google.common.collect.ImmutableSet" ]
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet;
import com.google.common.collect.*;
[ "com.google.common" ]
com.google.common;
816,906
public void setTarget(Activity activity) { target = activity; }
void function(Activity activity) { target = activity; }
/** * Sets the target Activity associated with this command * * @param activity * the target Activity */
Sets the target Activity associated with this command
setTarget
{ "repo_name": "CarlAtComputer/tracker", "path": "playground/other_gef/org.eclipse.gef.examples.flow/src/org/eclipse/gef/examples/flow/model/commands/ReconnectSourceCommand.java", "license": "gpl-2.0", "size": 3139 }
[ "org.eclipse.gef.examples.flow.model.Activity" ]
import org.eclipse.gef.examples.flow.model.Activity;
import org.eclipse.gef.examples.flow.model.*;
[ "org.eclipse.gef" ]
org.eclipse.gef;
1,357,385
private static void printTree(Writer ps, Node node, String indent, String lineSeparator) throws IOException { switch (node.getNodeType()) { case Node.DOCUMENT_NODE: NodeList nodes = node.getChildNodes(); if (nodes != null) { for (int i = 0; i < nodes.getLength(); i++) { printTree(ps, nodes.item(i), "", lineSeparator); } } break; case Node.ELEMENT_NODE: // // Print element and atributes // String name = node.getNodeName(); ps.write(indent); ps.write("<"); ps.write(name); // // Print attributes // NamedNodeMap attributes = node.getAttributes(); for (int i = 0; i < attributes.getLength(); i++) { Node current = attributes.item(i); ps.write(" "); ps.write(current.getNodeName()); ps.write("=\""); ps.write(current.getNodeValue()); ps.write("\""); } // // Recurse on each child // NodeList children = node.getChildNodes(); if (children != null) { ps.write(">"); ps.write(lineSeparator); for (int i = 0; i < children.getLength(); i++) printTree(ps, children.item(i), indent + " ", lineSeparator); ps.write(indent); ps.write("</"); ps.write(name); ps.write(">"); ps.write(lineSeparator); } else { ps.write("/>"); ps.write(lineSeparator); } break; case Node.CDATA_SECTION_NODE: ps.write(indent); ps.write("<![CDATA["); ps.write(node.getNodeValue()); ps.write("]]>"); ps.write(lineSeparator); break; case Node.TEXT_NODE: String text = node.getNodeValue().trim(); if (text.length() > 0) { ps.write(indent); ps.write(text); ps.write(lineSeparator); } break; case Node.PROCESSING_INSTRUCTION_NODE: break; case Node.ENTITY_REFERENCE_NODE: break; case Node.DOCUMENT_TYPE_NODE: break; default: break; } }
static void function(Writer ps, Node node, String indent, String lineSeparator) throws IOException { switch (node.getNodeType()) { case Node.DOCUMENT_NODE: NodeList nodes = node.getChildNodes(); if (nodes != null) { for (int i = 0; i < nodes.getLength(); i++) { printTree(ps, nodes.item(i), "", lineSeparator); } } break; case Node.ELEMENT_NODE: String name = node.getNodeName(); ps.write(indent); ps.write("<"); ps.write(name); NamedNodeMap attributes = node.getAttributes(); for (int i = 0; i < attributes.getLength(); i++) { Node current = attributes.item(i); ps.write(" "); ps.write(current.getNodeName()); ps.write("=\""); ps.write(current.getNodeValue()); ps.write("\""); } NodeList children = node.getChildNodes(); if (children != null) { ps.write(">"); ps.write(lineSeparator); for (int i = 0; i < children.getLength(); i++) printTree(ps, children.item(i), indent + " ", lineSeparator); ps.write(indent); ps.write("</"); ps.write(name); ps.write(">"); ps.write(lineSeparator); } else { ps.write("/>"); ps.write(lineSeparator); } break; case Node.CDATA_SECTION_NODE: ps.write(indent); ps.write("<![CDATA["); ps.write(node.getNodeValue()); ps.write("]]>"); ps.write(lineSeparator); break; case Node.TEXT_NODE: String text = node.getNodeValue().trim(); if (text.length() > 0) { ps.write(indent); ps.write(text); ps.write(lineSeparator); } break; case Node.PROCESSING_INSTRUCTION_NODE: break; case Node.ENTITY_REFERENCE_NODE: break; case Node.DOCUMENT_TYPE_NODE: break; default: break; } }
/** * Print the tree of a node. This method is recursive. * * @param ps The writer to where the XML document is written * @param node The node with which to start the printing * @param indent A string that is printed on each line before any XML * @param lineSeparator The string written at the end of each line */
Print the tree of a node. This method is recursive
printTree
{ "repo_name": "brucebeisel/BDB-Utilities", "path": "src/main/java/com/bdb/util/XMLUtils.java", "license": "gpl-3.0", "size": 5510 }
[ "java.io.IOException", "java.io.Writer", "org.w3c.dom.Node", "org.w3c.dom.NodeList" ]
import java.io.IOException; import java.io.Writer; import org.w3c.dom.Node; import org.w3c.dom.NodeList;
import java.io.*; import org.w3c.dom.*;
[ "java.io", "org.w3c.dom" ]
java.io; org.w3c.dom;
854,429
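A sketch of exercising the recursive DOM printer above; printTree is private in the source, so this assumes an accessible entry point with the same signature and is purely illustrative.
import java.io.StringReader;
import java.io.StringWriter;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;

public class PrintTreeDemo {
    public static void main(String[] args) throws Exception {
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                .parse(new InputSource(new StringReader("<root><item>x</item></root>")));
        StringWriter out = new StringWriter();
        // Start with an empty indent; each recursion level extends the indent.
        XMLUtils.printTree(out, doc, "", System.lineSeparator());
        System.out.println(out);
    }
}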
@FIXVersion(introduced="4.4") public TrdInstrmtLegGroup[] getTrdInstrmtLegGroups() { throw new UnsupportedOperationException(getUnsupportedTagMessage()); }
@FIXVersion(introduced="4.4") TrdInstrmtLegGroup[] function() { throw new UnsupportedOperationException(getUnsupportedTagMessage()); }
/** * Message field getter for {@link TrdInstrmtLegGroup} array of groups. * @return field value */
Message field getter for <code>TrdInstrmtLegGroup</code> array of groups
getTrdInstrmtLegGroups
{ "repo_name": "marvisan/HadesFIX", "path": "Model/src/main/java/net/hades/fix/message/TradeCaptureReportAckMsg.java", "license": "gpl-3.0", "size": 118717 }
[ "net.hades.fix.message.anno.FIXVersion", "net.hades.fix.message.group.TrdInstrmtLegGroup" ]
import net.hades.fix.message.anno.FIXVersion; import net.hades.fix.message.group.TrdInstrmtLegGroup;
import net.hades.fix.message.anno.*; import net.hades.fix.message.group.*;
[ "net.hades.fix" ]
net.hades.fix;
544,589
private void insertEntry(int index, Field key, int bufferId) throws IOException { // Make room for key data int offset = moveKeys(index, -key.length()); // Make room for key entry int start = BASE + (index * ENTRY_SIZE); int end = BASE + (keyCount * ENTRY_SIZE); buffer.move(start, start + ENTRY_SIZE, end - start); // Store key entry and data buffer.putInt(start, offset); buffer.putInt(start + KEY_OFFSET_SIZE, bufferId); key.write(buffer, offset); setKeyCount(keyCount + 1); }
void function(int index, Field key, int bufferId) throws IOException { int offset = moveKeys(index, -key.length()); int start = BASE + (index * ENTRY_SIZE); int end = BASE + (keyCount * ENTRY_SIZE); buffer.move(start, start + ENTRY_SIZE, end - start); buffer.putInt(start, offset); buffer.putInt(start + KEY_OFFSET_SIZE, bufferId); key.write(buffer, offset); setKeyCount(keyCount + 1); }
/** * Insert the child node entry (key and buffer ID) associated with the specified key index. * All entries at and after index are shifted to make space for new entry. * The node key count is adjusted to reflect the addition of a child. * @param index child key index * @param key child node key * @param bufferId child node buffer ID */
Insert the child node entry (key and buffer ID) associated with the specified key index. All entries at and after index are shifted to make space for new entry. The node key count is adjusted to reflect the addition of a child
insertEntry
{ "repo_name": "NationalSecurityAgency/ghidra", "path": "Ghidra/Framework/DB/src/main/java/db/VarKeyInteriorNode.java", "license": "apache-2.0", "size": 25119 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
53,926
private boolean openReader() { boolean bRet = true; boolean readOnly = true; // WARNING - potential poor multi-threading performance - check // SimpleSimpleFSDirectory try { if (_searcher != null) { _searcher.close(); } _searcher = new IndexSearcher(LuceneUtils.getDirectory(new File(_indexName)), readOnly); this.nbDocInIndex = _searcher.maxDoc(); } catch (IOException e) { LOGGER.warn("Unable to open index (read mode): " + e); // e.printStackTrace(); bRet = false; } return bRet; }
boolean function() { boolean bRet = true; boolean readOnly = true; try { if (_searcher != null) { _searcher.close(); } _searcher = new IndexSearcher(LuceneUtils.getDirectory(new File(_indexName)), readOnly); this.nbDocInIndex = _searcher.maxDoc(); } catch (IOException e) { LOGGER.warn(STR + e); bRet = false; } return bRet; }
/** * Opens a Lucene reader. */
Opens a Lucene reader
openReader
{ "repo_name": "pgdurand/BeeDeeM", "path": "src/bzh/plealog/dbmirror/util/runner/DBMSUniqueSeqIdIndex.java", "license": "agpl-3.0", "size": 6936 }
[ "java.io.File", "java.io.IOException", "org.apache.lucene.search.IndexSearcher" ]
import java.io.File; import java.io.IOException; import org.apache.lucene.search.IndexSearcher;
import java.io.*; import org.apache.lucene.search.*;
[ "java.io", "org.apache.lucene" ]
java.io; org.apache.lucene;
1,564,051
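The openReader record uses the pre-4.x IndexSearcher(Directory, boolean) constructor. A hedged sketch of the equivalent open-for-reading step against the modern Lucene API (assuming Lucene 5+ on the classpath; the index path is illustrative):

import java.nio.file.Paths;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class OpenReaderSketch {
    public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/lucene-index")); // illustrative path
             DirectoryReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            int nbDocInIndex = searcher.getIndexReader().maxDoc(); // same count the record caches
            System.out.println("docs in index: " + nbDocInIndex);
        }
    }
}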
protected Schema getRecordSchema(Object record) { return ((GenericContainer)record).getSchema(); }
Schema function(Object record) { return ((GenericContainer)record).getSchema(); }
/** Called to obtain the schema of a record. By default calls * {@link GenericContainer#getSchema()}. May be overridden for alternate record * representations. */
Called to obtain the schema of a record. By default calls <code>GenericContainer#getSchema()</code>. May be overridden for alternate record representations
getRecordSchema
{ "repo_name": "cloudera/avro", "path": "lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java", "license": "apache-2.0", "size": 21455 }
[ "org.apache.avro.Schema" ]
import org.apache.avro.Schema;
import org.apache.avro.*;
[ "org.apache.avro" ]
org.apache.avro;
1,288,937
void engineUpdate(SecretKey key) throws InvalidKeyException;
void engineUpdate(SecretKey key) throws InvalidKeyException;
/** * Updates the digest using the specified key. * This is used for SSL 3.0 only; we may deprecate and remove the support * of this in the future. * * @param key the key whose value is to be digested. */
Updates the digest using the specified key. This is used for SSL 3.0 only; we may deprecate and remove the support of this in the future
engineUpdate
{ "repo_name": "md-5/jdk10", "path": "src/java.base/share/classes/sun/security/util/MessageDigestSpi2.java", "license": "gpl-2.0", "size": 1726 }
[ "java.security.InvalidKeyException", "javax.crypto.SecretKey" ]
import java.security.InvalidKeyException; import javax.crypto.SecretKey;
import java.security.*; import javax.crypto.*;
[ "java.security", "javax.crypto" ]
java.security; javax.crypto;
887,473
private void checkFinishedBuildingWhenAboutToSetValue() { Preconditions.checkState(evaluating, "not started building %s", this); Preconditions.checkState( !isDirty() || dirtyState == DirtyState.VERIFIED_CLEAN || dirtyState == DirtyState.REBUILDING, "not done building %s", this); Preconditions.checkState(isReady(), "not done building %s", this); }
void function() { Preconditions.checkState(evaluating, STR, this); Preconditions.checkState( !isDirty() || dirtyState == DirtyState.VERIFIED_CLEAN || dirtyState == DirtyState.REBUILDING, STR, this); Preconditions.checkState(isReady(), STR, this); }
/** * Helper method to assert that node has finished building, as far as we can tell. We would * actually like to check that the node has been evaluated, but that is not available in * this context. */
Helper method to assert that node has finished building, as far as we can tell. We would actually like to check that the node has been evaluated, but that is not available in this context
checkFinishedBuildingWhenAboutToSetValue
{ "repo_name": "kamalmarhubi/bazel", "path": "src/main/java/com/google/devtools/build/skyframe/BuildingState.java", "license": "apache-2.0", "size": 18790 }
[ "com.google.common.base.Preconditions", "com.google.devtools.build.skyframe.NodeEntry" ]
import com.google.common.base.Preconditions; import com.google.devtools.build.skyframe.NodeEntry;
import com.google.common.base.*; import com.google.devtools.build.skyframe.*;
[ "com.google.common", "com.google.devtools" ]
com.google.common; com.google.devtools;
1,409,678
public static Map<String, Object> aggPreMergeCompPageBolt(Map<String, Object> beat, String window, boolean includeSys) { Map<String, Object> ret = new HashMap<>(); putKV(ret, EXECUTOR_ID, getByKey(beat, "exec-id")); putKV(ret, HOST, getByKey(beat, HOST)); putKV(ret, PORT, getByKey(beat, PORT)); putKV(ret, UPTIME, getByKey(beat, UPTIME)); putKV(ret, NUM_EXECUTORS, 1); putKV(ret, NUM_TASKS, getByKey(beat, NUM_TASKS)); Map stat2win2sid2num = getMapByKey(beat, STATS); putKV(ret, CAPACITY, computeAggCapacity(stat2win2sid2num, getByKeyOr0(beat, UPTIME).intValue())); // calc cid+sid->input_stats Map inputStats = new HashMap(); Map sid2acked = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, ACKED), TO_STRING).get(window); Map sid2failed = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, FAILED), TO_STRING).get(window); putKV(inputStats, ACKED, sid2acked != null ? sid2acked : new HashMap()); putKV(inputStats, FAILED, sid2failed != null ? sid2failed : new HashMap()); inputStats = swapMapOrder(inputStats); Map sid2execLat = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, EXEC_LATENCIES), TO_STRING).get(window); Map sid2procLat = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, PROC_LATENCIES), TO_STRING).get(window); Map sid2exec = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, EXECUTED), TO_STRING).get(window); mergeMaps(inputStats, aggBoltStreamsLatAndCount(sid2execLat, sid2procLat, sid2exec)); putKV(ret, CID_SID_TO_IN_STATS, inputStats); // calc sid->output_stats Map outputStats = new HashMap(); Map sid2emitted = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, EMITTED), TO_STRING).get(window); Map sid2transferred = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, TRANSFERRED), TO_STRING).get(window); if (sid2emitted != null) { putKV(outputStats, EMITTED, filterSysStreams2Stat(sid2emitted, includeSys)); } else { putKV(outputStats, EMITTED, new HashMap()); } if (sid2transferred != null) { putKV(outputStats, TRANSFERRED, filterSysStreams2Stat(sid2transferred, includeSys)); } else { putKV(outputStats, TRANSFERRED, new HashMap()); } outputStats = swapMapOrder(outputStats); putKV(ret, SID_TO_OUT_STATS, outputStats); return ret; }
static Map<String, Object> function(Map<String, Object> beat, String window, boolean includeSys) { Map<String, Object> ret = new HashMap<>(); putKV(ret, EXECUTOR_ID, getByKey(beat, STR)); putKV(ret, HOST, getByKey(beat, HOST)); putKV(ret, PORT, getByKey(beat, PORT)); putKV(ret, UPTIME, getByKey(beat, UPTIME)); putKV(ret, NUM_EXECUTORS, 1); putKV(ret, NUM_TASKS, getByKey(beat, NUM_TASKS)); Map stat2win2sid2num = getMapByKey(beat, STATS); putKV(ret, CAPACITY, computeAggCapacity(stat2win2sid2num, getByKeyOr0(beat, UPTIME).intValue())); Map inputStats = new HashMap(); Map sid2acked = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, ACKED), TO_STRING).get(window); Map sid2failed = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, FAILED), TO_STRING).get(window); putKV(inputStats, ACKED, sid2acked != null ? sid2acked : new HashMap()); putKV(inputStats, FAILED, sid2failed != null ? sid2failed : new HashMap()); inputStats = swapMapOrder(inputStats); Map sid2execLat = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, EXEC_LATENCIES), TO_STRING).get(window); Map sid2procLat = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, PROC_LATENCIES), TO_STRING).get(window); Map sid2exec = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, EXECUTED), TO_STRING).get(window); mergeMaps(inputStats, aggBoltStreamsLatAndCount(sid2execLat, sid2procLat, sid2exec)); putKV(ret, CID_SID_TO_IN_STATS, inputStats); Map outputStats = new HashMap(); Map sid2emitted = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, EMITTED), TO_STRING).get(window); Map sid2transferred = (Map) windowSetConverter(getMapByKey(stat2win2sid2num, TRANSFERRED), TO_STRING).get(window); if (sid2emitted != null) { putKV(outputStats, EMITTED, filterSysStreams2Stat(sid2emitted, includeSys)); } else { putKV(outputStats, EMITTED, new HashMap()); } if (sid2transferred != null) { putKV(outputStats, TRANSFERRED, filterSysStreams2Stat(sid2transferred, includeSys)); } else { putKV(outputStats, TRANSFERRED, new HashMap()); } outputStats = swapMapOrder(outputStats); putKV(ret, SID_TO_OUT_STATS, outputStats); return ret; }
/** * pre-merge component page bolt stats from an executor heartbeat * 1. computes component capacity * 2. converts map keys of stats * 3. filters streams if necessary * * @param beat executor heartbeat data * @param window specified window * @param includeSys whether to include system streams * @return pre-merged stats */
pre-merge component page bolt stats from an executor heartbeat 1. computes component capacity 2. converts map keys of stats 3. filters streams if necessary
aggPreMergeCompPageBolt
{ "repo_name": "roshannaik/storm", "path": "storm-client/src/jvm/org/apache/storm/stats/StatsUtil.java", "license": "apache-2.0", "size": 111115 }
[ "java.util.HashMap", "java.util.Map" ]
import java.util.HashMap; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
147,690
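The "converts map keys of stats" step in the record above relies on re-nesting maps (stream -> stat -> value becomes stat -> stream -> value). A self-contained sketch of that re-nesting in plain Java, not Storm's actual swapMapOrder helper; the stream and stat names are illustrative:

import java.util.HashMap;
import java.util.Map;

public class SwapMapOrderSketch {
    // Turns {a={x=1}, b={x=2}} into {x={a=1, b=2}}.
    static <K1, K2, V> Map<K2, Map<K1, V>> swapMapOrder(Map<K1, Map<K2, V>> in) {
        Map<K2, Map<K1, V>> out = new HashMap<>();
        for (Map.Entry<K1, Map<K2, V>> outer : in.entrySet()) {
            for (Map.Entry<K2, V> inner : outer.getValue().entrySet()) {
                out.computeIfAbsent(inner.getKey(), k -> new HashMap<>())
                   .put(outer.getKey(), inner.getValue());
            }
        }
        return out;
    }

    public static void main(String[] args) {
        Map<String, Long> defaultStream = new HashMap<>();
        defaultStream.put("acked", 10L);
        defaultStream.put("failed", 1L);
        Map<String, Map<String, Long>> streamToStat = new HashMap<>();
        streamToStat.put("default", defaultStream);
        System.out.println(swapMapOrder(streamToStat)); // {acked={default=10}, failed={default=1}}
    }
}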
StanzaCollector createStanzaCollectorAndSend(StanzaFilter stanzaFilter, Stanza stanza) throws NotConnectedException, InterruptedException; /** * Creates a new stanza collector for this connection. A stanza filter * determines which stanzas will be accumulated by the collector. A * StanzaCollector is more suitable to use than a {@link StanzaListener}
StanzaCollector createStanzaCollectorAndSend(StanzaFilter stanzaFilter, Stanza stanza) throws NotConnectedException, InterruptedException; /** * Creates a new stanza collector for this connection. A stanza filter * determines which stanzas will be accumulated by the collector. A * StanzaCollector is more suitable to use than a {@link StanzaListener}
/** * Creates a new stanza collector for this connection. A stanza filter determines * which stanzas will be accumulated by the collector. A StanzaCollector is * more suitable to use than a {@link StanzaListener} when you need to wait for * a specific result. * * @param stanzaFilter the stanza filter to use. * @param stanza the stanza to send right after the collector got created * @return a new stanza collector. * @throws InterruptedException if the calling thread was interrupted. * @throws NotConnectedException if the XMPP connection is not connected. */
Creates a new stanza collector for this connection. A stanza filter determines which stanzas will be accumulated by the collector. A StanzaCollector is more suitable to use than a <code>StanzaListener</code> when you need to wait for a specific result
createStanzaCollectorAndSend
{ "repo_name": "igniterealtime/Smack", "path": "smack-core/src/main/java/org/jivesoftware/smack/XMPPConnection.java", "license": "apache-2.0", "size": 31078 }
[ "org.jivesoftware.smack.SmackException", "org.jivesoftware.smack.filter.StanzaFilter", "org.jivesoftware.smack.packet.Stanza" ]
import org.jivesoftware.smack.SmackException; import org.jivesoftware.smack.filter.StanzaFilter; import org.jivesoftware.smack.packet.Stanza;
import org.jivesoftware.smack.*; import org.jivesoftware.smack.filter.*; import org.jivesoftware.smack.packet.*;
[ "org.jivesoftware.smack" ]
org.jivesoftware.smack;
616,581
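A hedged usage sketch of the collector pattern the createStanzaCollectorAndSend javadoc describes, assuming an already-connected Smack XMPPConnection and an IQ request built elsewhere; StanzaIdFilter is used here to match the reply to the request's stanza id:

import org.jivesoftware.smack.StanzaCollector;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.filter.StanzaIdFilter;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.packet.Stanza;

public class CollectorSketch {
    // Send the request and block until the matching reply arrives (or Smack's reply timeout fires).
    static Stanza sendAndWait(XMPPConnection connection, IQ request) throws Exception {
        StanzaCollector collector =
                connection.createStanzaCollectorAndSend(new StanzaIdFilter(request), request);
        try {
            return collector.nextResultOrThrow();
        } finally {
            collector.cancel(); // always release the collector
        }
    }
}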
public static void setOSXHideFromDock(boolean hideFromDock) { try { if (isOSX()) { boolean currentHideFromDock = isHideFromDock(); if (currentHideFromDock != hideFromDock) { String content = getInfoPlistContent(); FileOutputStream fileOutputStream = new FileOutputStream(getInfoPlistPath()); try { fileOutputStream.write(content.replaceFirst( "<key>LSUIElement</key><string>" + (currentHideFromDock ? "1" : "0") + "</string>", "<key>LSUIElement</key><string>" + (hideFromDock ? "1" : "0") + "</string>").getBytes("UTF-8")); } finally { fileOutputStream.close(); } fileOutputStream.close(); } } } catch (IOException e) { LOGGER.warn("Unable to update Info.plist", e); } }
static void function(boolean hideFromDock) { try { if (isOSX()) { boolean currentHideFromDock = isHideFromDock(); if (currentHideFromDock != hideFromDock) { String content = getInfoPlistContent(); FileOutputStream fileOutputStream = new FileOutputStream(getInfoPlistPath()); try { fileOutputStream.write(content.replaceFirst( STR + (currentHideFromDock ? "1" : "0") + STR, STR + (hideFromDock ? "1" : "0") + STR).getBytes("UTF-8")); } finally { fileOutputStream.close(); } fileOutputStream.close(); } } } catch (IOException e) { LOGGER.warn(STR, e); } }
/** * Update LSUIElement (hide from dock) value * * @param hideFromDock new hide from dock value */
Update LSUIElement (hide from dock) value
setOSXHideFromDock
{ "repo_name": "digolo/davmail-enterprise", "path": "davmail/src/main/java/davmail/ui/OSXInfoPlist.java", "license": "gpl-2.0", "size": 4285 }
[ "java.io.FileOutputStream", "java.io.IOException" ]
import java.io.FileOutputStream; import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,691,288
private void registerWatcher() { try { final byte[] layoutUpdate = TableLayoutMonitor.this.mZKClient.getData(mTableLayoutFile, mWatcher, mLayoutStat); LOG.info("Received layout update for table {}: {}.", mTableURI, Bytes.toStringBinary(layoutUpdate)); // This assumes handlers do not let exceptions pop up: mHandler.update(layoutUpdate); } catch (KeeperException ke) { LOG.error("Unrecoverable ZooKeeper error: {}", ke.getMessage()); throw new RuntimeException(ke); } }
void function() { try { final byte[] layoutUpdate = TableLayoutMonitor.this.mZKClient.getData(mTableLayoutFile, mWatcher, mLayoutStat); LOG.info(STR, mTableURI, Bytes.toStringBinary(layoutUpdate)); mHandler.update(layoutUpdate); } catch (KeeperException ke) { LOG.error(STR, ke.getMessage()); throw new RuntimeException(ke); } }
/** * Registers a ZooKeeper watcher for the specified table's layout. * * <p> Retries on ZooKeeper failure (no deadline, no limit). </p> * <p> Dies whenever an exception pops up while running a handler. </p> */
Registers a ZooKeeper watcher for the specified table's layout. Retries on ZooKeeper failure (no deadline, no limit). Dies whenever an exception pops up while running a handler.
registerWatcher
{ "repo_name": "zenoss/kiji-schema", "path": "kiji-schema/src/main/java/org/kiji/schema/layout/impl/TableLayoutMonitor.java", "license": "apache-2.0", "size": 20667 }
[ "org.apache.hadoop.hbase.util.Bytes", "org.apache.zookeeper.KeeperException" ]
import org.apache.hadoop.hbase.util.Bytes; import org.apache.zookeeper.KeeperException;
import org.apache.hadoop.hbase.util.*; import org.apache.zookeeper.*;
[ "org.apache.hadoop", "org.apache.zookeeper" ]
org.apache.hadoop; org.apache.zookeeper;
2,898,664
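A hedged sketch of the same read-and-rearm-watch step against the raw ZooKeeper client (connection string, znode path, and timeout are illustrative; it assumes the server is reachable). In the real monitor the watcher re-registers itself by calling getData again:

import java.nio.charset.StandardCharsets;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;

public class LayoutWatchSketch {
    public static void main(String[] args) throws Exception {
        final String path = "/kiji/table-layout"; // illustrative znode
        Watcher watcher = new Watcher() {
            @Override
            public void process(WatchedEvent event) {
                System.out.println("ZooKeeper event: " + event);
                // A real monitor would call getData() again here to re-register the watch.
            }
        };
        ZooKeeper zk = new ZooKeeper("localhost:2181", 30_000, watcher);
        try {
            Stat stat = new Stat();
            byte[] layout = zk.getData(path, watcher, stat); // sets the watch and reads the data
            System.out.println("layout: " + new String(layout, StandardCharsets.UTF_8));
        } finally {
            zk.close();
        }
    }
}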
protected Commandline buildCommandline() { Commandline cmd = new Commandline(); cmd.setExecutable(findExecutable()); if (stacktraces) { appendOption(cmd, "--stacktraces"); } if (getCwd() != null) { appendOption(cmd, "--cwd=" + getCwd()); } else { appendOption(cmd, "--cwd=" + getProject().getBaseDir().getPath()); } if (getConfig() != null) { appendOption(cmd, "--config=" + getConfig()); } cmd.createArgument().setValue(toolName); completeCommandline(cmd); return cmd; }
Commandline function() { Commandline cmd = new Commandline(); cmd.setExecutable(findExecutable()); if (stacktraces) { appendOption(cmd, STR); } if (getCwd() != null) { appendOption(cmd, STR + getCwd()); } else { appendOption(cmd, STR + getProject().getBaseDir().getPath()); } if (getConfig() != null) { appendOption(cmd, STR + getConfig()); } cmd.createArgument().setValue(toolName); completeCommandline(cmd); return cmd; }
/** * Builds the command line to be executed * @return the command line to be executed, * or null if there is no command to be executed */
Builds the command line to be executed
buildCommandline
{ "repo_name": "ceylon/ceylon-compiler", "path": "src/com/redhat/ceylon/ant/CeylonAntTask.java", "license": "gpl-2.0", "size": 12570 }
[ "org.apache.tools.ant.types.Commandline" ]
import org.apache.tools.ant.types.Commandline;
import org.apache.tools.ant.types.*;
[ "org.apache.tools" ]
org.apache.tools;
1,479,129
Collection<Owner> findByLastName(String lastName) throws DataAccessException;
Collection<Owner> findByLastName(String lastName) throws DataAccessException;
/** * Retrieve <code>Owner</code>s from the data store by last name, returning all owners whose last name <i>starts</i> * with the given name. * * @param lastName Value to search for * @return a <code>Collection</code> of matching <code>Owner</code>s (or an empty <code>Collection</code> if none * found) */
Retrieve <code>Owner</code>s from the data store by last name, returning all owners whose last name starts with the given name
findByLastName
{ "repo_name": "YoannBuch/DependencyInjectionAgent", "path": "test-applications/spring-petclinic/src/main/java/org/springframework/samples/petclinic/repository/OwnerRepository.java", "license": "apache-2.0", "size": 2986 }
[ "java.util.Collection", "org.springframework.dao.DataAccessException", "org.springframework.samples.petclinic.model.Owner" ]
import java.util.Collection; import org.springframework.dao.DataAccessException; import org.springframework.samples.petclinic.model.Owner;
import java.util.*; import org.springframework.dao.*; import org.springframework.samples.petclinic.model.*;
[ "java.util", "org.springframework.dao", "org.springframework.samples" ]
java.util; org.springframework.dao; org.springframework.samples;
1,067,362
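A hedged caller-side sketch showing the prefix-match semantics the findByLastName javadoc describes, assuming an injected OwnerRepository; the sample prefix is illustrative:

import java.util.Collection;
import org.springframework.samples.petclinic.model.Owner;
import org.springframework.samples.petclinic.repository.OwnerRepository;

public class FindOwnersSketch {
    // "Dav" would match e.g. "Davis" and "Davidson"; an empty collection comes back when nothing matches.
    static Collection<Owner> findByPrefix(OwnerRepository owners, String prefix) {
        return owners.findByLastName(prefix);
    }
}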
protected void fireCreationEvent() { fireEvent(new DebugEvent(this, DebugEvent.CREATE)); }
void function() { fireEvent(new DebugEvent(this, DebugEvent.CREATE)); }
/** * Fires a creation event. */
Fires a creation event
fireCreationEvent
{ "repo_name": "timothyjward/bndtools", "path": "bndtools.core/src/bndtools/launch/bnd/LaunchThread.java", "license": "epl-1.0", "size": 7533 }
[ "org.eclipse.debug.core.DebugEvent" ]
import org.eclipse.debug.core.DebugEvent;
import org.eclipse.debug.core.*;
[ "org.eclipse.debug" ]
org.eclipse.debug;
1,900,572
Promise<Void> resume(String id, ResumeActionDto action);
Promise<Void> resume(String id, ResumeActionDto action);
/** * Resumes application. * * @param id debug session id */
Resumes application
resume
{ "repo_name": "sudaraka94/che", "path": "ide/che-core-ide-api/src/main/java/org/eclipse/che/ide/api/debug/DebuggerServiceClient.java", "license": "epl-1.0", "size": 4500 }
[ "org.eclipse.che.api.debug.shared.dto.action.ResumeActionDto", "org.eclipse.che.api.promises.client.Promise" ]
import org.eclipse.che.api.debug.shared.dto.action.ResumeActionDto; import org.eclipse.che.api.promises.client.Promise;
import org.eclipse.che.api.debug.shared.dto.action.*; import org.eclipse.che.api.promises.client.*;
[ "org.eclipse.che" ]
org.eclipse.che;
2,907,179
public void processSupplementalTables() throws DataSetProcessingException { final String signature = CLASS_NAME + ".processSupplementalTables(long dataSetId)"; try { for (TableInfo table : tables) { if (table.getTableType().getSemantics().contains(INVENTORY_TABLE_SEMANTICS)) { processInventoryTable(table); } if (table.getTableType().getSemantics().contains(REGULAR_TABLE_SEMANTICS)) { processRegularTable(table); } } } catch (DataSetProcessingException e) { throw LoggingWrapperUtility.logException(logger, signature, e); } }
void function() throws DataSetProcessingException { final String signature = CLASS_NAME + STR; try { for (TableInfo table : tables) { if (table.getTableType().getSemantics().contains(INVENTORY_TABLE_SEMANTICS)) { processInventoryTable(table); } if (table.getTableType().getSemantics().contains(REGULAR_TABLE_SEMANTICS)) { processRegularTable(table); } } } catch (DataSetProcessingException e) { throw LoggingWrapperUtility.logException(logger, signature, e); } }
/** * Processes supplemental tables of the given dataset. Processing involves storing table's data in the database * and persisting other meta-information related to supplementary tables. * * @throws DataSetProcessingException * if failed to process supplemental table */
Processes supplemental tables of the given dataset. Processing involves storing table's data in the database and persisting other meta-information related to supplementary tables
processSupplementalTables
{ "repo_name": "Small-Bodies-Node/ntl_archive_db_demo", "path": "import_and_persistence/src/java/main/gov/nasa/pds/processors/impl/profile/cassini/CassiniProfile.java", "license": "bsd-3-clause", "size": 26337 }
[ "com.topcoder.commons.utils.LoggingWrapperUtility", "gov.nasa.pds.services.DataSetProcessingException" ]
import com.topcoder.commons.utils.LoggingWrapperUtility; import gov.nasa.pds.services.DataSetProcessingException;
import com.topcoder.commons.utils.*; import gov.nasa.pds.services.*;
[ "com.topcoder.commons", "gov.nasa.pds" ]
com.topcoder.commons; gov.nasa.pds;
396,628
@WebMethod(operationName = "getAction") @WebResult(name = "action") @Cacheable(value= ActionDefinition.Cache.NAME, key="'actionId=' + #p0") public ActionDefinition getAction(@WebParam(name = "actionId") String actionId) throws RiceIllegalArgumentException;
@WebMethod(operationName = STR) @WebResult(name = STR) @Cacheable(value= ActionDefinition.Cache.NAME, key=STR) ActionDefinition function(@WebParam(name = STR) String actionId) throws RiceIllegalArgumentException;
/** * Retrieves the action for the given actionId. The action includes the * propositions which define the condition that is to be evaluated on the * action. It also defines a collection of actions which will be invoked if * the action succeeds. * * @param actionId the id of the action to retrieve * @return the action definition, or null if no action could be located for the * given actionId * @throws IllegalArgumentException if the given actionId is null */
Retrieves the action for the given actionId. The action includes the propositions which define the condition that is to be evaluated on the action. It also defines a collection of actions which will be invoked if the action succeeds
getAction
{ "repo_name": "mztaylor/rice-git", "path": "rice-middleware/krms/api/src/main/java/org/kuali/rice/krms/api/repository/RuleManagementService.java", "license": "apache-2.0", "size": 61035 }
[ "javax.jws.WebMethod", "javax.jws.WebParam", "javax.jws.WebResult", "org.kuali.rice.core.api.exception.RiceIllegalArgumentException", "org.kuali.rice.krms.api.repository.action.ActionDefinition", "org.springframework.cache.annotation.Cacheable" ]
import javax.jws.WebMethod; import javax.jws.WebParam; import javax.jws.WebResult; import org.kuali.rice.core.api.exception.RiceIllegalArgumentException; import org.kuali.rice.krms.api.repository.action.ActionDefinition; import org.springframework.cache.annotation.Cacheable;
import javax.jws.*; import org.kuali.rice.core.api.exception.*; import org.kuali.rice.krms.api.repository.action.*; import org.springframework.cache.annotation.*;
[ "javax.jws", "org.kuali.rice", "org.springframework.cache" ]
javax.jws; org.kuali.rice; org.springframework.cache;
1,494,424
public static void main(String[] args) { // We are using the US locale to keep all date and double formats // the same wherever we go. Locale.setDefault(Locale.US); if (null != System.getProperty("unitth.debug")) { if (System.getProperty("unitth.debug").equalsIgnoreCase("true")) { c_DBG = true; // default is false } } // Store away all args inputArgs = args; UnitTH thg = new UnitTH(args); if (c_DBG) { System.out.println(thg.toString()); } } public UnitTH() { // void } public UnitTH(String... args) { doTheStuff(args); }
static void function(String[] args) { Locale.setDefault(Locale.US); if (null != System.getProperty(STR)) { if (System.getProperty(STR).equalsIgnoreCase("true")) { c_DBG = true; } } inputArgs = args; UnitTH thg = new UnitTH(args); if (c_DBG) { System.out.println(thg.toString()); } } public UnitTH() { } public UnitTH(String... args) { doTheStuff(args); }
/** * UnitTH entry point. Sets the Locale to US and creates the UnitTH object. * * @param args * The command line arguments. */
UnitTH entry point. Sets the Locale to US and creates the UnitTH object
main
{ "repo_name": "zBrainiac/UnitTH", "path": "src/main/java/unitth/core/UnitTH.java", "license": "gpl-3.0", "size": 41866 }
[ "java.util.Locale" ]
import java.util.Locale;
import java.util.*;
[ "java.util" ]
java.util;
2,371,135
private ArrayMap<String, View> mapEnteringSharedElements(TransitionState state, Fragment inFragment, boolean isBack) { ArrayMap<String, View> namedViews = new ArrayMap<String, View>(); View root = inFragment.getView(); if (root != null) { if (mSharedElementSourceNames != null) { root.findNamedViews(namedViews); if (isBack) { namedViews = remapNames(mSharedElementSourceNames, mSharedElementTargetNames, namedViews); } else { namedViews.retainAll(mSharedElementTargetNames); } } } return namedViews; }
ArrayMap<String, View> function(TransitionState state, Fragment inFragment, boolean isBack) { ArrayMap<String, View> namedViews = new ArrayMap<String, View>(); View root = inFragment.getView(); if (root != null) { if (mSharedElementSourceNames != null) { root.findNamedViews(namedViews); if (isBack) { namedViews = remapNames(mSharedElementSourceNames, mSharedElementTargetNames, namedViews); } else { namedViews.retainAll(mSharedElementTargetNames); } } } return namedViews; }
/** * Maps shared elements to views in the entering fragment. * * @param state The transition State as returned from {@link #beginTransition( * android.util.SparseArray, android.util.SparseArray, boolean)}. * @param inFragment The last fragment to be added. * @param isBack true if this is popping the back stack or false if this is a * forward operation. */
Maps shared elements to views in the entering fragment
mapEnteringSharedElements
{ "repo_name": "syslover33/ctank", "path": "java/android-sdk-linux_r24.4.1_src/sources/android-23/android/app/BackStackRecord.java", "license": "gpl-3.0", "size": 70339 }
[ "android.util.ArrayMap", "android.view.View" ]
import android.util.ArrayMap; import android.view.View;
import android.util.*; import android.view.*;
[ "android.util", "android.view" ]
android.util; android.view;
2,495,327
Exchange invokeImplementationArtifactDeployment(Exchange exchange);
Exchange invokeImplementationArtifactDeployment(Exchange exchange);
/** * Invokes the deployment of an Implementation Artifact. * * @param exchange contains all needed information like the NodeTypeImplementation the ArtifactReferences to the * files that have to be deployed and the "ServiceEndpoint" property if it is defined. * @return the endpoint of the deployed Implementation Artifact as header field (see {@link MBHeader#ENDPOINT_URI}) * of the exchange message or null if the deployment failed. */
Invokes the deployment of an Implementation Artifact
invokeImplementationArtifactDeployment
{ "repo_name": "OpenTOSCA/container", "path": "org.opentosca.bus/org.opentosca.bus.management.deployment.plugin/src/main/java/org/opentosca/bus/management/deployment/plugin/IManagementBusDeploymentPluginService.java", "license": "apache-2.0", "size": 2084 }
[ "org.apache.camel.Exchange" ]
import org.apache.camel.Exchange;
import org.apache.camel.*;
[ "org.apache.camel" ]
org.apache.camel;
276,306
Composite container = new Composite(parent, SWT.NULL); setControl(container); container.setLayout(new GridLayout(2, false)); Group group = new Group(container, SWT.NONE); GridData gd_group = new GridData(SWT.LEFT, SWT.FILL, false, false, 1, 1); gd_group.widthHint = 350; gd_group.heightHint = 299; group.setLayoutData(gd_group); group.setText("Data File"); group.setLayout(new GridLayout(1, false)); tbFieldHeaders = new Table(group, SWT.BORDER | SWT.FULL_SELECTION); tbFieldHeaders.setLinesVisible(true); tbFieldHeaders.setHeaderVisible(true); tbFieldHeaders.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); // tbFieldHeader.addListener(SWT.Activate, new Listener() { // // @Override // public void handleEvent(Event arg0) { // tbFieldHeader.clearAll(); // tbFieldHeader.removeAll(); // for(int i=0; i<dto.getHeader().size(); i++){ // TableItem item = new TableItem(tbFieldHeader, SWT.NONE); // item.setText(0, dto.getHeader().get(i)); // } // } // }); Utils.setDndColumnSource(tbFieldHeaders); TableColumn tblclmnFieldHeaders = new TableColumn(tbFieldHeaders, SWT.NONE); tblclmnFieldHeaders.setWidth(200); tblclmnFieldHeaders.setText("Field Headers"); Composite composite = new Composite(container, SWT.NONE); composite.setLayout(new GridLayout(1, false)); composite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); tbGermplasm = new Table(composite, SWT.BORDER | SWT.FULL_SELECTION); tbGermplasm.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); tbGermplasm.setSize(599, 368); tbGermplasm.setLinesVisible(true); tbGermplasm.setHeaderVisible(true); TableColumn tblclmnIndex = new TableColumn(tbGermplasm, SWT.NONE); tblclmnIndex.setWidth(60); TableColumn tblclmnGermplasmInformation = new TableColumn(tbGermplasm, SWT.NONE); tblclmnGermplasmInformation.setWidth(150); tblclmnGermplasmInformation.setText("Germplasm Information"); TableColumn tableColumn_1 = new TableColumn(tbGermplasm, SWT.NONE); tableColumn_1.setWidth(150); tableColumn_1.setText("Header"); TableColumn tableColumn_2 = new TableColumn(tbGermplasm, SWT.NONE); tableColumn_2.setWidth(100); tableColumn_2.setText("From"); TableColumn tableColumn_3 = new TableColumn(tbGermplasm, SWT.NONE); tableColumn_3.setWidth(100); tableColumn_3.setText("To"); Utils.unmarshalColumns(tbGermplasm, config+"/xml/Germplasm.xml", dto.getGermplasmFields(), dto.getSubGermplasmFields()); Utils.setDndColumnTarget(tbFieldHeaders, tbGermplasm, dto.getGermplasmFields(), dto.getSubGermplasmFields()); Utils.setTableMouseLister(tbGermplasm, dto.getGermplasmFields()); tbGermplasmProp = new Table(composite, SWT.BORDER | SWT.FULL_SELECTION); tbGermplasmProp.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); tbGermplasmProp.setSize(258, 97); tbGermplasmProp.setLinesVisible(true); tbGermplasmProp.setHeaderVisible(true); TableColumn tblclmnIndex_1 = new TableColumn(tbGermplasmProp, SWT.NONE); tblclmnIndex_1.setWidth(60); TableColumn tableColumn_4 = new TableColumn(tbGermplasmProp, SWT.NONE); tableColumn_4.setWidth(200); tableColumn_4.setText("Property"); TableColumn tableColumn_5 = new TableColumn(tbGermplasmProp, SWT.NONE); tableColumn_5.setWidth(200); tableColumn_5.setText("Value"); Utils.loadTableProps(tbGermplasmProp, "germplasm_prop", dto.getGermplasmPropFields()); Utils.setDndColumnTarget(tbFieldHeaders, tbGermplasmProp, dto.getGermplasmPropFields(), null); Utils.setTableMouseLister(tbGermplasmProp, dto.getGermplasmPropFields()); TableColumn tblclmnPreview = new TableColumn(tbFieldHeaders, SWT.NONE); tblclmnPreview.setWidth(100); tblclmnPreview.setText("Preview"); container.addListener(SWT.Show, new Listener(){
Composite container = new Composite(parent, SWT.NULL); setControl(container); container.setLayout(new GridLayout(2, false)); Group group = new Group(container, SWT.NONE); GridData gd_group = new GridData(SWT.LEFT, SWT.FILL, false, false, 1, 1); gd_group.widthHint = 350; gd_group.heightHint = 299; group.setLayoutData(gd_group); group.setText(STR); group.setLayout(new GridLayout(1, false)); tbFieldHeaders = new Table(group, SWT.BORDER | SWT.FULL_SELECTION); tbFieldHeaders.setLinesVisible(true); tbFieldHeaders.setHeaderVisible(true); tbFieldHeaders.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); Utils.setDndColumnSource(tbFieldHeaders); TableColumn tblclmnFieldHeaders = new TableColumn(tbFieldHeaders, SWT.NONE); tblclmnFieldHeaders.setWidth(200); tblclmnFieldHeaders.setText(STR); Composite composite = new Composite(container, SWT.NONE); composite.setLayout(new GridLayout(1, false)); composite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); tbGermplasm = new Table(composite, SWT.BORDER | SWT.FULL_SELECTION); tbGermplasm.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); tbGermplasm.setSize(599, 368); tbGermplasm.setLinesVisible(true); tbGermplasm.setHeaderVisible(true); TableColumn tblclmnIndex = new TableColumn(tbGermplasm, SWT.NONE); tblclmnIndex.setWidth(60); TableColumn tblclmnGermplasmInformation = new TableColumn(tbGermplasm, SWT.NONE); tblclmnGermplasmInformation.setWidth(150); tblclmnGermplasmInformation.setText(STR); TableColumn tableColumn_1 = new TableColumn(tbGermplasm, SWT.NONE); tableColumn_1.setWidth(150); tableColumn_1.setText(STR); TableColumn tableColumn_2 = new TableColumn(tbGermplasm, SWT.NONE); tableColumn_2.setWidth(100); tableColumn_2.setText("From"); TableColumn tableColumn_3 = new TableColumn(tbGermplasm, SWT.NONE); tableColumn_3.setWidth(100); tableColumn_3.setText("To"); Utils.unmarshalColumns(tbGermplasm, config+STR, dto.getGermplasmFields(), dto.getSubGermplasmFields()); Utils.setDndColumnTarget(tbFieldHeaders, tbGermplasm, dto.getGermplasmFields(), dto.getSubGermplasmFields()); Utils.setTableMouseLister(tbGermplasm, dto.getGermplasmFields()); tbGermplasmProp = new Table(composite, SWT.BORDER | SWT.FULL_SELECTION); tbGermplasmProp.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); tbGermplasmProp.setSize(258, 97); tbGermplasmProp.setLinesVisible(true); tbGermplasmProp.setHeaderVisible(true); TableColumn tblclmnIndex_1 = new TableColumn(tbGermplasmProp, SWT.NONE); tblclmnIndex_1.setWidth(60); TableColumn tableColumn_4 = new TableColumn(tbGermplasmProp, SWT.NONE); tableColumn_4.setWidth(200); tableColumn_4.setText(STR); TableColumn tableColumn_5 = new TableColumn(tbGermplasmProp, SWT.NONE); tableColumn_5.setWidth(200); tableColumn_5.setText("Value"); Utils.loadTableProps(tbGermplasmProp, STR, dto.getGermplasmPropFields()); Utils.setDndColumnTarget(tbFieldHeaders, tbGermplasmProp, dto.getGermplasmPropFields(), null); Utils.setTableMouseLister(tbGermplasmProp, dto.getGermplasmPropFields()); TableColumn tblclmnPreview = new TableColumn(tbFieldHeaders, SWT.NONE); tblclmnPreview.setWidth(100); tblclmnPreview.setText(STR); container.addListener(SWT.Show, new Listener(){
/** * Create contents of the wizard. * @param parent */
Create contents of the wizard
createControl
{ "repo_name": "gobiiproject/GOBii-System", "path": "loaderui/src/edu/cornell/gobii/gdi/wizards/dnasamples/Pg2DNAsamples.java", "license": "mit", "size": 5420 }
[ "edu.cornell.gobii.gdi.utils.Utils", "org.eclipse.swt.layout.GridData", "org.eclipse.swt.layout.GridLayout", "org.eclipse.swt.widgets.Composite", "org.eclipse.swt.widgets.Group", "org.eclipse.swt.widgets.Listener", "org.eclipse.swt.widgets.Table", "org.eclipse.swt.widgets.TableColumn" ]
import edu.cornell.gobii.gdi.utils.Utils; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn;
import edu.cornell.gobii.gdi.utils.*; import org.eclipse.swt.layout.*; import org.eclipse.swt.widgets.*;
[ "edu.cornell.gobii", "org.eclipse.swt" ]
edu.cornell.gobii; org.eclipse.swt;
2,737,685
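A self-contained hedged sketch of the SWT building blocks this wizard page uses (Composite + GridLayout, a Table with header and columns); it is a standalone snippet, not the GOBii page itself, and the column and row values are illustrative:

import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;

public class TableSketch {
    public static void main(String[] args) {
        Display display = new Display();
        Shell shell = new Shell(display);
        shell.setLayout(new GridLayout(1, false));

        Table table = new Table(shell, SWT.BORDER | SWT.FULL_SELECTION);
        table.setHeaderVisible(true);
        table.setLinesVisible(true);
        table.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1));

        TableColumn header = new TableColumn(table, SWT.NONE);
        header.setText("Field Headers");
        header.setWidth(200);

        TableItem item = new TableItem(table, SWT.NONE);
        item.setText(0, "sample_id"); // illustrative row

        shell.setSize(300, 200);
        shell.open();
        while (!shell.isDisposed()) {
            if (!display.readAndDispatch()) display.sleep();
        }
        display.dispose();
    }
}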
public static Set<String> getStringSet( SharedPreferences prefs, String key, Set<String> defaultReturnValue) { return IMPL.getStringSet(prefs, key, defaultReturnValue); } public static class EditorCompat { // private static final String TAG = // "SharedPreferenceCompat.EditorCompat"; private static final EditorCompatImpl IMPL; private EditorCompat() { // hide constructor } static { final int version = Build.VERSION.SDK_INT; if (version >= 11) { IMPL = new EditorCompatImplHoneycomb(); } else { IMPL = new EditorCompatImplGB(); } }
static Set<String> function( SharedPreferences prefs, String key, Set<String> defaultReturnValue) { return IMPL.getStringSet(prefs, key, defaultReturnValue); } public static class EditorCompat { private static final EditorCompatImpl IMPL; private EditorCompat() { } static { final int version = Build.VERSION.SDK_INT; if (version >= 11) { IMPL = new EditorCompatImplHoneycomb(); } else { IMPL = new EditorCompatImplGB(); } }
/** * Retrieve a set of String values from the preferences. * * @param prefs SharedPreferences to attempts to get a set of Strings. * @param key The name of the preference to retrieve. * @param defaultReturnValue Values to return if this preference does not exist. * @return Returns the preference values if they exist, or defValues. Throws * ClassCastException if there is a preference with this name that * is not a Set. * @throws ClassCastException */
Retrieve a set of String values from the preferences
getStringSet
{ "repo_name": "h6ah4i/mulsellistprefcompat", "path": "lib/src/com/h6ah4i/android/compat/content/SharedPreferenceCompat.java", "license": "apache-2.0", "size": 6020 }
[ "android.content.SharedPreferences", "android.os.Build", "java.util.Set" ]
import android.content.SharedPreferences; import android.os.Build; import java.util.Set;
import android.content.*; import android.os.*; import java.util.*;
[ "android.content", "android.os", "java.util" ]
android.content; android.os; java.util;
685,231
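A hedged usage sketch of the compat helper from inside an Android component, assuming the library is on the classpath and a SharedPreferences instance is already available; the preference key is illustrative:

import java.util.Collections;
import java.util.Set;
import android.content.SharedPreferences;
import com.h6ah4i.android.compat.content.SharedPreferenceCompat;

public class PrefsSketch {
    static Set<String> readTags(SharedPreferences prefs) {
        // Falls back to an empty set when the preference has never been written.
        return SharedPreferenceCompat.getStringSet(prefs, "tags", Collections.<String>emptySet());
    }
}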
final CriteriaBuilder cb = em.getCriteriaBuilder(); final CriteriaQuery<Snapshot> query = cb.createQuery(Snapshot.class); Root<Snapshot> root = query.from(Snapshot.class); query.where(cb.equal(root.get(Snapshot_.project), project)); query.orderBy(cb.desc(root.get(Snapshot_.date))); return em.createQuery(query).getResultList(); }
final CriteriaBuilder cb = em.getCriteriaBuilder(); final CriteriaQuery<Snapshot> query = cb.createQuery(Snapshot.class); Root<Snapshot> root = query.from(Snapshot.class); query.where(cb.equal(root.get(Snapshot_.project), project)); query.orderBy(cb.desc(root.get(Snapshot_.date))); return em.createQuery(query).getResultList(); }
/** * Return a list of Snapshots of the provided project * @return */
Return a list of Snapshots of the provided project
getProjectSnapshot
{ "repo_name": "schlotze/u-qasar.platform", "path": "src/main/java/eu/uqasar/service/SnapshotService.java", "license": "apache-2.0", "size": 2120 }
[ "eu.uqasar.model.tree.historic.Snapshot", "javax.persistence.criteria.CriteriaBuilder", "javax.persistence.criteria.CriteriaQuery", "javax.persistence.criteria.Root" ]
import eu.uqasar.model.tree.historic.Snapshot; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Root;
import eu.uqasar.model.tree.historic.*; import javax.persistence.criteria.*;
[ "eu.uqasar.model", "javax.persistence" ]
eu.uqasar.model; javax.persistence;
1,501,484
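A hedged sketch of the same criteria query using string attribute paths instead of the generated Snapshot_ metamodel; the attribute names "project" and "date" are assumptions read off the metamodel references in the record:

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import eu.uqasar.model.tree.historic.Snapshot;

public class SnapshotQuerySketch {
    static List<Snapshot> snapshotsFor(EntityManager em, Object project) {
        CriteriaBuilder cb = em.getCriteriaBuilder();
        CriteriaQuery<Snapshot> query = cb.createQuery(Snapshot.class);
        Root<Snapshot> root = query.from(Snapshot.class);
        query.where(cb.equal(root.get("project"), project)); // Snapshot_.project
        query.orderBy(cb.desc(root.get("date")));            // Snapshot_.date, newest first
        return em.createQuery(query).getResultList();
    }
}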
public void setErrorHandler (ErrorHandler handler) { errorHandler = handler; }
void function (ErrorHandler handler) { errorHandler = handler; }
/** * Assigns the error handler used to report catalog errors. * These errors may come either from the SAX2 parser or * from the catalog parsing code driven by the parser. * * <p> If you're sharing the resolver between parsers, don't * change this once lookups have begun. * * @see #getErrorHandler * * @param handler The error handler, or null saying to use the default * (no diagnostics, and only fatal errors terminate loading). */
Assigns the error handler used to report catalog errors. These errors may come either from the SAX2 parser or from the catalog parsing code driven by the parser. If you're sharing the resolver between parsers, don't change this once lookups have begun
setErrorHandler
{ "repo_name": "unofficial-opensource-apple/gcc_40", "path": "libjava/gnu/xml/util/XCat.java", "license": "gpl-2.0", "size": 48916 }
[ "org.xml.sax.ErrorHandler" ]
import org.xml.sax.ErrorHandler;
import org.xml.sax.*;
[ "org.xml.sax" ]
org.xml.sax;
365,339
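A hedged sketch of a minimal ErrorHandler that could be passed to this setter: it reports warnings and recoverable errors but still lets fatal errors terminate loading, in line with the documented default behaviour. The resolver variable in the usage comment is hypothetical:

import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;

public class LoggingErrorHandler implements ErrorHandler {
    @Override
    public void warning(SAXParseException e) {
        System.err.println("catalog warning: " + e.getMessage());
    }

    @Override
    public void error(SAXParseException e) {
        // Recoverable error: report it but keep loading the catalog.
        System.err.println("catalog error: " + e.getMessage());
    }

    @Override
    public void fatalError(SAXParseException e) throws SAXException {
        throw e; // fatal errors still terminate loading
    }
}
// usage (hypothetical resolver instance): xcat.setErrorHandler(new LoggingErrorHandler());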
private static final DatabaseInterface findDatabaseInterface( String databaseTypeDesc ) throws KettleDatabaseException { PluginRegistry registry = PluginRegistry.getInstance(); PluginInterface plugin = registry.getPlugin( DatabasePluginType.class, databaseTypeDesc ); if ( plugin == null ) { plugin = registry.findPluginWithName( DatabasePluginType.class, databaseTypeDesc ); } if ( plugin == null ) { throw new KettleDatabaseException( "database type with plugin id [" + databaseTypeDesc + "] couldn't be found!" ); } return getDatabaseInterfacesMap().get( plugin.getIds()[0] ); }
static final DatabaseInterface function( String databaseTypeDesc ) throws KettleDatabaseException { PluginRegistry registry = PluginRegistry.getInstance(); PluginInterface plugin = registry.getPlugin( DatabasePluginType.class, databaseTypeDesc ); if ( plugin == null ) { plugin = registry.findPluginWithName( DatabasePluginType.class, databaseTypeDesc ); } if ( plugin == null ) { throw new KettleDatabaseException( STR + databaseTypeDesc + STR ); } return getDatabaseInterfacesMap().get( plugin.getIds()[0] ); }
/** * Search for the right type of DatabaseInterface object and return it. * * @param databaseTypeDesc * the type of DatabaseInterface to look for (id or description) * @return The requested DatabaseInterface * * @throws KettleDatabaseException * when the type could not be found or referenced. */
Search for the right type of DatabaseInterface object and return it
findDatabaseInterface
{ "repo_name": "yshakhau/pentaho-kettle", "path": "core/src/org/pentaho/di/core/database/DatabaseMeta.java", "license": "apache-2.0", "size": 90969 }
[ "org.pentaho.di.core.exception.KettleDatabaseException", "org.pentaho.di.core.plugins.DatabasePluginType", "org.pentaho.di.core.plugins.PluginInterface", "org.pentaho.di.core.plugins.PluginRegistry" ]
import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.plugins.DatabasePluginType; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.exception.*; import org.pentaho.di.core.plugins.*;
[ "org.pentaho.di" ]
org.pentaho.di;
226,518
@SuppressWarnings({ "unchecked", "rawtypes", "nls" }) private final void determineParameterType(final Class enumClass) throws ParameterException { Method method; String name = null; String value = null; this.enumParameterClass = enumClass; Object o = Enum.valueOf(enumClass, "UNKNOWN"); try { method = enumClass.getDeclaredMethod("fromName", String.class); for (IParameterConfiguration parameter : configuration.getContent().getElements()) { value = parameter.getName(); parameter.setType((Enum<? extends IParameterType>) method.invoke(o, value)); parameter.setEntityType(((IParameterType) parameter.getType()).getEntityType()); if (parameter.getType() == null) { throw new ParameterException("No parameter definition found for: " + value); } } for (IParameterExecution parameter : execution.getContent().getElements()) { name = parameter.getName(); parameter.setType((Enum<? extends IParameterType>) method.invoke(o, name)); parameter.setEntityType(((IParameterType) parameter.getType()).getEntityType()); parameter.setStatus(ParameterStatusType.UNPROCESSED); parameter.setConfiguration(configuration.getParameter(parameter.getType())); } } catch (Exception e) { throw new ParameterException(String.format("Unable to create enumerated value for enumeration: %1s, parameter: %2s", enumClass.getName(), name == null ? value : name)); } checkParameterDefinitionAgainstEnumeration(o.getClass()); }
@SuppressWarnings({ STR, STR, "nls" }) final void function(final Class enumClass) throws ParameterException { Method method; String name = null; String value = null; this.enumParameterClass = enumClass; Object o = Enum.valueOf(enumClass, STR); try { method = enumClass.getDeclaredMethod(STR, String.class); for (IParameterConfiguration parameter : configuration.getContent().getElements()) { value = parameter.getName(); parameter.setType((Enum<? extends IParameterType>) method.invoke(o, value)); parameter.setEntityType(((IParameterType) parameter.getType()).getEntityType()); if (parameter.getType() == null) { throw new ParameterException(STR + value); } } for (IParameterExecution parameter : execution.getContent().getElements()) { name = parameter.getName(); parameter.setType((Enum<? extends IParameterType>) method.invoke(o, name)); parameter.setEntityType(((IParameterType) parameter.getType()).getEntityType()); parameter.setStatus(ParameterStatusType.UNPROCESSED); parameter.setConfiguration(configuration.getParameter(parameter.getType())); } } catch (Exception e) { throw new ParameterException(String.format(STR, enumClass.getName(), name == null ? value : name)); } checkParameterDefinitionAgainstEnumeration(o.getClass()); }
/** * Determines the parameters' type. * <hr> * @param enumClass Parameter enumeration class to use to determine the parameters' type. * @throws ParameterException Thrown in case an error occurred when determining a parameter type. */
Determines the parameters' type.
determineParameterType
{ "repo_name": "ressec/demeter", "path": "demeter-base/src/main/java/com/heliosphere/demeter/base/runner/AbstractRunner.java", "license": "apache-2.0", "size": 24883 }
[ "com.heliosphere.demeter.base.runner.parameter.base.IParameterType", "com.heliosphere.demeter.base.runner.parameter.base.ParameterException", "com.heliosphere.demeter.base.runner.parameter.base.ParameterStatusType", "com.heliosphere.demeter.base.runner.parameter.configuration.IParameterConfiguration", "com.heliosphere.demeter.base.runner.parameter.execution.IParameterExecution", "java.lang.reflect.Method" ]
import com.heliosphere.demeter.base.runner.parameter.base.IParameterType; import com.heliosphere.demeter.base.runner.parameter.base.ParameterException; import com.heliosphere.demeter.base.runner.parameter.base.ParameterStatusType; import com.heliosphere.demeter.base.runner.parameter.configuration.IParameterConfiguration; import com.heliosphere.demeter.base.runner.parameter.execution.IParameterExecution; import java.lang.reflect.Method;
import com.heliosphere.demeter.base.runner.parameter.base.*; import com.heliosphere.demeter.base.runner.parameter.configuration.*; import com.heliosphere.demeter.base.runner.parameter.execution.*; import java.lang.reflect.*;
[ "com.heliosphere.demeter", "java.lang" ]
com.heliosphere.demeter; java.lang;
1,910,633
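A self-contained hedged sketch of the reflection pattern the record relies on: looking up a fromName(String) method with getDeclaredMethod and invoking it against any constant of the enum to resolve a name into a constant. The Color enum here is purely illustrative, not the project's parameter enumeration:

import java.lang.reflect.Method;

public class FromNameSketch {
    enum Color {
        UNKNOWN("unknown"), RED("red"), GREEN("green");

        private final String name;
        Color(String name) { this.name = name; }

        // The lookup method the reflective code expects to find on the enum.
        public Color fromName(String wanted) {
            for (Color c : values()) {
                if (c.name.equals(wanted)) return c;
            }
            throw new IllegalArgumentException("no such color: " + wanted);
        }
    }

    @SuppressWarnings({"unchecked", "rawtypes"})
    public static void main(String[] args) throws Exception {
        Class enumClass = Color.class;
        Object anyConstant = Enum.valueOf(enumClass, "UNKNOWN"); // instance to invoke against
        Method fromName = enumClass.getDeclaredMethod("fromName", String.class);
        Object resolved = fromName.invoke(anyConstant, "green");
        System.out.println(resolved); // GREEN
    }
}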
public String body() { if (body == null) { try { body = IOUtils.toString(servletRequest.getInputStream()); } catch (Exception e) { throw Throwables.propagate(e); } } return body; }
String function() { if (body == null) { try { body = IOUtils.toString(servletRequest.getInputStream()); } catch (Exception e) { throw Throwables.propagate(e); } } return body; }
/** * Returns the request body sent by the client */
Returns the request body sent by the client
body
{ "repo_name": "mAzurkovic/concourse", "path": "concourse-server/src/main/java/org/cinchapi/vendor/spark/Request.java", "license": "apache-2.0", "size": 13414 }
[ "com.google.common.base.Throwables", "org.cinchapi.vendor.spark.utils.IOUtils" ]
import com.google.common.base.Throwables; import org.cinchapi.vendor.spark.utils.IOUtils;
import com.google.common.base.*; import org.cinchapi.vendor.spark.utils.*;
[ "com.google.common", "org.cinchapi.vendor" ]
com.google.common; org.cinchapi.vendor;
57,006
ClusterState clusterState = clusterService.state(); ClusterStateObserver observer = new ClusterStateObserver(clusterState, clusterService, null, logger, threadContext); observer.waitForNextChange(new NodeAndClusterIdStateListener(), NodeAndClusterIdStateListener::isNodeAndClusterIdPresent); }
ClusterState clusterState = clusterService.state(); ClusterStateObserver observer = new ClusterStateObserver(clusterState, clusterService, null, logger, threadContext); observer.waitForNextChange(new NodeAndClusterIdStateListener(), NodeAndClusterIdStateListener::isNodeAndClusterIdPresent); }
/** * Subscribes for the first cluster state update where nodeId and clusterId is present * and sets these values in {@link NodeAndClusterIdConverter}. */
Subscribes for the first cluster state update where nodeId and clusterId is present and sets these values in <code>NodeAndClusterIdConverter</code>
getAndSetNodeIdAndClusterId
{ "repo_name": "coding0011/elasticsearch", "path": "server/src/main/java/org/elasticsearch/common/logging/NodeAndClusterIdStateListener.java", "license": "apache-2.0", "size": 3264 }
[ "org.elasticsearch.cluster.ClusterState", "org.elasticsearch.cluster.ClusterStateObserver" ]
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.*;
[ "org.elasticsearch.cluster" ]
org.elasticsearch.cluster;
458,252
List<AnalyticsTableColumn> getFixedColumns();
List<AnalyticsTableColumn> getFixedColumns();
/** * Returns a list of non-dynamic {@link AnalyticsTableColumn}. * * @return a List of {@link AnalyticsTableColumn}. */
Returns a list of non-dynamic <code>AnalyticsTableColumn</code>
getFixedColumns
{ "repo_name": "msf-oca-his/dhis2-core", "path": "dhis-2/dhis-services/dhis-service-analytics/src/main/java/org/hisp/dhis/analytics/AnalyticsTableManager.java", "license": "bsd-3-clause", "size": 6537 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,630,431
public void setFromStream(InputStream in) throws IOException { clearBoard(); BufferedReader r = new BufferedReader(new InputStreamReader(in)); for (int y = 0; y < height; ++y) { String[] tokens = r.readLine().split(" "); for (int x = 0; x < width; ++x) { if (!"*".equals(tokens[x].substring(0, 1))) { Piece piece = Piece.valueOf(tokens[x]); grid[x][y] = piece; Log.d(TAG, String.format("added %s at (%d,%d)", piece, x, y)); } } } this.whoseTurn = "G"; this.movesRemaining = Board.MOVES_PER_TURN; }
void function(InputStream in) throws IOException { clearBoard(); BufferedReader r = new BufferedReader(new InputStreamReader(in)); for (int y = 0; y < height; ++y) { String[] tokens = r.readLine().split(" "); for (int x = 0; x < width; ++x) { if (!"*".equals(tokens[x].substring(0, 1))) { Piece piece = Piece.valueOf(tokens[x]); grid[x][y] = piece; Log.d(TAG, String.format(STR, piece, x, y)); } } } this.whoseTurn = "G"; this.movesRemaining = Board.MOVES_PER_TURN; }
/** * Initializes the board from a text file input stream. * * @param in * The input stream with the file in the standard format. * @throws IOException * For any errors during reading the stream. */
Initializes the board from a text file input stream
setFromStream
{ "repo_name": "coreyabshire/DTF", "path": "src/edu/purdue/dtf/game/Board.java", "license": "gpl-2.0", "size": 13665 }
[ "android.util.Log", "java.io.BufferedReader", "java.io.IOException", "java.io.InputStream", "java.io.InputStreamReader" ]
import android.util.Log; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader;
import android.util.*; import java.io.*;
[ "android.util", "java.io" ]
android.util; java.io;
2,347,292
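A self-contained hedged sketch of the same parsing idea without the Android and game classes: read a height x width grid of whitespace-separated tokens and keep every cell that is not the "*" placeholder. The dimensions and the sample input are illustrative:

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class GridParseSketch {
    static String[][] parse(InputStream in, int width, int height) throws IOException {
        String[][] grid = new String[width][height];
        BufferedReader r = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
        for (int y = 0; y < height; ++y) {
            String[] tokens = r.readLine().split(" ");
            for (int x = 0; x < width; ++x) {
                if (!tokens[x].startsWith("*")) {
                    grid[x][y] = tokens[x]; // a real board would convert the token to a Piece here
                }
            }
        }
        return grid;
    }

    public static void main(String[] args) throws IOException {
        String board = "G1 ** R2\n** G3 **\n";
        String[][] grid = parse(new ByteArrayInputStream(board.getBytes(StandardCharsets.UTF_8)), 3, 2);
        System.out.println(grid[0][0] + " " + grid[2][0] + " " + grid[1][1]); // G1 R2 G3
    }
}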
public static boolean equals(FieldSchema fschema, FieldSchema fother, boolean relaxInner, boolean relaxAlias) { if (fschema == null) { return false ; } if (fother == null) { return false ; } if (fschema.type != fother.type) { return false ; } if (!relaxAlias) { if ( (fschema.alias == null) && (fother.alias == null) ) { // good } else if ( (fschema.alias != null) && (fother.alias == null) ) { return false ; } else if ( (fschema.alias == null) && (fother.alias != null) ) { return false ; } else if (!fschema.alias.equals(fother.alias)) { return false ; } } if ( (!relaxInner) && (DataType.isSchemaType(fschema.type))) { // Don't do the comparison if both embedded schemas are // null. That will cause Schema.equals to return false, // even though we want to view that as true. if (!(fschema.schema == null && fother.schema == null)) { // compare recursively using schema if (!Schema.equals(fschema.schema, fother.schema, false, relaxAlias)) { return false ; } } } return true ; }
static boolean function(FieldSchema fschema, FieldSchema fother, boolean relaxInner, boolean relaxAlias) { if (fschema == null) { return false ; } if (fother == null) { return false ; } if (fschema.type != fother.type) { return false ; } if (!relaxAlias) { if ( (fschema.alias == null) && (fother.alias == null) ) { } else if ( (fschema.alias != null) && (fother.alias == null) ) { return false ; } else if ( (fschema.alias == null) && (fother.alias != null) ) { return false ; } else if (!fschema.alias.equals(fother.alias)) { return false ; } } if ( (!relaxInner) && (DataType.isSchemaType(fschema.type))) { if (!(fschema.schema == null && fother.schema == null)) { if (!Schema.equals(fschema.schema, fother.schema, false, relaxAlias)) { return false ; } } } return true ; }
/*** * Compare two field schema for equality * @param fschema * @param fother * @param relaxInner If true, we don't check inner tuple schemas * @param relaxAlias If true, we don't check aliases * @return true if FieldSchemas are equal, false otherwise */
Compare two field schema for equality
equals
{ "repo_name": "kaituo/sedge", "path": "trunk/src/org/apache/pig/impl/logicalLayer/schema/Schema.java", "license": "mit", "size": 72734 }
[ "org.apache.pig.data.DataType" ]
import org.apache.pig.data.DataType;
import org.apache.pig.data.*;
[ "org.apache.pig" ]
org.apache.pig;
1,125,477
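The four-way null branching on aliases in the record can be expressed with java.util.Objects.equals. A hedged sketch of that simplification (not Pig's code), keeping the relaxAlias switch:

import java.util.Objects;

public class AliasEqualsSketch {
    // Equivalent to the alias block in the record: both-null passes, one-null fails,
    // otherwise the two strings are compared.
    static boolean aliasesMatch(String alias, String otherAlias, boolean relaxAlias) {
        return relaxAlias || Objects.equals(alias, otherAlias);
    }

    public static void main(String[] args) {
        System.out.println(aliasesMatch(null, null, false)); // true
        System.out.println(aliasesMatch("a", null, false));  // false
        System.out.println(aliasesMatch("a", "b", true));    // true (aliases ignored)
    }
}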
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore ) throws KettleStepException;
SQLStatement function( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore ) throws KettleStepException;
/** * Standard method to return an SQLStatement object with SQL statements that the step needs in order to work * correctly. This can mean "create table", "create index" statements but also "alter table ... add/drop/modify" * statements. * * @return The SQL Statements for this step. If nothing has to be done, the SQLStatement.getSQL() == null. @see * SQLStatement * @param transMeta * TransInfo object containing the complete transformation * @param stepMeta * StepMeta object containing the complete step * @param prev * Row containing meta-data for the input fields (no data) * @param repository * the repository to use to load Kettle metadata objects impacting the output fields * @param metaStore * the MetaStore to use to load additional external data or metadata impacting the output fields */
Standard method to return an SQLStatement object with SQL statements that the step needs in order to work correctly. This can mean "create table", "create index" statements but also "alter table ... add/drop/modify" statements
getSQLStatements
{ "repo_name": "apratkin/pentaho-kettle", "path": "engine/src/org/pentaho/di/trans/step/StepMetaInterface.java", "license": "apache-2.0", "size": 29826 }
[ "org.pentaho.di.core.SQLStatement", "org.pentaho.di.core.exception.KettleStepException", "org.pentaho.di.core.row.RowMetaInterface", "org.pentaho.di.repository.Repository", "org.pentaho.di.trans.TransMeta", "org.pentaho.metastore.api.IMetaStore" ]
import org.pentaho.di.core.SQLStatement; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.TransMeta; import org.pentaho.metastore.api.IMetaStore;
import org.pentaho.di.core.*; import org.pentaho.di.core.exception.*; import org.pentaho.di.core.row.*; import org.pentaho.di.repository.*; import org.pentaho.di.trans.*; import org.pentaho.metastore.api.*;
[ "org.pentaho.di", "org.pentaho.metastore" ]
org.pentaho.di; org.pentaho.metastore;
1,076,613
private ControlFlowGraph<Node> createCfg(String input, boolean runSynBlockPass) { Compiler compiler = new Compiler(); ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, true, true); Node root = compiler.parseSyntheticCode("cfgtest", input); if (runSynBlockPass) { CreateSyntheticBlocks pass = new CreateSyntheticBlocks( compiler, "START", "END"); pass.process(null, root); } cfa.process(null, root); return cfa.getCfg(); }
ControlFlowGraph<Node> function(String input, boolean runSynBlockPass) { Compiler compiler = new Compiler(); ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, true, true); Node root = compiler.parseSyntheticCode(STR, input); if (runSynBlockPass) { CreateSyntheticBlocks pass = new CreateSyntheticBlocks( compiler, "START", "END"); pass.process(null, root); } cfa.process(null, root); return cfa.getCfg(); }
/** * Given an input in JavaScript, get a control flow graph for it. * * @param input Input JavaScript. */
Given an input in JavaScript, get a control flow graph for it
createCfg
{ "repo_name": "GerHobbelt/closure-compiler", "path": "test/com/google/javascript/jscomp/ControlFlowAnalysisTest.java", "license": "apache-2.0", "size": 65633 }
[ "com.google.javascript.rhino.Node" ]
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.*;
[ "com.google.javascript" ]
com.google.javascript;
711,351
CompletableFuture<Void> enable(ScheduledExecutorService scheduler, long administrativeYieldMillis);
CompletableFuture<Void> enable(ScheduledExecutorService scheduler, long administrativeYieldMillis);
/** * Will enable this service. Enabling of the service typically means * invoking its operation that is annotated with @OnEnabled. * * @param scheduler * implementation of {@link ScheduledExecutorService} used to * initiate service enabling task as well as its re-tries * @param administrativeYieldMillis * the amount of milliseconds to wait for administrative yield * * @return a CompletableFuture that can be used to wait for the service to finish enabling */
Will enable this service. Enabling of the service typically means invoking its operation that is annotated with @OnEnabled
enable
{ "repo_name": "jjmeyer0/nifi", "path": "nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/service/ControllerServiceNode.java", "license": "apache-2.0", "size": 7724 }
[ "java.util.concurrent.CompletableFuture", "java.util.concurrent.ScheduledExecutorService" ]
import java.util.concurrent.CompletableFuture; import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
1,692,907
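A hedged usage sketch for the enable contract described above: serviceNode is an assumed existing ControllerServiceNode, the single-threaded scheduler and 30-second administrative yield are illustrative values, and join() simply blocks until the returned CompletableFuture completes.

// Sketch only: enable a controller service and wait for its @OnEnabled logic to finish.
ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
CompletableFuture<Void> enabled = serviceNode.enable(scheduler, 30_000L);
enabled.join(); // completes once the service has finished enabling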
private List<Examinee> convertIEExaminee(IEExamineeWrapper ieExamineeWrapper, List<Researcher> researchers, List<Institution> institutions) { ArrayList<Examinee> examineds = new ArrayList<>(); Examinee examined; for (IEExamineeWrapper.IEExaminee ieExaminee : ieExamineeWrapper.examinded) { if (dbAccess().getDaoSession().getExamineeDao().queryBuilder().where(ExamineeDao.Properties.TextId.eq(ieExaminee.id)).list().size() > 0) { continue; } examined = new Examinee(); examined.setTextId(ieExaminee.id); examined.setFirstName(ieExaminee.name); examined.setLastName(ieExaminee.surname); examined.setAdditionalData(ieExaminee.additionalData); try { examined.setBirthday(TimeUtils.stringToDate(ieExaminee.birth, TimeUtils.defaultPatern)); } catch (ParseException e) { LogUtils.d(TAG, "Birthday ", e); e.printStackTrace(); } Gender gender = Gender.resolveGender(ieExaminee.gender); examined.setGender(gender.toString()); resolveDepartment(examined, ieExaminee); for (Institution institution : institutions) { if (institution.getTextId().equals(ieExaminee.school)) { examined.setInstitution(institution); break; } } dbAccess().getDaoSession().getExamineeDao().insert(examined); ResearcherJoinExaminee researcherJoinExaminee; if (ieExaminee.assignedResearchers == null || ieExaminee.assignedResearchers.size() == 0) { List<Researcher> researcherList = dbAccess().getDaoSession().getResearcherDao(). queryBuilder().where(ResearcherDao.Properties.TextId.eq(ieExaminee.assign)).list(); if (researcherList.size() > 0) { Researcher researcher = researcherList.get(0); researcherJoinExaminee = new ResearcherJoinExaminee(); researcherJoinExaminee.setResearcher(researcher); researcherJoinExaminee.setExaminee(examined); dbAccess().getDaoSession().getResearcherJoinExamineeDao().insert(researcherJoinExaminee); researcher.resetResearcherJoinExamineeList(); examined.resetResearcherJoinExamineeList(); } } else { for (String assignedResearcher : ieExaminee.assignedResearchers) { for (Researcher researcher : researchers) { if (researcher.getTextId().equals(assignedResearcher)) { researcherJoinExaminee = new ResearcherJoinExaminee(); researcherJoinExaminee.setResearcher(researcher); researcherJoinExaminee.setExaminee(examined); dbAccess().getDaoSession().getResearcherJoinExamineeDao().insert(researcherJoinExaminee); researcher.resetResearcherJoinExamineeList(); examined.resetResearcherJoinExamineeList(); } } } } examineds.add(examined); } return examineds; }
List<Examinee> function(IEExamineeWrapper ieExamineeWrapper, List<Researcher> researchers, List<Institution> institutions) { ArrayList<Examinee> examineds = new ArrayList<>(); Examinee examined; for (IEExamineeWrapper.IEExaminee ieExaminee : ieExamineeWrapper.examinded) { if (dbAccess().getDaoSession().getExamineeDao().queryBuilder().where(ExamineeDao.Properties.TextId.eq(ieExaminee.id)).list().size() > 0) { continue; } examined = new Examinee(); examined.setTextId(ieExaminee.id); examined.setFirstName(ieExaminee.name); examined.setLastName(ieExaminee.surname); examined.setAdditionalData(ieExaminee.additionalData); try { examined.setBirthday(TimeUtils.stringToDate(ieExaminee.birth, TimeUtils.defaultPatern)); } catch (ParseException e) { LogUtils.d(TAG, STR, e); e.printStackTrace(); } Gender gender = Gender.resolveGender(ieExaminee.gender); examined.setGender(gender.toString()); resolveDepartment(examined, ieExaminee); for (Institution institution : institutions) { if (institution.getTextId().equals(ieExaminee.school)) { examined.setInstitution(institution); break; } } dbAccess().getDaoSession().getExamineeDao().insert(examined); ResearcherJoinExaminee researcherJoinExaminee; if (ieExaminee.assignedResearchers == null || ieExaminee.assignedResearchers.size() == 0) { List<Researcher> researcherList = dbAccess().getDaoSession().getResearcherDao(). queryBuilder().where(ResearcherDao.Properties.TextId.eq(ieExaminee.assign)).list(); if (researcherList.size() > 0) { Researcher researcher = researcherList.get(0); researcherJoinExaminee = new ResearcherJoinExaminee(); researcherJoinExaminee.setResearcher(researcher); researcherJoinExaminee.setExaminee(examined); dbAccess().getDaoSession().getResearcherJoinExamineeDao().insert(researcherJoinExaminee); researcher.resetResearcherJoinExamineeList(); examined.resetResearcherJoinExamineeList(); } } else { for (String assignedResearcher : ieExaminee.assignedResearchers) { for (Researcher researcher : researchers) { if (researcher.getTextId().equals(assignedResearcher)) { researcherJoinExaminee = new ResearcherJoinExaminee(); researcherJoinExaminee.setResearcher(researcher); researcherJoinExaminee.setExaminee(examined); dbAccess().getDaoSession().getResearcherJoinExamineeDao().insert(researcherJoinExaminee); researcher.resetResearcherJoinExamineeList(); examined.resetResearcherJoinExamineeList(); } } } } examineds.add(examined); } return examineds; }
/** * Converts xml data to database readable format * * @param ieExamineeWrapper * @param researchers * @param institutions * @return */
Converts xml data to database readable format
convertIEExaminee
{ "repo_name": "instytut-badan-edukacyjnych/platforma-testow", "path": "LoremIpsum/src/main/java/pl/edu/ibe/loremipsum/resultfixer/data/ImportExportService.java", "license": "gpl-3.0", "size": 20231 }
[ "java.text.ParseException", "java.util.ArrayList", "java.util.List", "pl.edu.ibe.loremipsum.configuration.Gender", "pl.edu.ibe.loremipsum.db.schema.Examinee", "pl.edu.ibe.loremipsum.db.schema.ExamineeDao", "pl.edu.ibe.loremipsum.db.schema.Institution", "pl.edu.ibe.loremipsum.db.schema.Researcher", "pl.edu.ibe.loremipsum.db.schema.ResearcherDao", "pl.edu.ibe.loremipsum.db.schema.ResearcherJoinExaminee", "pl.edu.ibe.loremipsum.tools.LogUtils", "pl.edu.ibe.loremipsum.tools.TimeUtils" ]
import java.text.ParseException; import java.util.ArrayList; import java.util.List; import pl.edu.ibe.loremipsum.configuration.Gender; import pl.edu.ibe.loremipsum.db.schema.Examinee; import pl.edu.ibe.loremipsum.db.schema.ExamineeDao; import pl.edu.ibe.loremipsum.db.schema.Institution; import pl.edu.ibe.loremipsum.db.schema.Researcher; import pl.edu.ibe.loremipsum.db.schema.ResearcherDao; import pl.edu.ibe.loremipsum.db.schema.ResearcherJoinExaminee; import pl.edu.ibe.loremipsum.tools.LogUtils; import pl.edu.ibe.loremipsum.tools.TimeUtils;
import java.text.*; import java.util.*; import pl.edu.ibe.loremipsum.configuration.*; import pl.edu.ibe.loremipsum.db.schema.*; import pl.edu.ibe.loremipsum.tools.*;
[ "java.text", "java.util", "pl.edu.ibe" ]
java.text; java.util; pl.edu.ibe;
1,477,186
@Override protected Schema fetchSchemaByKey(String key) throws SchemaRegistryException { String schemaUrl = KafkaAvroSchemaRegistry.this.url + GET_RESOURCE_BY_ID + key; GetMethod get = new GetMethod(schemaUrl); int statusCode; String schemaString; HttpClient httpClient = this.borrowClient(); try { statusCode = httpClient.executeMethod(get); schemaString = get.getResponseBodyAsString(); } catch (IOException e) { throw new SchemaRegistryException(e); } finally { get.releaseConnection(); this.httpClientPool.returnObject(httpClient); } if (statusCode != HttpStatus.SC_OK) { throw new SchemaRegistryException( String.format("Schema with key %s cannot be retrieved, statusCode = %d", key, statusCode)); } Schema schema; try { schema = new Schema.Parser().parse(schemaString); } catch (Throwable t) { throw new SchemaRegistryException(String.format("Schema with ID = %s cannot be parsed", key), t); } return schema; }
Schema function(String key) throws SchemaRegistryException { String schemaUrl = KafkaAvroSchemaRegistry.this.url + GET_RESOURCE_BY_ID + key; GetMethod get = new GetMethod(schemaUrl); int statusCode; String schemaString; HttpClient httpClient = this.borrowClient(); try { statusCode = httpClient.executeMethod(get); schemaString = get.getResponseBodyAsString(); } catch (IOException e) { throw new SchemaRegistryException(e); } finally { get.releaseConnection(); this.httpClientPool.returnObject(httpClient); } if (statusCode != HttpStatus.SC_OK) { throw new SchemaRegistryException( String.format(STR, key, statusCode)); } Schema schema; try { schema = new Schema.Parser().parse(schemaString); } catch (Throwable t) { throw new SchemaRegistryException(String.format(STR, key), t); } return schema; }
/** * Fetch schema by key. */
Fetch schema by key
fetchSchemaByKey
{ "repo_name": "zliu41/gobblin", "path": "gobblin-metrics/src/main/java/gobblin/metrics/kafka/KafkaAvroSchemaRegistry.java", "license": "apache-2.0", "size": 9246 }
[ "java.io.IOException", "org.apache.avro.Schema", "org.apache.commons.httpclient.HttpClient", "org.apache.commons.httpclient.HttpStatus", "org.apache.commons.httpclient.methods.GetMethod" ]
import java.io.IOException; import org.apache.avro.Schema; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.HttpStatus; import org.apache.commons.httpclient.methods.GetMethod;
import java.io.*; import org.apache.avro.*; import org.apache.commons.httpclient.*; import org.apache.commons.httpclient.methods.*;
[ "java.io", "org.apache.avro", "org.apache.commons" ]
java.io; org.apache.avro; org.apache.commons;
1,033,922
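The record above boils down to a common pattern: GET a schema string over HTTP with Commons HttpClient and parse it with Avro. The standalone sketch below repeats that pattern outside the registry class; the URL parameter is hypothetical and error handling is reduced to the essentials.

// Self-contained sketch of the fetch-and-parse pattern (the URL is an assumption).
Schema fetchSchema(String schemaUrl) throws IOException {
  HttpClient client = new HttpClient();
  GetMethod get = new GetMethod(schemaUrl);
  try {
    if (client.executeMethod(get) == HttpStatus.SC_OK) {
      // Parse the response body as an Avro schema
      return new Schema.Parser().parse(get.getResponseBodyAsString());
    }
    throw new IOException("Schema could not be retrieved from " + schemaUrl);
  } finally {
    get.releaseConnection(); // always release the connection
  }
}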
private final NFSSrvSession findAuthNullSession(RpcPacket rpc, Object sessKey) { // Check if the null authentication session table is valid NFSSrvSession sess = null; if (m_sessAuthNull != null) { // Search for the required session using the client IP address sess = m_sessAuthNull.findSession(sessKey); } else { // Allocate the null authentication session table m_sessAuthNull = new NFSSessionTable(); } // Check if we found the required session object if (sess == null) { // Create a new session for the request sess = new NFSSrvSession(this, rpc.getClientAddress(), rpc.getClientPort(), rpc.getClientProtocol()); sess.setAuthIdentifier(sessKey); // Get the client information from the RPC sess.setClientInformation(getRpcAuthenticator().getRpcClientInformation(sessKey, rpc)); // Add the new session to the session table m_sessAuthNull.addSession(sess); // Set the session id and debug output prefix sess.setUniqueId("" + sessKey.hashCode()); sess.setDebugPrefix("[NFS_AN_" + getNextSessionId() + "] "); sess.setDebug(getNFSConfiguration().getNFSDebug()); // DEBUG if (Debug.EnableInfo && hasDebugFlag(DBG_SESSION)) Debug.println("[NFS] Added Null session " + sess.getUniqueId()); } // Return the session return sess; }
final NFSSrvSession function(RpcPacket rpc, Object sessKey) { NFSSrvSession sess = null; if (m_sessAuthNull != null) { sess = m_sessAuthNull.findSession(sessKey); } else { m_sessAuthNull = new NFSSessionTable(); } if (sess == null) { sess = new NFSSrvSession(this, rpc.getClientAddress(), rpc.getClientPort(), rpc.getClientProtocol()); sess.setAuthIdentifier(sessKey); sess.setClientInformation(getRpcAuthenticator().getRpcClientInformation(sessKey, rpc)); m_sessAuthNull.addSession(sess); sess.setUniqueId("" + sessKey.hashCode()); sess.setDebugPrefix(STR + getNextSessionId() + STR); sess.setDebug(getNFSConfiguration().getNFSDebug()); if (Debug.EnableInfo && hasDebugFlag(DBG_SESSION)) Debug.println(STR + sess.getUniqueId()); } return sess; }
/** * Find, or create, a null authentication session for the specified request * * @param rpc RpcPacket * @param sessKey Object * @return NFSSrvSession */
Find, or create, a null authentication session for the specified request
findAuthNullSession
{ "repo_name": "loftuxab/community-edition-old", "path": "projects/alfresco-jlan/source/java/org/alfresco/jlan/oncrpc/nfs/NFSServer.java", "license": "lgpl-3.0", "size": 140029 }
[ "org.alfresco.jlan.oncrpc.RpcPacket" ]
import org.alfresco.jlan.oncrpc.RpcPacket;
import org.alfresco.jlan.oncrpc.*;
[ "org.alfresco.jlan" ]
org.alfresco.jlan;
1,629,320
public synchronized void write(byte[] bytes) throws IOException { write(bytes, 0, bytes.length); }
synchronized void function(byte[] bytes) throws IOException { write(bytes, 0, bytes.length); }
/** * Write a complete byte array to the underlying ByteBuffer via this * OutputStream. * * @param bytes Byte array to be written. */
Write a complete byte array to the underlying ByteBuffer via this OutputStream
write
{ "repo_name": "A-Studio0/InsPicSoc", "path": "src/de/tavendo/autobahn/ByteBufferOutputStream.java", "license": "apache-2.0", "size": 4911 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
513,576
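A brief usage note for the write(byte[]) overload above: out is assumed to be an already-constructed ByteBufferOutputStream, and only the delegation to write(bytes, 0, bytes.length) is taken from the record itself.

// Hypothetical usage: append a UTF-8 payload in one call (may throw IOException).
byte[] payload = "hello websocket".getBytes("UTF-8");
out.write(payload); // equivalent to out.write(payload, 0, payload.length)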