Dataset schema (column name, type, value/length range):

  method                 string     lengths 13 to 441k
  clean_method           string     lengths 7 to 313k
  doc                    string     lengths 17 to 17.3k
  comment                string     lengths 3 to 1.42k
  method_name            string     lengths 1 to 273
  extra                  dict
  imports                sequence
  imports_info           string     lengths 19 to 34.8k
  cluster_imports_info   string     lengths 15 to 3.66k
  libraries              sequence
  libraries_info         string     lengths 6 to 661
  id                     int64      values 0 to 2.92M
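The schema above only implies how a row is structured through its column names and types. As a rough illustration, here is a minimal Java sketch of how one row might be modeled, assuming a straightforward type mapping ("sequence" as a list of strings, "dict" as a string map). The class name, field types, and the abbreviated example values are illustrative assumptions, not part of the dataset; the usage example reuses (shortened) values from the doSendFile row further down.

import java.util.List;
import java.util.Map;

/**
 * Hypothetical container for one dataset row, mirroring the column schema above.
 * Field names come from the schema; the Java types are an assumed mapping.
 */
public record DatasetRow(
        String method,               // full Java method source
        String cleanMethod,          // abstracted form (literals replaced with STR, name with "function")
        String doc,                  // original Javadoc block
        String comment,              // Javadoc summary stripped of markup
        String methodName,           // declared method name
        Map<String, String> extra,   // repo_name, path, license, size
        List<String> imports,        // fully qualified imported types
        String importsInfo,          // explicit import statements
        String clusterImportsInfo,   // wildcard (package-level) import statements
        List<String> libraries,      // top-level package prefixes
        String librariesInfo,        // package prefixes joined as "a.b; c.d;"
        long id) {                   // numeric row identifier

    /** Usage example: build a row from abbreviated values of the doSendFile entry below. */
    public static void main(String[] args) {
        DatasetRow row = new DatasetRow(
                "public void doSendFile(Person person, File file);",
                "void function(Person person, File file);",
                "/** User requested sending a file to a person ... */",
                "User requested sending a file to a person",
                "doSendFile",
                Map.of("repo_name", "ControlSystemStudio/org.csstudio.sns",
                       "license", "epl-1.0"),        // extra dict abbreviated for brevity
                List.of("java.io.File"),
                "import java.io.File;",
                "import java.io.*;",
                List.of("java.io"),
                "java.io;",
                983_145L);
        System.out.println(row.methodName() + " -> " + row.comment());
    }
}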
public synchronized List<RegionState> serverOffline(final ServerName sn) { // Clean up this server from map of servers to regions, and remove all regions // of this server from online map of regions. List<RegionState> rits = new ArrayList<RegionState>(); Set<HRegionInfo> assignedRegions = serverHoldings.get(sn); if (assignedRegions == null) { assignedRegions = new HashSet<HRegionInfo>(); } for (HRegionInfo region : assignedRegions) { regionAssignments.remove(region); } // See if any of the regions that were online on this server were in RIT // If they are, normal timeouts will deal with them appropriately so // let's skip a manual re-assignment. for (RegionState state : regionsInTransition.values()) { if (assignedRegions.contains(state.getRegion())) { rits.add(state); } else if (sn.equals(state.getServerName())) { // Region is in transition on this region server, and this // region is not open on this server. So the region must be // moving to this server from another one (i.e. opening or // pending open on this server, was open on another one if (state.isPendingOpen() || state.isOpening()) { state.setTimestamp(0); // timeout it, let timeout monitor reassign } else { LOG.warn("THIS SHOULD NOT HAPPEN: unexpected state " + state + " of region in transition on server " + sn); } } } assignedRegions.clear(); this.notifyAll(); return rits; }
synchronized List<RegionState> function(final ServerName sn) { List<RegionState> rits = new ArrayList<RegionState>(); Set<HRegionInfo> assignedRegions = serverHoldings.get(sn); if (assignedRegions == null) { assignedRegions = new HashSet<HRegionInfo>(); } for (HRegionInfo region : assignedRegions) { regionAssignments.remove(region); } for (RegionState state : regionsInTransition.values()) { if (assignedRegions.contains(state.getRegion())) { rits.add(state); } else if (sn.equals(state.getServerName())) { if (state.isPendingOpen() state.isOpening()) { state.setTimestamp(0); } else { LOG.warn(STR + state + STR + sn); } } } assignedRegions.clear(); this.notifyAll(); return rits; }
/** * A server is offline, all regions on it are dead. */
A server is offline, all regions on it are dead
serverOffline
{ "repo_name": "daidong/DominoHBase", "path": "hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionStates.java", "license": "apache-2.0", "size": 19397 }
[ "java.util.ArrayList", "java.util.HashSet", "java.util.List", "java.util.Set", "org.apache.hadoop.hbase.HRegionInfo", "org.apache.hadoop.hbase.ServerName" ]
import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.ServerName;
import java.util.*; import org.apache.hadoop.hbase.*;
[ "java.util", "org.apache.hadoop" ]
java.util; org.apache.hadoop;
1,268,099
@Override public void mouseExited(final MouseEvent e) { lblProductPreview.setIcon(null); } } class RefreshDocumentWorker extends SwingWorker<String, Object> { //~ Instance fields ---------------------------------------------------- private final String pointcode; //~ Constructors ------------------------------------------------------- public RefreshDocumentWorker(final String pointcode) { this.pointcode = pointcode; } //~ Methods ------------------------------------------------------------
void function(final MouseEvent e) { lblProductPreview.setIcon(null); } } class RefreshDocumentWorker extends SwingWorker<String, Object> { private final String pointcode; public RefreshDocumentWorker(final String pointcode) { this.pointcode = pointcode; }
/** * DOCUMENT ME! * * @param e DOCUMENT ME! */
DOCUMENT ME
mouseExited
{ "repo_name": "cismet/cids-custom-wuppertal", "path": "src/main/java/de/cismet/cids/custom/objectrenderer/wunda_blau/AlkisPointRenderer.java", "license": "lgpl-3.0", "size": 118383 }
[ "java.awt.event.MouseEvent", "javax.swing.SwingWorker" ]
import java.awt.event.MouseEvent; import javax.swing.SwingWorker;
import java.awt.event.*; import javax.swing.*;
[ "java.awt", "javax.swing" ]
java.awt; javax.swing;
509,293
public void doSendFile(Person person, File file);
void function(Person person, File file);
/** User requested sending a file to a person * @param person receipient * @param file File to send */
User requested sending a file to a person
doSendFile
{ "repo_name": "ControlSystemStudio/org.csstudio.sns", "path": "plugins/org.csstudio.utility.chat/src/org/csstudio/utility/chat/GroupChatGUIListener.java", "license": "epl-1.0", "size": 1443 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
983,145
public void itemsFinished( final ReportEvent event ) { if ( ( event.getType() & ReportEvent.ITEMS_FINISHED ) != ReportEvent.ITEMS_FINISHED ) { TestCase.fail( "ReportEvent was expected to be of type ITEMS_FINISHED" ); } if ( ( lastEventType & ReportEvent.ITEMS_ADVANCED ) != ReportEvent.ITEMS_ADVANCED ) { TestCase.fail( "Unexpected Event: ItemsFinished: " + lastEventType ); } lastEventType = ReportEvent.ITEMS_FINISHED; }
void function( final ReportEvent event ) { if ( ( event.getType() & ReportEvent.ITEMS_FINISHED ) != ReportEvent.ITEMS_FINISHED ) { TestCase.fail( STR ); } if ( ( lastEventType & ReportEvent.ITEMS_ADVANCED ) != ReportEvent.ITEMS_ADVANCED ) { TestCase.fail( STR + lastEventType ); } lastEventType = ReportEvent.ITEMS_FINISHED; }
/** * Receives notification that a group of item bands has been completed. * <p/> * The itemBand is finished, the report starts to close open groups. * * @param event * The event. */
Receives notification that a group of item bands has been completed. The itemBand is finished, the report starts to close open groups
itemsFinished
{ "repo_name": "EgorZhuk/pentaho-reporting", "path": "engine/core/src/test/java/org/pentaho/reporting/engine/classic/core/testsupport/EventOrderFunction.java", "license": "lgpl-2.1", "size": 9310 }
[ "junit.framework.TestCase", "org.pentaho.reporting.engine.classic.core.event.ReportEvent" ]
import junit.framework.TestCase; import org.pentaho.reporting.engine.classic.core.event.ReportEvent;
import junit.framework.*; import org.pentaho.reporting.engine.classic.core.event.*;
[ "junit.framework", "org.pentaho.reporting" ]
junit.framework; org.pentaho.reporting;
46,523
@Override public String getText(Object object) { String label = ((RestrictionStartAfterOtherFinished)object).getTaskNames(); return label == null || label.length() == 0 ? getString("_UI_RestrictionStartAfterOtherFinished_type") : getString("_UI_RestrictionStartAfterOtherFinished_type") + " " + label; }
String function(Object object) { String label = ((RestrictionStartAfterOtherFinished)object).getTaskNames(); return label == null label.length() == 0 ? getString(STR) : getString(STR) + " " + label; }
/** * This returns the label text for the adapted class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This returns the label text for the adapted class.
getText
{ "repo_name": "RobertHilbrich/assist", "path": "ch.hilbri.assist.model.edit/src/ch/hilbri/assist/model/provider/RestrictionStartAfterOtherFinishedItemProvider.java", "license": "gpl-2.0", "size": 2993 }
[ "ch.hilbri.assist.model.RestrictionStartAfterOtherFinished" ]
import ch.hilbri.assist.model.RestrictionStartAfterOtherFinished;
import ch.hilbri.assist.model.*;
[ "ch.hilbri.assist" ]
ch.hilbri.assist;
198,309
public BigDecimal normalizeTalliedScore(ScoreSummary summary, Set<String> scoreCards, Result.ResultScoreType scoreType,BigDecimal maximumValue, BigDecimal normalizedTarget) { BigDecimal tallied = tallyScoreFor(summary, scoreCards, scoreType); return normalize(tallied, maximumValue, normalizedTarget); }
BigDecimal function(ScoreSummary summary, Set<String> scoreCards, Result.ResultScoreType scoreType,BigDecimal maximumValue, BigDecimal normalizedTarget) { BigDecimal tallied = tallyScoreFor(summary, scoreCards, scoreType); return normalize(tallied, maximumValue, normalizedTarget); }
/** * Normalize a tallied score with a maximum possible value and normalize target. * * @param summary The scoring summary for an object * @param scoreCards The score cards used * @param scoreType What type of scoring * @param maximumValue The max value within the object groups * @param normalizedTarget The normalizing multiplier * @return The normalized score */
Normalize a tallied score with a maximum possible value and normalize target
normalizeTalliedScore
{ "repo_name": "dmillett/prank", "path": "src/main/java/net/prank/tools/ScoringTool.java", "license": "apache-2.0", "size": 13647 }
[ "java.math.BigDecimal", "java.util.Set", "net.prank.core.Result", "net.prank.core.ScoreSummary" ]
import java.math.BigDecimal; import java.util.Set; import net.prank.core.Result; import net.prank.core.ScoreSummary;
import java.math.*; import java.util.*; import net.prank.core.*;
[ "java.math", "java.util", "net.prank.core" ]
java.math; java.util; net.prank.core;
2,226,845
public void finer(String message, Throwable t) { privlog(Level.FINER, message, t, 2, FrameworkLogger.class); }
void function(String message, Throwable t) { privlog(Level.FINER, message, t, 2, FrameworkLogger.class); }
/** * Just like FrameworkLogger.finer(String), but also logs a stack trace. * * @param t Throwable whose stack trace will be logged. * @see #finer(String) */
Just like FrameworkLogger.finer(String), but also logs a stack trace
finer
{ "repo_name": "ferquies/2dam", "path": "AD/Tema 2/hsqldb-2.3.1/hsqldb/src/org/hsqldb/lib/FrameworkLogger.java", "license": "gpl-3.0", "size": 22646 }
[ "java.util.logging.Level" ]
import java.util.logging.Level;
import java.util.logging.*;
[ "java.util" ]
java.util;
1,924,274
public void assertOuterHtml(String cssq, String html) { String val = outerHtml(cssq); if (val != null) assertThat(val, is(html)); else fail("cannot find " + cssq); }
void function(String cssq, String html) { String val = outerHtml(cssq); if (val != null) assertThat(val, is(html)); else fail(STR + cssq); }
/** * Check that the html at the "css" query is the expected Note that it is * returned also the selected node. * * of the selected node. * * @param cssq * @param html */
Check that the html at the "css" query is the expected Note that it is returned also the selected node. of the selected node
assertOuterHtml
{ "repo_name": "agilesites/agilesites2-lib", "path": "api/src/main/java/wcs/java/util/TestElement.java", "license": "mit", "size": 6966 }
[ "org.hamcrest.CoreMatchers", "org.junit.Assert" ]
import org.hamcrest.CoreMatchers; import org.junit.Assert;
import org.hamcrest.*; import org.junit.*;
[ "org.hamcrest", "org.junit" ]
org.hamcrest; org.junit;
542,809
@NotNull IMobAIRegistry registerNewAiTaskForMobs( final int priority, final Function<AbstractEntityMinecoloniesMob, Goal> aiTaskProducer, Predicate<AbstractEntityMinecoloniesMob> applyPredicate);
IMobAIRegistry registerNewAiTaskForMobs( final int priority, final Function<AbstractEntityMinecoloniesMob, Goal> aiTaskProducer, Predicate<AbstractEntityMinecoloniesMob> applyPredicate);
/** * Method used to register a entity AI task for a mob that matches the predicate. * * @param priority The priority to register this task on. * @param aiTaskProducer The task producer in question to register. * @param applyPredicate The predicate used to indicate if the task should be applied to a given mob. * @return The registry. */
Method used to register a entity AI task for a mob that matches the predicate
registerNewAiTaskForMobs
{ "repo_name": "Minecolonies/minecolonies", "path": "src/api/java/com/minecolonies/api/entity/ai/registry/IMobAIRegistry.java", "license": "gpl-3.0", "size": 3730 }
[ "com.minecolonies.api.entity.mobs.AbstractEntityMinecoloniesMob", "java.util.function.Function", "java.util.function.Predicate", "net.minecraft.entity.ai.goal.Goal" ]
import com.minecolonies.api.entity.mobs.AbstractEntityMinecoloniesMob; import java.util.function.Function; import java.util.function.Predicate; import net.minecraft.entity.ai.goal.Goal;
import com.minecolonies.api.entity.mobs.*; import java.util.function.*; import net.minecraft.entity.ai.goal.*;
[ "com.minecolonies.api", "java.util", "net.minecraft.entity" ]
com.minecolonies.api; java.util; net.minecraft.entity;
753,106
public static boolean contains(long[] array, long value) { if(array.length > SEARCH_BINARY_START) { return Arrays.binarySearch(array, value) >= 0; } else { for(int i = 0; i < array.length; i++) { if(array[i] == value) return true; } return false; } }
static boolean function(long[] array, long value) { if(array.length > SEARCH_BINARY_START) { return Arrays.binarySearch(array, value) >= 0; } else { for(int i = 0; i < array.length; i++) { if(array[i] == value) return true; } return false; } }
/** * Searches for a specific {@code value} in the given {@code array} and returns {@code true} if it was found. * If the array length is above {@link ArrayUtils#SEARCH_BINARY_START}, this method will call {@link Arrays#binarySearch(long[], long)} to * ensure that the search is fast enough for every situation. * <br><br> * For further information see {@link Arrays#binarySearch(long[], long)}. * * @param array Array to search in * @param value Value to search * @return {@code true} if the value was found in this array, {@code false} otherwise. */
Searches for a specific value in the given array and returns true if it was found. If the array length is above <code>ArrayUtils#SEARCH_BINARY_START</code>, this method will call <code>Arrays#binarySearch(long[], long)</code> to ensure that the search is fast enough for every situation. For further information see <code>Arrays#binarySearch(long[], long)</code>
contains
{ "repo_name": "fusselrulezz/ets2editor", "path": "src/tk/fusselrulezz/ets2editor/util/ArrayUtils.java", "license": "mit", "size": 6248 }
[ "java.util.Arrays" ]
import java.util.Arrays;
import java.util.*;
[ "java.util" ]
java.util;
1,898,623
private boolean isChild(Node cn, Node cn2) { if (cn == cn2) return false; Queue<Node> toProcess = new LinkedList<Node>(); toProcess.addAll(cn.getParents()); while (!toProcess.isEmpty()) { Node tcn = toProcess.poll(); if (tcn.equals(cn2)) return true; Set<Node> parents = tcn.getParents(); if (parents != null && !parents.isEmpty()) toProcess.addAll(parents); } return false; } class Pair { private final Node a; private final Node b; public Pair(Node a, Node b) { String[] aa = new String[a.getEquivalentConcepts().size()]; String[] bb = new String[b.getEquivalentConcepts().size()]; if (aa.length < bb.length) { this.a = a; this.b = b; } else if (aa.length > bb.length) { this.a = b; this.b = a; } else { int i = 0; for (String c : a.getEquivalentConcepts()) { aa[i++] = c; } i = 0; for (String c : b.getEquivalentConcepts()) { bb[i++] = c; } int res = 0; // 0 equal, 1 a <, 2 a > for (i = 0; i < aa.length; i++) { if (aa[i].compareTo(bb[i]) < 0) { res = 1; break; } else if (aa[i].compareTo(bb[i]) > 0) { res = 2; break; } } if (res == 1) { this.a = a; this.b = b; } else if (res == 2) { this.a = b; this.b = a; } else { this.a = a; this.b = b; } } }
boolean function(Node cn, Node cn2) { if (cn == cn2) return false; Queue<Node> toProcess = new LinkedList<Node>(); toProcess.addAll(cn.getParents()); while (!toProcess.isEmpty()) { Node tcn = toProcess.poll(); if (tcn.equals(cn2)) return true; Set<Node> parents = tcn.getParents(); if (parents != null && !parents.isEmpty()) toProcess.addAll(parents); } return false; } class Pair { private final Node a; private final Node b; public Pair(Node a, Node b) { String[] aa = new String[a.getEquivalentConcepts().size()]; String[] bb = new String[b.getEquivalentConcepts().size()]; if (aa.length < bb.length) { this.a = a; this.b = b; } else if (aa.length > bb.length) { this.a = b; this.b = a; } else { int i = 0; for (String c : a.getEquivalentConcepts()) { aa[i++] = c; } i = 0; for (String c : b.getEquivalentConcepts()) { bb[i++] = c; } int res = 0; for (i = 0; i < aa.length; i++) { if (aa[i].compareTo(bb[i]) < 0) { res = 1; break; } else if (aa[i].compareTo(bb[i]) > 0) { res = 2; break; } } if (res == 1) { this.a = a; this.b = b; } else if (res == 2) { this.a = b; this.b = a; } else { this.a = a; this.b = b; } } }
/** * Indicates if cn is a child of cn2. * * @param cn * @param cn2 * @return */
Indicates if cn is a child of cn2
isChild
{ "repo_name": "aehrc/snorocket", "path": "snorocket-core/src/main/java/au/csiro/snorocket/core/NormalisedOntology.java", "license": "apache-2.0", "size": 98552 }
[ "au.csiro.ontology.Node", "java.util.LinkedList", "java.util.Queue", "java.util.Set" ]
import au.csiro.ontology.Node; import java.util.LinkedList; import java.util.Queue; import java.util.Set;
import au.csiro.ontology.*; import java.util.*;
[ "au.csiro.ontology", "java.util" ]
au.csiro.ontology; java.util;
666,660
@Override public void notifyChanged(Notification notification) { updateChildren(notification); switch (notification.getFeatureID(Resequencer.class)) { case EipPackage.RESEQUENCER__STREAM_SEQUENCES: fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true)); return; } super.notifyChanged(notification); }
void function(Notification notification) { updateChildren(notification); switch (notification.getFeatureID(Resequencer.class)) { case EipPackage.RESEQUENCER__STREAM_SEQUENCES: fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true)); return; } super.notifyChanged(notification); }
/** * This handles model notifications by calling {@link #updateChildren} to update any cached * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This handles model notifications by calling <code>#updateChildren</code> to update any cached children and by creating a viewer notification, which it passes to <code>#fireNotifyChanged</code>.
notifyChanged
{ "repo_name": "lbroudoux/eip-designer", "path": "plugins/com.github.lbroudoux.dsl.eip.edit/src/com/github/lbroudoux/dsl/eip/provider/ResequencerItemProvider.java", "license": "apache-2.0", "size": 4344 }
[ "com.github.lbroudoux.dsl.eip.EipPackage", "com.github.lbroudoux.dsl.eip.Resequencer", "org.eclipse.emf.common.notify.Notification", "org.eclipse.emf.edit.provider.ViewerNotification" ]
import com.github.lbroudoux.dsl.eip.EipPackage; import com.github.lbroudoux.dsl.eip.Resequencer; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.edit.provider.ViewerNotification;
import com.github.lbroudoux.dsl.eip.*; import org.eclipse.emf.common.notify.*; import org.eclipse.emf.edit.provider.*;
[ "com.github.lbroudoux", "org.eclipse.emf" ]
com.github.lbroudoux; org.eclipse.emf;
1,412,741
private static String formatCqlQuery(String query, List<Object> parms) { int marker, position = 0; StringBuilder result = new StringBuilder(); if (-1 == (marker = query.indexOf('?')) || parms.size() == 0) return query; for (Object parm : parms) { result.append(query.substring(position, marker)); if (parm instanceof ByteBuffer) result.append(getUnQuotedCqlBlob((ByteBuffer) parm)); else if (parm instanceof Long) result.append(parm); else throw new AssertionError(); position = marker + 1; if (-1 == (marker = query.indexOf('?', position + 1))) break; } if (position < query.length()) result.append(query.substring(position)); return result.toString(); }
static String function(String query, List<Object> parms) { int marker, position = 0; StringBuilder result = new StringBuilder(); if (-1 == (marker = query.indexOf('?')) parms.size() == 0) return query; for (Object parm : parms) { result.append(query.substring(position, marker)); if (parm instanceof ByteBuffer) result.append(getUnQuotedCqlBlob((ByteBuffer) parm)); else if (parm instanceof Long) result.append(parm); else throw new AssertionError(); position = marker + 1; if (-1 == (marker = query.indexOf('?', position + 1))) break; } if (position < query.length()) result.append(query.substring(position)); return result.toString(); }
/** * Constructs a CQL query string by replacing instances of the character * '?', with the corresponding parameter. * * @param query base query string to format * @param parms sequence of string query parameters * @return formatted CQL query string */
Constructs a CQL query string by replacing instances of the character '?', with the corresponding parameter
formatCqlQuery
{ "repo_name": "pthomaid/cassandra", "path": "tools/stress/src/org/apache/cassandra/stress/operations/predefined/CqlOperation.java", "license": "apache-2.0", "size": 21777 }
[ "java.nio.ByteBuffer", "java.util.List" ]
import java.nio.ByteBuffer; import java.util.List;
import java.nio.*; import java.util.*;
[ "java.nio", "java.util" ]
java.nio; java.util;
1,015,369
public String getTablespaceDDL( VariableSpace variables, DatabaseMeta databaseMeta, String tablespaceName ) { return ""; }
String function( VariableSpace variables, DatabaseMeta databaseMeta, String tablespaceName ) { return ""; }
/** * Returns an empty string as most databases do not support tablespaces. Subclasses can override this method to * generate the DDL. * * @param variables * variables needed for variable substitution. * @param databaseMeta * databaseMeta needed for it's quoteField method. Since we are doing variable substitution we need to meta * so that we can act on the variable substitution first and then the creation of the entire string that will * be retuned. * @param tablespaceName * tablespaceName name of the tablespace. * * @return String an empty String as most databases do not use tablespaces. */
Returns an empty string as most databases do not support tablespaces. Subclasses can override this method to generate the DDL
getTablespaceDDL
{ "repo_name": "IvanNikolaychuk/pentaho-kettle", "path": "core/src/org/pentaho/di/core/database/BaseDatabaseMeta.java", "license": "apache-2.0", "size": 68605 }
[ "org.pentaho.di.core.variables.VariableSpace" ]
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.*;
[ "org.pentaho.di" ]
org.pentaho.di;
531,495
@Before public void setUp() throws Exception { MockitoAnnotations.initMocks(this); when(encryptor.decrypt("password")).thenReturn("noproblem"); }
void function() throws Exception { MockitoAnnotations.initMocks(this); when(encryptor.decrypt(STR)).thenReturn(STR); }
/** * JAVADOC Method Level Comments * * @throws Exception JAVADOC. */
JAVADOC Method Level Comments
setUp
{ "repo_name": "cucina/opencucina", "path": "nosql/security/src/test/java/org/cucina/security/crypto/PasswordDecryptorImplTest.java", "license": "apache-2.0", "size": 973 }
[ "org.mockito.Mockito", "org.mockito.MockitoAnnotations" ]
import org.mockito.Mockito; import org.mockito.MockitoAnnotations;
import org.mockito.*;
[ "org.mockito" ]
org.mockito;
1,701,731
private void delete() { int[] selRows = jTable.getSelectedRows(); if (selRows.length == 0) { return; } if (realDeleteOfFiles) { int response = JOptionPane.showConfirmDialog(parent, MessagesManager.get("do_you_realy_want_to_delete_selected_files"), ParmsConstants.APP_NAME, JOptionPane.YES_NO_OPTION); if (response != JOptionPane.YES_OPTION) { return; } } List<Integer> listRows = new Vector<Integer>(); for (int i = 0; i < selRows.length; i++) { listRows.add(jTable.convertRowIndexToModel(selRows[i])); } // get Table data and remove the selected rows DefaultTableModel tm = (DefaultTableModel) jTable.getModel(); for (int i = 0; i < listRows.size(); i++) { Integer rowIndex = listRows.get(i); File file = new File((String) tm.getValueAt(rowIndex.intValue(), 0)); if (realDeleteOfFiles) { file.delete(); } } // For values ro be sorted in asc Collections.sort(listRows); // Remove the row(s) for (int i = listRows.size() - 1; i >= 0; i--) { Integer rowIndex = listRows.get(i); tm.removeRow(rowIndex); } }
void function() { int[] selRows = jTable.getSelectedRows(); if (selRows.length == 0) { return; } if (realDeleteOfFiles) { int response = JOptionPane.showConfirmDialog(parent, MessagesManager.get(STR), ParmsConstants.APP_NAME, JOptionPane.YES_NO_OPTION); if (response != JOptionPane.YES_OPTION) { return; } } List<Integer> listRows = new Vector<Integer>(); for (int i = 0; i < selRows.length; i++) { listRows.add(jTable.convertRowIndexToModel(selRows[i])); } DefaultTableModel tm = (DefaultTableModel) jTable.getModel(); for (int i = 0; i < listRows.size(); i++) { Integer rowIndex = listRows.get(i); File file = new File((String) tm.getValueAt(rowIndex.intValue(), 0)); if (realDeleteOfFiles) { file.delete(); } } Collections.sort(listRows); for (int i = listRows.size() - 1; i >= 0; i--) { Integer rowIndex = listRows.get(i); tm.removeRow(rowIndex); } }
/** * Delete the files */
Delete the files
delete
{ "repo_name": "kawansoft/aceql-http-gui", "path": "src/main/java/com/kawansoft/app/util/table/FileTableClipboardManager.java", "license": "apache-2.0", "size": 15884 }
[ "com.kawansoft.app.parms.MessagesManager", "com.kawansoft.app.parms.ParmsConstants", "java.io.File", "java.util.Collections", "java.util.List", "java.util.Vector", "javax.swing.JOptionPane", "javax.swing.table.DefaultTableModel" ]
import com.kawansoft.app.parms.MessagesManager; import com.kawansoft.app.parms.ParmsConstants; import java.io.File; import java.util.Collections; import java.util.List; import java.util.Vector; import javax.swing.JOptionPane; import javax.swing.table.DefaultTableModel;
import com.kawansoft.app.parms.*; import java.io.*; import java.util.*; import javax.swing.*; import javax.swing.table.*;
[ "com.kawansoft.app", "java.io", "java.util", "javax.swing" ]
com.kawansoft.app; java.io; java.util; javax.swing;
1,339,605
@Test public void testRequestMessageStatement() throws Exception { Update.Where update = update("camel_user") .with(set("first_name", "Claus 2")) .and(set("last_name", "Ibsen 2")) .where(eq("login", "c_ibsen")); Object response = producerTemplate.requestBodyAndHeader(null, CassandraConstants.CQL_QUERY, update); Cluster cluster = CassandraUnitUtils.cassandraCluster(); Session session = cluster.connect(CassandraUnitUtils.KEYSPACE); ResultSet resultSet = session.execute("select login, first_name, last_name from camel_user where login = ?", "c_ibsen"); Row row = resultSet.one(); assertNotNull(row); assertEquals("Claus 2", row.getString("first_name")); assertEquals("Ibsen 2", row.getString("last_name")); session.close(); cluster.close(); }
void function() throws Exception { Update.Where update = update(STR) .with(set(STR, STR)) .and(set(STR, STR)) .where(eq("login", STR)); Object response = producerTemplate.requestBodyAndHeader(null, CassandraConstants.CQL_QUERY, update); Cluster cluster = CassandraUnitUtils.cassandraCluster(); Session session = cluster.connect(CassandraUnitUtils.KEYSPACE); ResultSet resultSet = session.execute(STR, STR); Row row = resultSet.one(); assertNotNull(row); assertEquals(STR, row.getString(STR)); assertEquals(STR, row.getString(STR)); session.close(); cluster.close(); }
/** * Test with incoming message containing a header with RegularStatement. */
Test with incoming message containing a header with RegularStatement
testRequestMessageStatement
{ "repo_name": "davidwilliams1978/camel", "path": "components/camel-cassandraql/src/test/java/org/apache/camel/component/cassandra/CassandraComponentProducerUnpreparedTest.java", "license": "apache-2.0", "size": 6012 }
[ "com.datastax.driver.core.Cluster", "com.datastax.driver.core.ResultSet", "com.datastax.driver.core.Row", "com.datastax.driver.core.Session", "com.datastax.driver.core.querybuilder.QueryBuilder", "com.datastax.driver.core.querybuilder.Update" ]
import com.datastax.driver.core.Cluster; import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.Row; import com.datastax.driver.core.Session; import com.datastax.driver.core.querybuilder.QueryBuilder; import com.datastax.driver.core.querybuilder.Update;
import com.datastax.driver.core.*; import com.datastax.driver.core.querybuilder.*;
[ "com.datastax.driver" ]
com.datastax.driver;
653,596
protected void checkAndSendUnparsedEntity(UnparsedEntity ent) { if (isRootDocument()) { int index = fUnparsedEntities.indexOf(ent); if (index == -1) { // There is no unparsed entity with the same name that we have sent. // Calling unparsedEntityDecl() will add the entity to our local store, // and also send the unparsed entity to the DTDHandler XMLResourceIdentifier id = new XMLResourceIdentifierImpl( ent.publicId, ent.systemId, ent.baseURI, ent.expandedSystemId); addUnparsedEntity( ent.name, id, ent.notation, ent.augmentations); if (fSendUEAndNotationEvents && fDTDHandler != null) { fDTDHandler.unparsedEntityDecl( ent.name, id, ent.notation, ent.augmentations); } } else { UnparsedEntity localEntity = (UnparsedEntity)fUnparsedEntities.get(index); if (!ent.isDuplicate(localEntity)) { reportFatalError( "NonDuplicateUnparsedEntity", new Object[] { ent.name }); } } } else { fParentXIncludeHandler.checkAndSendUnparsedEntity(ent); } }
void function(UnparsedEntity ent) { if (isRootDocument()) { int index = fUnparsedEntities.indexOf(ent); if (index == -1) { XMLResourceIdentifier id = new XMLResourceIdentifierImpl( ent.publicId, ent.systemId, ent.baseURI, ent.expandedSystemId); addUnparsedEntity( ent.name, id, ent.notation, ent.augmentations); if (fSendUEAndNotationEvents && fDTDHandler != null) { fDTDHandler.unparsedEntityDecl( ent.name, id, ent.notation, ent.augmentations); } } else { UnparsedEntity localEntity = (UnparsedEntity)fUnparsedEntities.get(index); if (!ent.isDuplicate(localEntity)) { reportFatalError( STR, new Object[] { ent.name }); } } } else { fParentXIncludeHandler.checkAndSendUnparsedEntity(ent); } }
/** * The purpose of this method is to check if an UnparsedEntity conflicts with a previously * declared entity in the current pipeline stack. If there is no conflict, the * UnparsedEntity is sent by the root pipeline. * * @param ent the UnparsedEntity to check for conflicts */
The purpose of this method is to check if an UnparsedEntity conflicts with a previously declared entity in the current pipeline stack. If there is no conflict, the UnparsedEntity is sent by the root pipeline
checkAndSendUnparsedEntity
{ "repo_name": "wangsongpeng/jdk-src", "path": "src/main/java/com/sun/org/apache/xerces/internal/xinclude/XIncludeHandler.java", "license": "apache-2.0", "size": 118861 }
[ "com.sun.org.apache.xerces.internal.util.XMLResourceIdentifierImpl", "com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier" ]
import com.sun.org.apache.xerces.internal.util.XMLResourceIdentifierImpl; import com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier;
import com.sun.org.apache.xerces.internal.util.*; import com.sun.org.apache.xerces.internal.xni.*;
[ "com.sun.org" ]
com.sun.org;
2,020,738
public void assertDoesNotContain(AssertionInfo info, int[] actual, int value, Index index) { arrays.assertDoesNotContain(info, failures, actual, value, index); }
void function(AssertionInfo info, int[] actual, int value, Index index) { arrays.assertDoesNotContain(info, failures, actual, value, index); }
/** * Verifies that the given array does not contain the given value at the given index. * * @param info contains information about the assertion. * @param actual the given array. * @param value the value to look for. * @param index the index where the value should be stored in the given array. * @throws AssertionError if the given array is {@code null}. * @throws NullPointerException if the given {@code Index} is {@code null}. * @throws AssertionError if the given array contains the given value at the given index. */
Verifies that the given array does not contain the given value at the given index
assertDoesNotContain
{ "repo_name": "joel-costigliola/assertj-core", "path": "src/main/java/org/assertj/core/internal/IntArrays.java", "license": "apache-2.0", "size": 18030 }
[ "org.assertj.core.api.AssertionInfo", "org.assertj.core.data.Index" ]
import org.assertj.core.api.AssertionInfo; import org.assertj.core.data.Index;
import org.assertj.core.api.*; import org.assertj.core.data.*;
[ "org.assertj.core" ]
org.assertj.core;
2,228,860
ArmorStand am = this.getArmorStand(); if (am == null) { am = (ArmorStand) this.location.getWorld().spawnEntity(this.location, EntityType.ARMOR_STAND); this.uuid = am.getUniqueId(); am.setGravity(false); am.setSmall(true); am.setVisible(false); am.setCustomName(this.text); am.setCustomNameVisible(true); am.setNoDamageTicks(Integer.MAX_VALUE); am.setRemoveWhenFarAway(false); } else { am.teleport(this.location); am.setCustomName(this.text); } }
ArmorStand am = this.getArmorStand(); if (am == null) { am = (ArmorStand) this.location.getWorld().spawnEntity(this.location, EntityType.ARMOR_STAND); this.uuid = am.getUniqueId(); am.setGravity(false); am.setSmall(true); am.setVisible(false); am.setCustomName(this.text); am.setCustomNameVisible(true); am.setNoDamageTicks(Integer.MAX_VALUE); am.setRemoveWhenFarAway(false); } else { am.teleport(this.location); am.setCustomName(this.text); } }
/** * Updates the Hologram to display the text */
Updates the Hologram to display the text
update
{ "repo_name": "j0ach1mmall3/JLib", "path": "src/main/java/com/j0ach1mmall3/jlib/visual/Hologram.java", "license": "gpl-3.0", "size": 3192 }
[ "org.bukkit.entity.ArmorStand", "org.bukkit.entity.EntityType" ]
import org.bukkit.entity.ArmorStand; import org.bukkit.entity.EntityType;
import org.bukkit.entity.*;
[ "org.bukkit.entity" ]
org.bukkit.entity;
1,992,121
public CmsResource addTextFile(String relPath, String text) throws CmsException { String type = "plain"; return addFile(type, relPath, text); }
CmsResource function(String relPath, String text) throws CmsException { String type = "plain"; return addFile(type, relPath, text); }
/** * Adds te text file.<p> * * @param relPath the relative path * @param text the content * * @return the created resource * @throws CmsException if something goes wrong */
Adds te text file
addTextFile
{ "repo_name": "alkacon/opencms-core", "path": "test/org/opencms/module/CmsTestModuleBuilder.java", "license": "lgpl-2.1", "size": 17087 }
[ "org.opencms.file.CmsResource", "org.opencms.main.CmsException" ]
import org.opencms.file.CmsResource; import org.opencms.main.CmsException;
import org.opencms.file.*; import org.opencms.main.*;
[ "org.opencms.file", "org.opencms.main" ]
org.opencms.file; org.opencms.main;
322,387
protected Expression genStdOperation(Context ctx, Token token, String opname, Expression[] args) throws SemanticException { Expression res = null; try { // lookup operation res = ExpStdOp.create(opname, args); } catch (ExpInvalidException ex) { throw new SemanticException(token, ex); } return res; }
Expression function(Context ctx, Token token, String opname, Expression[] args) throws SemanticException { Expression res = null; try { res = ExpStdOp.create(opname, args); } catch (ExpInvalidException ex) { throw new SemanticException(token, ex); } return res; }
/** * Generates a predefined standard operation expression. */
Generates a predefined standard operation expression
genStdOperation
{ "repo_name": "anonymous100001/maxuse", "path": "src/main/org/tzi/use/parser/ocl/ASTExpression.java", "license": "gpl-2.0", "size": 11693 }
[ "org.antlr.runtime.Token", "org.tzi.use.parser.Context", "org.tzi.use.parser.SemanticException", "org.tzi.use.uml.ocl.expr.ExpInvalidException", "org.tzi.use.uml.ocl.expr.ExpStdOp", "org.tzi.use.uml.ocl.expr.Expression" ]
import org.antlr.runtime.Token; import org.tzi.use.parser.Context; import org.tzi.use.parser.SemanticException; import org.tzi.use.uml.ocl.expr.ExpInvalidException; import org.tzi.use.uml.ocl.expr.ExpStdOp; import org.tzi.use.uml.ocl.expr.Expression;
import org.antlr.runtime.*; import org.tzi.use.parser.*; import org.tzi.use.uml.ocl.expr.*;
[ "org.antlr.runtime", "org.tzi.use" ]
org.antlr.runtime; org.tzi.use;
49,681
public int peek() throws IOException { int outResult = read(); if (outResult != -1) { unread(outResult); } return outResult; }
int function() throws IOException { int outResult = read(); if (outResult != -1) { unread(outResult); } return outResult; }
/** * This will peek at the next byte. * * @return The next byte on the stream, leaving it as available to read. * @throws IOException If there is an error reading the next byte. */
This will peek at the next byte
peek
{ "repo_name": "aktion-hip/relations", "path": "org.elbe.relations.biblio.meta/src/org/elbe/relations/biblio/meta/internal/pdf/PushBackInputStream.java", "license": "gpl-3.0", "size": 3073 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,609,975
Set<ConfigProperty> getProperties(String componentName);
Set<ConfigProperty> getProperties(String componentName);
/** * Returns configuration properties of the named components. * * @param componentName component name * @return set of configuration properties */
Returns configuration properties of the named components
getProperties
{ "repo_name": "jinlongliu/onos", "path": "core/api/src/main/java/org/onosproject/cfg/ComponentConfigService.java", "license": "apache-2.0", "size": 2333 }
[ "java.util.Set" ]
import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
2,230,050
public static Number power(BigDecimal self, Integer exponent) { if (exponent >= 0) { return self.pow(exponent); } else { return power(self, (double) exponent); } }
static Number function(BigDecimal self, Integer exponent) { if (exponent >= 0) { return self.pow(exponent); } else { return power(self, (double) exponent); } }
/** * Power of a BigDecimal to an integer certain exponent. If the * exponent is positive, call the BigDecimal.pow(int) method to * maintain precision. Called by the '**' operator. * * @param self a BigDecimal * @param exponent an Integer exponent * @return a Number to the power of a the exponent */
Power of a BigDecimal to an integer certain exponent. If the exponent is positive, call the BigDecimal.pow(int) method to maintain precision. Called by the '**' operator
power
{ "repo_name": "xien777/yajsw", "path": "yajsw/wrapper/src/main/java/org/codehaus/groovy/runtime/DefaultGroovyMethods.java", "license": "lgpl-2.1", "size": 704150 }
[ "java.math.BigDecimal" ]
import java.math.BigDecimal;
import java.math.*;
[ "java.math" ]
java.math;
2,415,997
@Test public void testMultiFilesMultiMaps() throws IOException, SQLException { multiFileTest(2, 10, 2); }
void function() throws IOException, SQLException { multiFileTest(2, 10, 2); }
/** Make sure we can use CombineFileInputFormat to handle multiple * files and multiple maps. */
Make sure we can use CombineFileInputFormat to handle multiple files and multiple maps
testMultiFilesMultiMaps
{ "repo_name": "bonnetb/sqoop", "path": "src/test/com/cloudera/sqoop/TestExport.java", "license": "apache-2.0", "size": 34292 }
[ "java.io.IOException", "java.sql.SQLException" ]
import java.io.IOException; import java.sql.SQLException;
import java.io.*; import java.sql.*;
[ "java.io", "java.sql" ]
java.io; java.sql;
2,108,998
private void writeObject(ObjectOutputStream s) throws IOException { s.defaultWriteObject(); }// end writeObject
void function(ObjectOutputStream s) throws IOException { s.defaultWriteObject(); }
/** * Serialize and write the BigDate object. Only the ordinal will be written. Other fields are transient. * * @param s stream to write to * @throws IOException standard */
Serialize and write the BigDate object. Only the ordinal will be written. Other fields are transient
writeObject
{ "repo_name": "stockcode/kids-english", "path": "src/main/java/com/mindprod/common11/BigDate.java", "license": "gpl-3.0", "size": 87496 }
[ "java.io.IOException", "java.io.ObjectOutputStream" ]
import java.io.IOException; import java.io.ObjectOutputStream;
import java.io.*;
[ "java.io" ]
java.io;
2,201,917
public final Set<Entry<String, L[]>> getEntries() { return (this.map != null) ? this.map.entrySet() : Collections.<Entry<String, L[]>>emptySet(); }
final Set<Entry<String, L[]>> function() { return (this.map != null) ? this.map.entrySet() : Collections.<Entry<String, L[]>>emptySet(); }
/** * Returns a set of entries from the map. * Each entry is a pair consisted of the property name * and the corresponding list of listeners. * * @return a set of entries from the map */
Returns a set of entries from the map. Each entry is a pair consisted of the property name and the corresponding list of listeners
getEntries
{ "repo_name": "greghaskins/openjdk-jdk7u-jdk", "path": "src/share/classes/java/beans/ChangeListenerMap.java", "license": "gpl-2.0", "size": 8066 }
[ "java.util.Collections", "java.util.Map", "java.util.Set" ]
import java.util.Collections; import java.util.Map; import java.util.Set;
import java.util.*;
[ "java.util" ]
java.util;
1,681,600
public Subject newSubject(final NameID nameId, final NameID subjectConfNameId, final String recipient, final ZonedDateTime notOnOrAfter, final String inResponseTo, final ZonedDateTime notBefore) { LOGGER.debug("Building subject for NameID [{}] and recipient [{}], in response to [{}]", nameId, recipient, inResponseTo); val confirmation = newSamlObject(SubjectConfirmation.class); confirmation.setMethod(SubjectConfirmation.METHOD_BEARER); val data = newSamlObject(SubjectConfirmationData.class); if (StringUtils.isNotBlank(recipient)) { data.setRecipient(recipient); } if (notOnOrAfter != null) { data.setNotOnOrAfter(notOnOrAfter.toInstant()); } if (StringUtils.isNotBlank(inResponseTo)) { data.setInResponseTo(inResponseTo); val ip = InetAddressUtils.getByName(inResponseTo); if (ip != null) { data.setAddress(ip.getHostName()); } } if (notBefore != null) { data.setNotBefore(notBefore.toInstant()); } confirmation.setSubjectConfirmationData(data); val subject = newSamlObject(Subject.class); if (nameId != null) { subject.setNameID(nameId); if (subjectConfNameId != null) { confirmation.setNameID(subjectConfNameId); } subject.setEncryptedID(null); confirmation.setEncryptedID(null); } subject.getSubjectConfirmations().add(confirmation); LOGGER.debug("Built subject [{}]", subject); return subject; }
Subject function(final NameID nameId, final NameID subjectConfNameId, final String recipient, final ZonedDateTime notOnOrAfter, final String inResponseTo, final ZonedDateTime notBefore) { LOGGER.debug(STR, nameId, recipient, inResponseTo); val confirmation = newSamlObject(SubjectConfirmation.class); confirmation.setMethod(SubjectConfirmation.METHOD_BEARER); val data = newSamlObject(SubjectConfirmationData.class); if (StringUtils.isNotBlank(recipient)) { data.setRecipient(recipient); } if (notOnOrAfter != null) { data.setNotOnOrAfter(notOnOrAfter.toInstant()); } if (StringUtils.isNotBlank(inResponseTo)) { data.setInResponseTo(inResponseTo); val ip = InetAddressUtils.getByName(inResponseTo); if (ip != null) { data.setAddress(ip.getHostName()); } } if (notBefore != null) { data.setNotBefore(notBefore.toInstant()); } confirmation.setSubjectConfirmationData(data); val subject = newSamlObject(Subject.class); if (nameId != null) { subject.setNameID(nameId); if (subjectConfNameId != null) { confirmation.setNameID(subjectConfNameId); } subject.setEncryptedID(null); confirmation.setEncryptedID(null); } subject.getSubjectConfirmations().add(confirmation); LOGGER.debug(STR, subject); return subject; }
/** * New subject element. * * @param nameId the nameId * @param subjectConfNameId the subject conf name id * @param recipient the recipient * @param notOnOrAfter the not on or after * @param inResponseTo the in response to * @param notBefore the not before * @return the subject */
New subject element
newSubject
{ "repo_name": "pdrados/cas", "path": "support/cas-server-support-saml-core-api/src/main/java/org/apereo/cas/support/saml/util/AbstractSaml20ObjectBuilder.java", "license": "apache-2.0", "size": 20166 }
[ "java.time.ZonedDateTime", "org.apache.commons.lang3.StringUtils", "org.apereo.cas.util.InetAddressUtils", "org.opensaml.saml.saml2.core.NameID", "org.opensaml.saml.saml2.core.Subject", "org.opensaml.saml.saml2.core.SubjectConfirmation", "org.opensaml.saml.saml2.core.SubjectConfirmationData" ]
import java.time.ZonedDateTime; import org.apache.commons.lang3.StringUtils; import org.apereo.cas.util.InetAddressUtils; import org.opensaml.saml.saml2.core.NameID; import org.opensaml.saml.saml2.core.Subject; import org.opensaml.saml.saml2.core.SubjectConfirmation; import org.opensaml.saml.saml2.core.SubjectConfirmationData;
import java.time.*; import org.apache.commons.lang3.*; import org.apereo.cas.util.*; import org.opensaml.saml.saml2.core.*;
[ "java.time", "org.apache.commons", "org.apereo.cas", "org.opensaml.saml" ]
java.time; org.apache.commons; org.apereo.cas; org.opensaml.saml;
37,905
@Override public void receiveMessage(VoidMessage message) { }
void function(VoidMessage message) { }
/** * This method accepts message from network * * @param message */
This method accepts message from network
receiveMessage
{ "repo_name": "huitseeker/nd4j", "path": "nd4j-parameter-server-parent/nd4j-parameter-server-node/src/main/java/org/nd4j/parameterserver/distributed/transport/LocalTransport.java", "license": "apache-2.0", "size": 3457 }
[ "org.nd4j.parameterserver.distributed.messages.VoidMessage" ]
import org.nd4j.parameterserver.distributed.messages.VoidMessage;
import org.nd4j.parameterserver.distributed.messages.*;
[ "org.nd4j.parameterserver" ]
org.nd4j.parameterserver;
2,876,884
public SnmpValue set(SnmpValue x, long var, Object data) throws SnmpStatusException { switch((int)var) { case 9: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 8: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 7: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 6: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 5: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 4: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 3: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 11: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 2: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 10: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 1: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); default: break; } throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); }
SnmpValue function(SnmpValue x, long var, Object data) throws SnmpStatusException { switch((int)var) { case 9: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 8: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 7: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 6: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 5: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 4: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 3: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 11: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 2: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 10: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); case 1: throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); default: break; } throw new SnmpStatusException(SnmpStatusException.snmpRspNotWritable); }
/** * Set the value of a scalar variable */
Set the value of a scalar variable
set
{ "repo_name": "TheTypoMaster/Scaper", "path": "openjdk/jdk/src/share/classes/sun/management/snmp/jvmmib/JvmThreadInstanceEntryMeta.java", "license": "gpl-2.0", "size": 12007 }
[ "com.sun.jmx.snmp.SnmpStatusException", "com.sun.jmx.snmp.SnmpValue" ]
import com.sun.jmx.snmp.SnmpStatusException; import com.sun.jmx.snmp.SnmpValue;
import com.sun.jmx.snmp.*;
[ "com.sun.jmx" ]
com.sun.jmx;
750,718
public void startRecording(String file) { if (this.mPlayer != null) { Log.d(LOG_TAG, "AudioPlayer Error: Can't record in play mode."); this.handler.sendJavascript("PhoneGap.Media.onStatus('" + this.id + "', "+MEDIA_ERROR+", "+MEDIA_ERROR_PLAY_MODE_SET+");"); } // Make sure we're not already recording else if (this.recorder == null) { this.audioFile = file; this.recorder = new MediaRecorder(); this.recorder.setAudioSource(MediaRecorder.AudioSource.MIC); this.recorder.setOutputFormat(MediaRecorder.OutputFormat.DEFAULT); // THREE_GPP); this.recorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT); //AMR_NB); this.recorder.setOutputFile(this.tempFile); try { this.recorder.prepare(); this.recorder.start(); this.setState(MEDIA_RUNNING); return; } catch (IllegalStateException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } this.handler.sendJavascript("PhoneGap.Media.onStatus('" + this.id + "', "+MEDIA_ERROR+", "+MEDIA_ERROR_STARTING_RECORDING+");"); } else { Log.d(LOG_TAG, "AudioPlayer Error: Already recording."); this.handler.sendJavascript("PhoneGap.Media.onStatus('" + this.id + "', "+MEDIA_ERROR+", "+MEDIA_ERROR_ALREADY_RECORDING+");"); } }
void function(String file) { if (this.mPlayer != null) { Log.d(LOG_TAG, STR); this.handler.sendJavascript(STR + this.id + STR+MEDIA_ERROR+STR+MEDIA_ERROR_PLAY_MODE_SET+");"); } else if (this.recorder == null) { this.audioFile = file; this.recorder = new MediaRecorder(); this.recorder.setAudioSource(MediaRecorder.AudioSource.MIC); this.recorder.setOutputFormat(MediaRecorder.OutputFormat.DEFAULT); this.recorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT); this.recorder.setOutputFile(this.tempFile); try { this.recorder.prepare(); this.recorder.start(); this.setState(MEDIA_RUNNING); return; } catch (IllegalStateException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } this.handler.sendJavascript(STR + this.id + STR+MEDIA_ERROR+STR+MEDIA_ERROR_STARTING_RECORDING+");"); } else { Log.d(LOG_TAG, STR); this.handler.sendJavascript(STR + this.id + STR+MEDIA_ERROR+STR+MEDIA_ERROR_ALREADY_RECORDING+");"); } }
/** * Start recording the specified file. * * @param file The name of the file */
Start recording the specified file
startRecording
{ "repo_name": "kmshi/android", "path": "appMobiLib/src/com/phonegap/AudioPlayer.java", "license": "mit", "size": 12790 }
[ "android.media.MediaRecorder", "android.util.Log", "java.io.IOException" ]
import android.media.MediaRecorder; import android.util.Log; import java.io.IOException;
import android.media.*; import android.util.*; import java.io.*;
[ "android.media", "android.util", "java.io" ]
android.media; android.util; java.io;
2,628,610
@Override public IExpr mapMatrixColumns(int[] dim, Function<IExpr, IExpr> f);
IExpr function(int[] dim, Function<IExpr, IExpr> f);
/** * This method assumes that <code>this</code> is a list of list in matrix form. It combines the * column values in a list as argument for the given <code>function</code>. <b>Example</b> a * matrix <code>{{x1, y1,...}, {x2, y2, ...}, ...}</code> will be converted to <code> * {f.apply({x1, x2,...}), f.apply({y1, y2, ...}), ...}</code> * * @param dim the dimension of the matrix * @param f a unary function * @return */
This method assumes that <code>this</code> is a list of list in matrix form. It combines the column values in a list as argument for the given <code>function</code>. Example a matrix <code>{{x1, y1,...}, {x2, y2, ...}, ...}</code> will be converted to <code> {f.apply({x1, x2,...}), f.apply({y1, y2, ...}), ...}</code>
mapMatrixColumns
{ "repo_name": "axkr/symja_android_library", "path": "symja_android_library/matheclipse-core/src/main/java/org/matheclipse/core/interfaces/IAST.java", "license": "gpl-3.0", "size": 60090 }
[ "java.util.function.Function" ]
import java.util.function.Function;
import java.util.function.*;
[ "java.util" ]
java.util;
1,548,990
void doEviction(List<EntryCache> caches, long sizeToFree);
void doEviction(List<EntryCache> caches, long sizeToFree);
/** * Perform the cache eviction of at least sizeToFree bytes on the supplied list of caches. * * @param caches * the list of caches to consider * @param sizeToFree * the minimum size in bytes to be freed */
Perform the cache eviction of at least sizeToFree bytes on the supplied list of caches
doEviction
{ "repo_name": "saandrews/pulsar", "path": "managed-ledger/src/main/java/org/apache/bookkeeper/mledger/impl/EntryCacheEvictionPolicy.java", "license": "apache-2.0", "size": 1315 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,798,146
public Table getTable() { return (Table) super.get(TableColumn.PROPERTY.table.name()); }
Table function() { return (Table) super.get(TableColumn.PROPERTY.table.name()); }
/** * Returns the value of the <b>table</b> property. * * @return the value of the <b>table</b> property. */
Returns the value of the table property
getTable
{ "repo_name": "plasma-framework/plasma", "path": "plasma-provisioning/src/main/java/org/plasma/provisioning/rdb/mysql/v5_5/impl/TableColumnImpl.java", "license": "apache-2.0", "size": 21654 }
[ "org.plasma.provisioning.rdb.mysql.v5_5.Table", "org.plasma.provisioning.rdb.mysql.v5_5.TableColumn" ]
import org.plasma.provisioning.rdb.mysql.v5_5.Table; import org.plasma.provisioning.rdb.mysql.v5_5.TableColumn;
import org.plasma.provisioning.rdb.mysql.v5_5.*;
[ "org.plasma.provisioning" ]
org.plasma.provisioning;
667,099
private int[] evaluateIntervalLiteralAsSecond( RelDataTypeSystem typeSystem, int sign, String value, String originalValue, SqlParserPos pos) { BigDecimal second; BigDecimal secondFrac; boolean hasFractionalSecond; // validate as SECOND(startPrecision, fractionalSecondPrecision) // e.g. 'SS' or 'SS.SSS' // Note: must check two patterns, since fractional second is optional final int fractionalSecondPrecision = getFractionalSecondPrecision(typeSystem); String intervalPatternWithFracSec = "(\\d+)\\.(\\d{1," + fractionalSecondPrecision + "})"; String intervalPatternWithoutFracSec = "(\\d+)"; Matcher m = Pattern.compile(intervalPatternWithFracSec).matcher(value); if (m.matches()) { hasFractionalSecond = true; } else { m = Pattern.compile(intervalPatternWithoutFracSec).matcher(value); hasFractionalSecond = false; } if (m.matches()) { // Break out field values try { second = parseField(m, 1); } catch (NumberFormatException e) { throw invalidValueException(pos, originalValue); } if (hasFractionalSecond) { secondFrac = normalizeSecondFraction(castNonNull(m.group(2))); } else { secondFrac = ZERO; } // Validate individual fields checkLeadFieldInRange(typeSystem, sign, second, TimeUnit.SECOND, pos); if (!isFractionalSecondFieldInRange(secondFrac)) { throw invalidValueException(pos, originalValue); } // package values up for return return fillIntervalValueArray( sign, ZERO, ZERO, ZERO, second, secondFrac); } else { throw invalidValueException(pos, originalValue); } }
int[] function( RelDataTypeSystem typeSystem, int sign, String value, String originalValue, SqlParserPos pos) { BigDecimal second; BigDecimal secondFrac; boolean hasFractionalSecond; final int fractionalSecondPrecision = getFractionalSecondPrecision(typeSystem); String intervalPatternWithFracSec = STR + fractionalSecondPrecision + "})"; String intervalPatternWithoutFracSec = STR; Matcher m = Pattern.compile(intervalPatternWithFracSec).matcher(value); if (m.matches()) { hasFractionalSecond = true; } else { m = Pattern.compile(intervalPatternWithoutFracSec).matcher(value); hasFractionalSecond = false; } if (m.matches()) { try { second = parseField(m, 1); } catch (NumberFormatException e) { throw invalidValueException(pos, originalValue); } if (hasFractionalSecond) { secondFrac = normalizeSecondFraction(castNonNull(m.group(2))); } else { secondFrac = ZERO; } checkLeadFieldInRange(typeSystem, sign, second, TimeUnit.SECOND, pos); if (!isFractionalSecondFieldInRange(secondFrac)) { throw invalidValueException(pos, originalValue); } return fillIntervalValueArray( sign, ZERO, ZERO, ZERO, second, secondFrac); } else { throw invalidValueException(pos, originalValue); } }
/** * Validates an INTERVAL literal against an SECOND interval qualifier. * * @throws org.apache.calcite.runtime.CalciteContextException if the interval * value is illegal */
Validates an INTERVAL literal against an SECOND interval qualifier
evaluateIntervalLiteralAsSecond
{ "repo_name": "datametica/calcite", "path": "core/src/main/java/org/apache/calcite/sql/SqlIntervalQualifier.java", "license": "apache-2.0", "size": 37956 }
[ "java.math.BigDecimal", "java.util.regex.Matcher", "java.util.regex.Pattern", "org.apache.calcite.avatica.util.TimeUnit", "org.apache.calcite.rel.type.RelDataTypeSystem", "org.apache.calcite.sql.parser.SqlParserPos" ]
import java.math.BigDecimal; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.calcite.avatica.util.TimeUnit; import org.apache.calcite.rel.type.RelDataTypeSystem; import org.apache.calcite.sql.parser.SqlParserPos;
import java.math.*; import java.util.regex.*; import org.apache.calcite.avatica.util.*; import org.apache.calcite.rel.type.*; import org.apache.calcite.sql.parser.*;
[ "java.math", "java.util", "org.apache.calcite" ]
java.math; java.util; org.apache.calcite;
97,495
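A minimal standalone sketch of the two-pattern SECOND matching shown in the record above, assuming a fractional-second precision of 3; the class name, variable names, and the printed output are illustrative and not part of Calcite.

import java.math.BigDecimal;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Sketch of the "try SS.SSS first, then fall back to plain SS" matching used above.
public class SecondIntervalSketch {
    public static void main(String[] args) {
        String value = "12.345";
        int fractionalSecondPrecision = 3; // assumed precision, not read from a type system
        Pattern withFrac = Pattern.compile("(\\d+)\\.(\\d{1," + fractionalSecondPrecision + "})");
        Pattern withoutFrac = Pattern.compile("(\\d+)");

        Matcher m = withFrac.matcher(value);
        boolean hasFractionalSecond = m.matches();
        if (!hasFractionalSecond) {
            m = withoutFrac.matcher(value);
            if (!m.matches()) {
                throw new IllegalArgumentException("Illegal interval value: " + value);
            }
        }
        BigDecimal second = new BigDecimal(m.group(1));
        BigDecimal secondFrac = hasFractionalSecond ? new BigDecimal(m.group(2)) : BigDecimal.ZERO;
        System.out.println("second=" + second + ", fraction=" + secondFrac);
    }
}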
public DatagramChannel getChannel() { return null; }
DatagramChannel function() { return null; }
/** * Returns the datagram channel associated with this datagram socket. * * @return The associated <code>DatagramChannel</code> object or null * * @since 1.4 */
Returns the datagram channel associated with this datagram socket
getChannel
{ "repo_name": "aosm/gcc_40", "path": "libjava/java/net/DatagramSocket.java", "license": "gpl-2.0", "size": 25265 }
[ "java.nio.channels.DatagramChannel" ]
import java.nio.channels.DatagramChannel;
import java.nio.channels.*;
[ "java.nio" ]
java.nio;
2,250,936
@Test public void testGetRISAsFile() { try (Response response = target(urls.path(RECORDS_RECORD, RECORDS_RIS_FILE).params(PI).build()) .request() .get()) { assertEquals("Should return status 200", 200, response.getStatus()); assertNotNull("Should return user object as json", response.getEntity()); String entity = response.readEntity(String.class); assertTrue(entity.contains("TY - BOOK")); assertTrue(entity.contains("CN - 74241")); String fileName = PI + "_LOG_0000.ris"; assertEquals("attachment; filename=\"" + fileName + "\"", response.getHeaderString("Content-Disposition")); } }
void function() { try (Response response = target(urls.path(RECORDS_RECORD, RECORDS_RIS_FILE).params(PI).build()) .request() .get()) { assertEquals(STR, 200, response.getStatus()); assertNotNull(STR, response.getEntity()); String entity = response.readEntity(String.class); assertTrue(entity.contains(STR)); assertTrue(entity.contains(STR)); String fileName = PI + STR; assertEquals(STR + fileName + STR, response.getHeaderString("Content-Disposition")); } }
/** * Test method for {@link io.goobi.viewer.api.rest.v1.records.RecordResource#getRISAsFile()}. */
Test method for <code>io.goobi.viewer.api.rest.v1.records.RecordResource#getRISAsFile()</code>
testGetRISAsFile
{ "repo_name": "intranda/goobi-viewer-core", "path": "goobi-viewer-core/src/test/java/io/goobi/viewer/api/rest/v1/records/RecordResourceTest.java", "license": "gpl-2.0", "size": 11345 }
[ "javax.ws.rs.core.Response", "org.junit.Assert" ]
import javax.ws.rs.core.Response; import org.junit.Assert;
import javax.ws.rs.core.*; import org.junit.*;
[ "javax.ws", "org.junit" ]
javax.ws; org.junit;
2,425,520
private short getFileDescriptor(final String filePathName) throws GjokiiException { final byte[] fileNameBytes = Utils.stringToBytes(filePathName, true); final byte[] getFileID = Utils.appendToByteArray(GET_FILE_ID, fileNameBytes); send((byte) 0x6d, getFileID); final byte[] result = receive(); final byte[] fileDescriptor = Utils.subByteArray(result, 14, 2); final short fileDesc = Utils.byteArrayToShort(fileDescriptor, 0); return fileDesc; }
short function(final String filePathName) throws GjokiiException { final byte[] fileNameBytes = Utils.stringToBytes(filePathName, true); final byte[] getFileID = Utils.appendToByteArray(GET_FILE_ID, fileNameBytes); send((byte) 0x6d, getFileID); final byte[] result = receive(); final byte[] fileDescriptor = Utils.subByteArray(result, 14, 2); final short fileDesc = Utils.byteArrayToShort(fileDescriptor, 0); return fileDesc; }
/** * Get a file descriptor. * * @param filePathName the file to get, make sure the entry exists and is a * file by using getFileInfo first * @return the file descriptor */
Get a file descriptor
getFileDescriptor
{ "repo_name": "mariotaku/anokicert", "path": "src/net/tuxed/gjokii/Gjokii.java", "license": "gpl-3.0", "size": 22660 }
[ "net.tuxed.misc.Utils" ]
import net.tuxed.misc.Utils;
import net.tuxed.misc.*;
[ "net.tuxed.misc" ]
net.tuxed.misc;
2,397,586
public void testUnknownObjectFieldnameExpection() throws IOException { { IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" + " \"bad_fieldname\" : { \"field\" : \"value\" }\n \n" + "}\n"); assertEquals("[highlight] unknown field [bad_fieldname], parser not found", e.getMessage()); } { ParsingException e = expectParseThrows(ParsingException.class, "{\n" + " \"fields\" : {\n" + " \"body\" : {\n" + " \"bad_fieldname\" : { \"field\" : \"value\" }\n" + " }\n" + " }\n" + "}\n"); assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); assertEquals("[fields] failed to parse field [body]", e.getCause().getMessage()); assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); } }
void function() throws IOException { { IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" + STRbad_fieldname\STRfield\STRvalue\STR + "}\n"); assertEquals(STR, e.getMessage()); } { ParsingException e = expectParseThrows(ParsingException.class, "{\n" + STRfields\STR + STRbody\STR + STRbad_fieldname\STRfield\STRvalue\STR + STR + STR + "}\n"); assertEquals(STR, e.getMessage()); assertEquals(STR, e.getCause().getMessage()); assertEquals(STR, e.getCause().getCause().getMessage()); } }
/** * test that an unknown field name causes an exception */
test that an unknown field name causes an exception
testUnknownObjectFieldnameExpection
{ "repo_name": "MaineC/elasticsearch", "path": "core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java", "license": "apache-2.0", "size": 33905 }
[ "java.io.IOException", "org.elasticsearch.common.ParsingException" ]
import java.io.IOException; import org.elasticsearch.common.ParsingException;
import java.io.*; import org.elasticsearch.common.*;
[ "java.io", "org.elasticsearch.common" ]
java.io; org.elasticsearch.common;
2,789,478
OpenCmsTestProperties.initialize(org.opencms.test.AllTests.TEST_PROPERTIES_PATH); TestSuite suite = new TestSuite(); suite.setName(TestLoginAndPasswordHandler.class.getName()); suite.addTest(new TestLoginAndPasswordHandler("testSCrypt")); suite.addTest(new TestLoginAndPasswordHandler("testUserDefaultPasswords")); suite.addTest(new TestLoginAndPasswordHandler("testCheckPasswordDigest")); suite.addTest(new TestLoginAndPasswordHandler("testPasswordConvesion")); suite.addTest(new TestLoginAndPasswordHandler("testLoginUser")); suite.addTest(new TestLoginAndPasswordHandler("testLoginMessage")); suite.addTest(new TestLoginAndPasswordHandler("testPasswordValidation")); suite.addTest(new TestLoginAndPasswordHandler("testSetResetPassword")); TestSetup wrapper = new TestSetup(suite) {
OpenCmsTestProperties.initialize(org.opencms.test.AllTests.TEST_PROPERTIES_PATH); TestSuite suite = new TestSuite(); suite.setName(TestLoginAndPasswordHandler.class.getName()); suite.addTest(new TestLoginAndPasswordHandler(STR)); suite.addTest(new TestLoginAndPasswordHandler(STR)); suite.addTest(new TestLoginAndPasswordHandler(STR)); suite.addTest(new TestLoginAndPasswordHandler(STR)); suite.addTest(new TestLoginAndPasswordHandler(STR)); suite.addTest(new TestLoginAndPasswordHandler(STR)); suite.addTest(new TestLoginAndPasswordHandler(STR)); suite.addTest(new TestLoginAndPasswordHandler(STR)); TestSetup wrapper = new TestSetup(suite) {
/** * Test suite for this test class.<p> * * @return the test suite */
Test suite for this test class
suite
{ "repo_name": "alkacon/opencms-core", "path": "test/org/opencms/security/TestLoginAndPasswordHandler.java", "license": "lgpl-2.1", "size": 19685 }
[ "junit.extensions.TestSetup", "junit.framework.TestSuite", "org.opencms.test.OpenCmsTestProperties" ]
import junit.extensions.TestSetup; import junit.framework.TestSuite; import org.opencms.test.OpenCmsTestProperties;
import junit.extensions.*; import junit.framework.*; import org.opencms.test.*;
[ "junit.extensions", "junit.framework", "org.opencms.test" ]
junit.extensions; junit.framework; org.opencms.test;
661,383
public PartitionRuntimeState createPartitionStateInternal() { lock.lock(); try { if (!partitionStateManager.isInitialized()) { return null; } List<MigrationInfo> completedMigrations = migrationManager.getCompletedMigrationsCopy(); InternalPartition[] partitions = partitionStateManager.getPartitions(); PartitionRuntimeState state = new PartitionRuntimeState(partitions, completedMigrations, getPartitionStateVersion()); state.setActiveMigration(migrationManager.getActiveMigration()); return state; } finally { lock.unlock(); } }
PartitionRuntimeState function() { lock.lock(); try { if (!partitionStateManager.isInitialized()) { return null; } List<MigrationInfo> completedMigrations = migrationManager.getCompletedMigrationsCopy(); InternalPartition[] partitions = partitionStateManager.getPartitions(); PartitionRuntimeState state = new PartitionRuntimeState(partitions, completedMigrations, getPartitionStateVersion()); state.setActiveMigration(migrationManager.getActiveMigration()); return state; } finally { lock.unlock(); } }
/** * Returns a copy of the partition table or {@code null} if not initialized. This method will acquire the partition service * lock. */
Returns a copy of the partition table or null if not initialized. This method will acquire the partition service lock
createPartitionStateInternal
{ "repo_name": "emrahkocaman/hazelcast", "path": "hazelcast/src/main/java/com/hazelcast/internal/partition/impl/InternalPartitionServiceImpl.java", "license": "apache-2.0", "size": 53026 }
[ "com.hazelcast.internal.partition.InternalPartition", "com.hazelcast.internal.partition.MigrationInfo", "com.hazelcast.internal.partition.PartitionRuntimeState", "java.util.List" ]
import com.hazelcast.internal.partition.InternalPartition; import com.hazelcast.internal.partition.MigrationInfo; import com.hazelcast.internal.partition.PartitionRuntimeState; import java.util.List;
import com.hazelcast.internal.partition.*; import java.util.*;
[ "com.hazelcast.internal", "java.util" ]
com.hazelcast.internal; java.util;
403,845
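A generic sketch of the "snapshot under a lock" pattern the record above follows; the holder class and its String-list state are placeholders, not Hazelcast types.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

// Acquire the lock, bail out if uninitialized, build a defensive copy,
// and always release the lock in finally.
class SnapshotHolder {
    private final Lock lock = new ReentrantLock();
    private final List<String> state = new ArrayList<>();

    List<String> createSnapshot() {
        lock.lock();
        try {
            if (state.isEmpty()) {
                return null; // mirrors the "not initialized" early return above
            }
            return new ArrayList<>(state); // copy taken while the lock is held
        } finally {
            lock.unlock();
        }
    }
}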
private void declareVar(Node n) { Preconditions.checkState(n.isName()); CompilerInput input = compiler.getInput(inputId); String name = n.getString(); if (scope.isDeclared(name, false) || (scope.isLocal() && name.equals(ARGUMENTS))) { redeclarationHandler.onRedeclaration( scope, name, n, input); } else { scope.declare(name, n, null, input); } }
void function(Node n) { Preconditions.checkState(n.isName()); CompilerInput input = compiler.getInput(inputId); String name = n.getString(); if (scope.isDeclared(name, false) || (scope.isLocal() && name.equals(ARGUMENTS))) { redeclarationHandler.onRedeclaration( scope, name, n, input); } else { scope.declare(name, n, null, input); } }
/** * Declares a variable. * * @param n The node corresponding to the variable name. */
Declares a variable
declareVar
{ "repo_name": "abdullah38rcc/closure-compiler", "path": "src/com/google/javascript/jscomp/SyntacticScopeCreator.java", "license": "apache-2.0", "size": 8786 }
[ "com.google.common.base.Preconditions", "com.google.javascript.rhino.Node" ]
import com.google.common.base.Preconditions; import com.google.javascript.rhino.Node;
import com.google.common.base.*; import com.google.javascript.rhino.*;
[ "com.google.common", "com.google.javascript" ]
com.google.common; com.google.javascript;
1,366,798
public static <E> Queue<E> unmodifiableQueue(final Queue<? extends E> queue) { return UnmodifiableQueue.unmodifiableQueue(queue); }
static <E> Queue<E> function(final Queue<? extends E> queue) { return UnmodifiableQueue.unmodifiableQueue(queue); }
/** * Returns an unmodifiable queue backed by the given queue. * * @param <E> the type of the elements in the queue * @param queue the queue to make unmodifiable, must not be null * @return an unmodifiable queue backed by that queue * @throws NullPointerException if the queue is null */
Returns an unmodifiable queue backed by the given queue
unmodifiableQueue
{ "repo_name": "apache/commons-collections", "path": "src/main/java/org/apache/commons/collections4/QueueUtils.java", "license": "apache-2.0", "size": 5482 }
[ "java.util.Queue", "org.apache.commons.collections4.queue.UnmodifiableQueue" ]
import java.util.Queue; import org.apache.commons.collections4.queue.UnmodifiableQueue;
import java.util.*; import org.apache.commons.collections4.queue.*;
[ "java.util", "org.apache.commons" ]
java.util; org.apache.commons;
1,366,687
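A hedged usage sketch for the method above; it assumes commons-collections4 is on the classpath, and the queue contents are arbitrary.

import java.util.LinkedList;
import java.util.Queue;
import org.apache.commons.collections4.QueueUtils;

// Wrap a backing queue; reads pass through, writes are rejected.
public class UnmodifiableQueueExample {
    public static void main(String[] args) {
        Queue<String> backing = new LinkedList<>();
        backing.add("first");
        Queue<String> readOnly = QueueUtils.unmodifiableQueue(backing);
        System.out.println(readOnly.peek()); // prints "first"
        // readOnly.add("second");           // would throw UnsupportedOperationException
    }
}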
synchronized (INSTANCE_ID_FILE) { stop(); instance = new VersionCheck(); Common.timer.schedule(instance); } }
synchronized (INSTANCE_ID_FILE) { stop(); instance = new VersionCheck(); Common.timer.schedule(instance); } }
/** * This method will set up the version checking job. It assumes that the * corresponding system setting for running this job is true. */
This method will set up the version checking job. It assumes that the corresponding system setting for running this job is true
start
{ "repo_name": "sdtabilit/Scada-LTS", "path": "src/com/serotonin/mango/rt/maint/VersionCheck.java", "license": "gpl-2.0", "size": 8899 }
[ "com.serotonin.mango.Common" ]
import com.serotonin.mango.Common;
import com.serotonin.mango.*;
[ "com.serotonin.mango" ]
com.serotonin.mango;
650,766
public final Set<String> getFilterPath(final boolean isServlet, final ServletContext servletContext, final String filterName) { InputStream is = servletContext.getResourceAsStream("/WEB-INF/web.xml"); if (is != null) { try { return getFilterPath(isServlet, filterName, is); } catch (ParserConfigurationException | SAXException | IOException ex) { log.error("Error reading servlet/filter path from web.xml", ex); } catch (SecurityException e) { // Swallow this at INFO. log.info("Couldn't read web.xml to automatically pick up servlet/filter path: " + e.getMessage()); } } return Collections.emptySet(); }
final Set<String> function(final boolean isServlet, final ServletContext servletContext, final String filterName) { InputStream is = servletContext.getResourceAsStream(STR); if (is != null) { try { return getFilterPath(isServlet, filterName, is); } catch (ParserConfigurationException | SAXException | IOException ex) { log.error(STR, ex); } catch (SecurityException e) { log.info(STR + e.getMessage()); } } return Collections.emptySet(); }
/** * Gets Wicket filter path via ServletContext and the filter name * * @param isServlet * true if Servlet, false if Filter * @param servletContext * @param filterName * @return Filter paths retrieved from "url-pattern" */
Gets Wicket filter path via ServletContext and the filter name
getFilterPath
{ "repo_name": "dashorst/wicket", "path": "wicket-util/src/main/java/org/apache/wicket/util/file/WebXmlFile.java", "license": "apache-2.0", "size": 9548 }
[ "java.io.IOException", "java.io.InputStream", "java.util.Collections", "java.util.Set", "javax.servlet.ServletContext", "javax.xml.parsers.ParserConfigurationException", "org.xml.sax.SAXException" ]
import java.io.IOException; import java.io.InputStream; import java.util.Collections; import java.util.Set; import javax.servlet.ServletContext; import javax.xml.parsers.ParserConfigurationException; import org.xml.sax.SAXException;
import java.io.*; import java.util.*; import javax.servlet.*; import javax.xml.parsers.*; import org.xml.sax.*;
[ "java.io", "java.util", "javax.servlet", "javax.xml", "org.xml.sax" ]
java.io; java.util; javax.servlet; javax.xml; org.xml.sax;
32,551
public static List<NabuccoPropertyDescriptor> getPropertyDescriptorList() { return PropertyCache.getInstance().retrieve(TestConfigurationMsg.class).getAllProperties(); }
static List<NabuccoPropertyDescriptor> function() { return PropertyCache.getInstance().retrieve(TestConfigurationMsg.class).getAllProperties(); }
/** * Getter for the PropertyDescriptorList. * * @return the List<NabuccoPropertyDescriptor>. */
Getter for the PropertyDescriptorList
getPropertyDescriptorList
{ "repo_name": "NABUCCO/org.nabucco.testautomation.config", "path": "org.nabucco.testautomation.config.facade.message/src/main/gen/org/nabucco/testautomation/config/facade/message/TestConfigurationMsg.java", "license": "epl-1.0", "size": 7336 }
[ "java.util.List", "org.nabucco.framework.base.facade.datatype.property.NabuccoPropertyDescriptor", "org.nabucco.framework.base.facade.datatype.property.PropertyCache" ]
import java.util.List; import org.nabucco.framework.base.facade.datatype.property.NabuccoPropertyDescriptor; import org.nabucco.framework.base.facade.datatype.property.PropertyCache;
import java.util.*; import org.nabucco.framework.base.facade.datatype.property.*;
[ "java.util", "org.nabucco.framework" ]
java.util; org.nabucco.framework;
1,497,504
public PropertyValue getNodeProperty(int version, String path, QName name) { if (path == null || name == null) { throw new AVMBadArgumentException("Illegal null argument."); } return fAVMRepository.getNodeProperty(version, path, name); }
PropertyValue function(int version, String path, QName name) { if (path == null || name == null) { throw new AVMBadArgumentException(STR); } return fAVMRepository.getNodeProperty(version, path, name); }
/** * Get a property of a node by QName. * @param version The version to look under. * @param path The path to the node. * @param name The QName. * @return The PropertyValue or null if it doesn't exist. */
Get a property of a node by QName
getNodeProperty
{ "repo_name": "loftuxab/community-edition-old", "path": "projects/repository/source/java/org/alfresco/repo/avm/AVMServiceImpl.java", "license": "lgpl-3.0", "size": 59118 }
[ "org.alfresco.repo.domain.PropertyValue", "org.alfresco.service.cmr.avm.AVMBadArgumentException", "org.alfresco.service.namespace.QName" ]
import org.alfresco.repo.domain.PropertyValue; import org.alfresco.service.cmr.avm.AVMBadArgumentException; import org.alfresco.service.namespace.QName;
import org.alfresco.repo.domain.*; import org.alfresco.service.cmr.avm.*; import org.alfresco.service.namespace.*;
[ "org.alfresco.repo", "org.alfresco.service" ]
org.alfresco.repo; org.alfresco.service;
118,896
public boolean matches(InventoryCrafting par1InventoryCrafting, World par2World) { ItemStack itemstack = null; ArrayList arraylist = new ArrayList(); for (int i = 0; i < par1InventoryCrafting.getSizeInventory(); ++i) { ItemStack itemstack1 = par1InventoryCrafting.getStackInSlot(i); if (itemstack1 != null) { if (itemstack1.getItem() instanceof ItemArmor) { ItemArmor itemarmor = (ItemArmor)itemstack1.getItem(); if (itemarmor.getArmorMaterial() != EnumArmorMaterial.CLOTH || itemstack != null) { return false; } itemstack = itemstack1; } else { if (itemstack1.itemID != Item.dyePowder.itemID) { return false; } arraylist.add(itemstack1); } } } return itemstack != null && !arraylist.isEmpty(); }
boolean function(InventoryCrafting par1InventoryCrafting, World par2World) { ItemStack itemstack = null; ArrayList arraylist = new ArrayList(); for (int i = 0; i < par1InventoryCrafting.getSizeInventory(); ++i) { ItemStack itemstack1 = par1InventoryCrafting.getStackInSlot(i); if (itemstack1 != null) { if (itemstack1.getItem() instanceof ItemArmor) { ItemArmor itemarmor = (ItemArmor)itemstack1.getItem(); if (itemarmor.getArmorMaterial() != EnumArmorMaterial.CLOTH || itemstack != null) { return false; } itemstack = itemstack1; } else { if (itemstack1.itemID != Item.dyePowder.itemID) { return false; } arraylist.add(itemstack1); } } } return itemstack != null && !arraylist.isEmpty(); }
/** * Used to check if a recipe matches current crafting inventory */
Used to check if a recipe matches current crafting inventory
matches
{ "repo_name": "wildex999/stjerncraft_mcpc", "path": "src/minecraft/net/minecraft/item/crafting/RecipesArmorDyes.java", "license": "gpl-3.0", "size": 5157 }
[ "java.util.ArrayList", "net.minecraft.inventory.InventoryCrafting", "net.minecraft.item.EnumArmorMaterial", "net.minecraft.item.Item", "net.minecraft.item.ItemArmor", "net.minecraft.item.ItemStack", "net.minecraft.world.World" ]
import java.util.ArrayList; import net.minecraft.inventory.InventoryCrafting; import net.minecraft.item.EnumArmorMaterial; import net.minecraft.item.Item; import net.minecraft.item.ItemArmor; import net.minecraft.item.ItemStack; import net.minecraft.world.World;
import java.util.*; import net.minecraft.inventory.*; import net.minecraft.item.*; import net.minecraft.world.*;
[ "java.util", "net.minecraft.inventory", "net.minecraft.item", "net.minecraft.world" ]
java.util; net.minecraft.inventory; net.minecraft.item; net.minecraft.world;
1,016,002
public void loginWith3Pid(final String medium, final String address, final String password, final String deviceName, @Nullable final String deviceId, final ApiCallback<Credentials> callback) { final String description = "loginWith3pid : " + address; PasswordLoginParams params = new PasswordLoginParams(); params.setThirdPartyIdentifier(medium, address, password); params.setDeviceName(deviceName); params.setDeviceId(deviceId); login(params, callback, description); }
void function(final String medium, final String address, final String password, final String deviceName, @Nullable final String deviceId, final ApiCallback<Credentials> callback) { final String description = STR + address; PasswordLoginParams params = new PasswordLoginParams(); params.setThirdPartyIdentifier(medium, address, password); params.setDeviceName(deviceName); params.setDeviceId(deviceId); login(params, callback, description); }
/** * Attempt to login with 3pid/password * * @param medium the medium of the 3pid * @param address the address of the 3pid * @param password the password * @param deviceName the device name * @param deviceId the device id, used for e2e encryption * @param callback the callback success and failure callback */
Attempt to login with 3pid/password
loginWith3Pid
{ "repo_name": "matrix-org/matrix-android-sdk", "path": "matrix-sdk/src/main/java/org/matrix/androidsdk/rest/client/LoginRestClient.java", "license": "apache-2.0", "size": 16397 }
[ "androidx.annotation.Nullable", "org.matrix.androidsdk.core.callback.ApiCallback", "org.matrix.androidsdk.rest.model.login.Credentials", "org.matrix.androidsdk.rest.model.login.PasswordLoginParams" ]
import androidx.annotation.Nullable; import org.matrix.androidsdk.core.callback.ApiCallback; import org.matrix.androidsdk.rest.model.login.Credentials; import org.matrix.androidsdk.rest.model.login.PasswordLoginParams;
import androidx.annotation.*; import org.matrix.androidsdk.core.callback.*; import org.matrix.androidsdk.rest.model.login.*;
[ "androidx.annotation", "org.matrix.androidsdk" ]
androidx.annotation; org.matrix.androidsdk;
2,441,854
boolean execute(final PlayerPoints plugin, final CommandSender sender, final Command command, final String label, String[] args, EnumMap<Flag, String> info);
boolean execute(final PlayerPoints plugin, final CommandSender sender, final Command command, final String label, String[] args, EnumMap<Flag, String> info);
/** * Execution method for the command. * * @param sender * - Sender of the command. * @param command * - Command used. * @param label * - Label. * @param args * - Command arguments. * @return True if valid command and executed. Else false. */
Execution method for the command
execute
{ "repo_name": "Mitsugaru/PlayerPoints", "path": "src/org/black_ixx/playerpoints/services/PointsCommand.java", "license": "gpl-3.0", "size": 879 }
[ "java.util.EnumMap", "org.black_ixx.playerpoints.PlayerPoints", "org.black_ixx.playerpoints.models.Flag", "org.bukkit.command.Command", "org.bukkit.command.CommandSender" ]
import java.util.EnumMap; import org.black_ixx.playerpoints.PlayerPoints; import org.black_ixx.playerpoints.models.Flag; import org.bukkit.command.Command; import org.bukkit.command.CommandSender;
import java.util.*; import org.black_ixx.playerpoints.*; import org.black_ixx.playerpoints.models.*; import org.bukkit.command.*;
[ "java.util", "org.black_ixx.playerpoints", "org.bukkit.command" ]
java.util; org.black_ixx.playerpoints; org.bukkit.command;
376,371
// ---------------------------------------- // InputPort methods // ---------------------------------------- PortEntity createInputPort(Revision revision, String groupId, PortDTO inputPortDTO);
PortEntity createInputPort(Revision revision, String groupId, PortDTO inputPortDTO);
/** * Creates a new input port. * * @param revision revision * @param groupId The id of the group this port should be create in * @param inputPortDTO The input PortDTO * @return snapshot */
Creates a new input port
createInputPort
{ "repo_name": "WilliamNouet/nifi", "path": "nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/NiFiServiceFacade.java", "license": "apache-2.0", "size": 53848 }
[ "org.apache.nifi.web.api.dto.PortDTO", "org.apache.nifi.web.api.entity.PortEntity" ]
import org.apache.nifi.web.api.dto.PortDTO; import org.apache.nifi.web.api.entity.PortEntity;
import org.apache.nifi.web.api.dto.*; import org.apache.nifi.web.api.entity.*;
[ "org.apache.nifi" ]
org.apache.nifi;
125,551
public void attachToActivity(Activity activity, int slideStyle, boolean actionbarOverlay) { if (slideStyle != SLIDING_WINDOW && slideStyle != SLIDING_CONTENT) throw new IllegalArgumentException( "slideStyle must be either SLIDING_WINDOW or SLIDING_CONTENT"); if (getParent() != null) throw new IllegalStateException( "This SlidingMenu appears to already be attached"); // get the window background TypedArray a = activity.getTheme().obtainStyledAttributes(new int[] { android.R.attr.windowBackground }); int background = a.getResourceId(0, 0); a.recycle(); switch (slideStyle) { case SLIDING_WINDOW: mActionbarOverlay = false; ViewGroup decor = (ViewGroup) activity.getWindow().getDecorView(); ViewGroup decorChild = (ViewGroup) decor.getChildAt(0); // save ActionBar themes that have transparent assets decorChild.setBackgroundResource(background); decor.removeView(decorChild); decor.addView(this); setContent(decorChild); break; case SLIDING_CONTENT: mActionbarOverlay = actionbarOverlay; // take the above view out of ViewGroup contentParent = (ViewGroup) activity .findViewById(android.R.id.content); View content = contentParent.getChildAt(0); contentParent.removeView(content); contentParent.addView(this); setContent(content); // save people from having transparent backgrounds if (content.getBackground() == null) content.setBackgroundResource(background); break; } }
void function(Activity activity, int slideStyle, boolean actionbarOverlay) { if (slideStyle != SLIDING_WINDOW && slideStyle != SLIDING_CONTENT) throw new IllegalArgumentException( STR); if (getParent() != null) throw new IllegalStateException( STR); TypedArray a = activity.getTheme().obtainStyledAttributes(new int[] { android.R.attr.windowBackground }); int background = a.getResourceId(0, 0); a.recycle(); switch (slideStyle) { case SLIDING_WINDOW: mActionbarOverlay = false; ViewGroup decor = (ViewGroup) activity.getWindow().getDecorView(); ViewGroup decorChild = (ViewGroup) decor.getChildAt(0); decorChild.setBackgroundResource(background); decor.removeView(decorChild); decor.addView(this); setContent(decorChild); break; case SLIDING_CONTENT: mActionbarOverlay = actionbarOverlay; ViewGroup contentParent = (ViewGroup) activity .findViewById(android.R.id.content); View content = contentParent.getChildAt(0); contentParent.removeView(content); contentParent.addView(this); setContent(content); if (content.getBackground() == null) content.setBackgroundResource(background); break; } }
/** * Attaches the SlidingMenu to an entire Activity * * @param activity * the Activity * @param slideStyle * either SLIDING_CONTENT or SLIDING_WINDOW * @param actionbarOverlay * whether or not the ActionBar is overlaid */
Attaches the SlidingMenu to an entire Activity
attachToActivity
{ "repo_name": "ikantech/IkantechSupport", "path": "src/com/jeremyfeinstein/slidingmenu/lib/SlidingMenu.java", "license": "gpl-2.0", "size": 30080 }
[ "android.app.Activity", "android.content.res.TypedArray", "android.view.View", "android.view.ViewGroup" ]
import android.app.Activity; import android.content.res.TypedArray; import android.view.View; import android.view.ViewGroup;
import android.app.*; import android.content.res.*; import android.view.*;
[ "android.app", "android.content", "android.view" ]
android.app; android.content; android.view;
674,265
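An illustrative call site for the attach method above, written against the signature and constants shown in the record; the layout ids and the setMenu call are assumptions about the surrounding app, not taken from this record.

import android.app.Activity;
import android.os.Bundle;
import com.jeremyfeinstein.slidingmenu.lib.SlidingMenu;

// Hypothetical Activity wiring a SlidingMenu over its content view.
public class MainActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.content_frame);  // placeholder layout id
        SlidingMenu menu = new SlidingMenu(this);
        menu.setMenu(R.layout.menu_frame);       // placeholder layout id, assumed API
        // Slide only the content, keep the ActionBar fixed (no overlay).
        menu.attachToActivity(this, SlidingMenu.SLIDING_CONTENT, false);
    }
}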
public void testNSStack() throws Exception { StringReader strReader3 = new StringReader(msg3); DeserializationContext dser = new DeserializationContext( new InputSource(strReader3), null, org.apache.axis.Message.REQUEST); dser.parse(); org.apache.axis.message.SOAPEnvelope env = dser.getEnvelope(); String xml = env.toString(); boolean oldIgnore = XMLUnit.getIgnoreWhitespace(); XMLUnit.setIgnoreWhitespace(true); try { assertXMLEqual(xml,msg3); } finally { XMLUnit.setIgnoreWhitespace(oldIgnore); } }
void function() throws Exception { StringReader strReader3 = new StringReader(msg3); DeserializationContext dser = new DeserializationContext( new InputSource(strReader3), null, org.apache.axis.Message.REQUEST); dser.parse(); org.apache.axis.message.SOAPEnvelope env = dser.getEnvelope(); String xml = env.toString(); boolean oldIgnore = XMLUnit.getIgnoreWhitespace(); XMLUnit.setIgnoreWhitespace(true); try { assertXMLEqual(xml,msg3); } finally { XMLUnit.setIgnoreWhitespace(oldIgnore); } }
/** * Test for Bug 22980 * @throws Exception */
Test for Bug 22980
testNSStack
{ "repo_name": "apache/axis1-java", "path": "axis-rt-core/src/test/java/test/utils/TestXMLUtils.java", "license": "apache-2.0", "size": 17175 }
[ "java.io.StringReader", "javax.xml.soap.SOAPEnvelope", "org.apache.axis.encoding.DeserializationContext", "org.custommonkey.xmlunit.XMLUnit", "org.xml.sax.InputSource" ]
import java.io.StringReader; import javax.xml.soap.SOAPEnvelope; import org.apache.axis.encoding.DeserializationContext; import org.custommonkey.xmlunit.XMLUnit; import org.xml.sax.InputSource;
import java.io.*; import javax.xml.soap.*; import org.apache.axis.encoding.*; import org.custommonkey.xmlunit.*; import org.xml.sax.*;
[ "java.io", "javax.xml", "org.apache.axis", "org.custommonkey.xmlunit", "org.xml.sax" ]
java.io; javax.xml; org.apache.axis; org.custommonkey.xmlunit; org.xml.sax;
1,133,832
@Override boolean equals(@CheckForNull Object object);
boolean equals(@CheckForNull Object object);
/** * Returns <em>true</em> if {@code object} is the same as this document; * <em>false</em> otherwise. * <p> * {@code object} is the same as this document <em>iff</em>: * <ul> * <li>{@code this == object} or</li> * <li>{@code object instanceof Map} <em>and</em></li> * <li>{@code this.entrySet().equals(object.entrySet())}.</li> * </ul> * </p> * * @param object the reference object with which to compare * @return <em>true</em> if {@code object} is the same as this document; * <em>false</em> otherwise */
Returns true if object is the same as this document; false otherwise. object is the same as this document iff: this == object or object instanceof Map and this.entrySet().equals(object.entrySet()).
equals
{ "repo_name": "kohanyirobert/ebson", "path": "src/main/java/com/github/kohanyirobert/ebson/BsonDocument.java", "license": "mit", "size": 7157 }
[ "javax.annotation.CheckForNull" ]
import javax.annotation.CheckForNull;
import javax.annotation.*;
[ "javax.annotation" ]
javax.annotation;
1,708,018
private void removeCircuitLookups(Chip c) { for (Location l : c.structure) structureLookupMap.remove(l); for (OutputPin o : c.outputPins) { outputPinLookupMap.remove(o.getLocation()); } for (InputPin i : c.inputPins) { inputPinLookupMap.remove(i.getLocation()); } List<Location> inputBlocksToRemove = new ArrayList<>(); for (Location l : sourceLookupMap.keySet()) { List<InputPin> pins = sourceLookupMap.get(l); List<InputPin> toRemove = new ArrayList<>(); for (InputPin pin : pins) { if (pin.getChip()==c) toRemove.add(pin); } pins.removeAll(toRemove); if (pins.isEmpty()) inputBlocksToRemove.add(l); } for (Location l : inputBlocksToRemove) sourceLookupMap.remove(l); List<Location> outputBlocksToRemove = new ArrayList<>(); for (Location l : outputLookupMap.keySet()) { List<OutputPin> pins = outputLookupMap.get(l); List<OutputPin> toRemove = new ArrayList<>(); for (OutputPin pin : pins) { if (pin.getChip()==c) toRemove.add(pin); } pins.removeAll(toRemove); if (pins.isEmpty()) outputBlocksToRemove.add(l); } for (Location l : outputBlocksToRemove) outputLookupMap.remove(l); activationLookupMap.remove(c.activationBlock); List<ChunkLocation> emptyChunks = new ArrayList<>(); for (ChunkLocation loc : c.chunks) { if (chunkLookupMap.containsKey(loc)) { List<Chip> ccircuits = chunkLookupMap.get(loc); if (ccircuits!=null) { ccircuits.remove(c); if (ccircuits.isEmpty()) emptyChunks.add(loc); } } } for (ChunkLocation loc : emptyChunks) chunkLookupMap.remove(loc); }
void function(Chip c) { for (Location l : c.structure) structureLookupMap.remove(l); for (OutputPin o : c.outputPins) { outputPinLookupMap.remove(o.getLocation()); } for (InputPin i : c.inputPins) { inputPinLookupMap.remove(i.getLocation()); } List<Location> inputBlocksToRemove = new ArrayList<>(); for (Location l : sourceLookupMap.keySet()) { List<InputPin> pins = sourceLookupMap.get(l); List<InputPin> toRemove = new ArrayList<>(); for (InputPin pin : pins) { if (pin.getChip()==c) toRemove.add(pin); } pins.removeAll(toRemove); if (pins.isEmpty()) inputBlocksToRemove.add(l); } for (Location l : inputBlocksToRemove) sourceLookupMap.remove(l); List<Location> outputBlocksToRemove = new ArrayList<>(); for (Location l : outputLookupMap.keySet()) { List<OutputPin> pins = outputLookupMap.get(l); List<OutputPin> toRemove = new ArrayList<>(); for (OutputPin pin : pins) { if (pin.getChip()==c) toRemove.add(pin); } pins.removeAll(toRemove); if (pins.isEmpty()) outputBlocksToRemove.add(l); } for (Location l : outputBlocksToRemove) outputLookupMap.remove(l); activationLookupMap.remove(c.activationBlock); List<ChunkLocation> emptyChunks = new ArrayList<>(); for (ChunkLocation loc : c.chunks) { if (chunkLookupMap.containsKey(loc)) { List<Chip> ccircuits = chunkLookupMap.get(loc); if (ccircuits!=null) { ccircuits.remove(c); if (ccircuits.isEmpty()) emptyChunks.add(loc); } } } for (ChunkLocation loc : emptyChunks) chunkLookupMap.remove(loc); }
/** * Removes a chip from all lookup tables. * * @param c A chip. */
Removes a chip from all lookup tables
removeCircuitLookups
{ "repo_name": "eisental/RedstoneChips", "path": "src/main/java/org/redstonechips/chip/ChipCollection.java", "license": "gpl-3.0", "size": 8847 }
[ "java.util.ArrayList", "java.util.List", "org.bukkit.Location", "org.redstonechips.chip.io.InputPin", "org.redstonechips.chip.io.OutputPin", "org.redstonechips.util.ChunkLocation" ]
import java.util.ArrayList; import java.util.List; import org.bukkit.Location; import org.redstonechips.chip.io.InputPin; import org.redstonechips.chip.io.OutputPin; import org.redstonechips.util.ChunkLocation;
import java.util.*; import org.bukkit.*; import org.redstonechips.chip.io.*; import org.redstonechips.util.*;
[ "java.util", "org.bukkit", "org.redstonechips.chip", "org.redstonechips.util" ]
java.util; org.bukkit; org.redstonechips.chip; org.redstonechips.util;
2,242,834
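A generic sketch of the cleanup idiom the record above repeats for each lookup table: remove one owner's entries from a Map of lists, then drop keys whose lists became empty. The generic types and class name are placeholders, not RedstoneChips classes.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Remove every occurrence of `owner` from the value lists, then prune empty keys.
public final class LookupCleanup {
    static <K, V> void removeOwner(Map<K, List<V>> lookup, V owner) {
        List<K> emptyKeys = new ArrayList<>();
        for (Map.Entry<K, List<V>> entry : lookup.entrySet()) {
            entry.getValue().removeIf(v -> v.equals(owner));
            if (entry.getValue().isEmpty()) {
                emptyKeys.add(entry.getKey());
            }
        }
        for (K key : emptyKeys) {
            lookup.remove(key);
        }
    }

    public static void main(String[] args) {
        Map<String, List<String>> lookup = new HashMap<>();
        lookup.put("a", new ArrayList<>(List.of("x", "y")));
        lookup.put("b", new ArrayList<>(List.of("x")));
        removeOwner(lookup, "x");
        System.out.println(lookup); // {a=[y]}
    }
}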
@Test public void testNextTreatmentHashedRoutingV4() throws Exception { TrafficTreatment treatment = DefaultTrafficTreatment.builder() .setEthSrc(SRC_MAC) .setEthDst(DST_MAC) .setOutput(PORT_1) .build(); PiAction mappedAction = interpreter.mapTreatment( treatment, FabricConstants.FABRIC_INGRESS_NEXT_HASHED); PiActionParam ethSrcParam = new PiActionParam(FabricConstants.SMAC, SRC_MAC.toBytes()); PiActionParam ethDstParam = new PiActionParam(FabricConstants.DMAC, DST_MAC.toBytes()); PiActionParam portParam = new PiActionParam(FabricConstants.PORT_NUM, PORT_1.toLong()); PiAction expectedAction = PiAction.builder() .withId(FabricConstants.FABRIC_INGRESS_NEXT_ROUTING_HASHED) .withParameters(ImmutableList.of(ethSrcParam, ethDstParam, portParam)) .build(); assertEquals(expectedAction, mappedAction); }
void function() throws Exception { TrafficTreatment treatment = DefaultTrafficTreatment.builder() .setEthSrc(SRC_MAC) .setEthDst(DST_MAC) .setOutput(PORT_1) .build(); PiAction mappedAction = interpreter.mapTreatment( treatment, FabricConstants.FABRIC_INGRESS_NEXT_HASHED); PiActionParam ethSrcParam = new PiActionParam(FabricConstants.SMAC, SRC_MAC.toBytes()); PiActionParam ethDstParam = new PiActionParam(FabricConstants.DMAC, DST_MAC.toBytes()); PiActionParam portParam = new PiActionParam(FabricConstants.PORT_NUM, PORT_1.toLong()); PiAction expectedAction = PiAction.builder() .withId(FabricConstants.FABRIC_INGRESS_NEXT_ROUTING_HASHED) .withParameters(ImmutableList.of(ethSrcParam, ethDstParam, portParam)) .build(); assertEquals(expectedAction, mappedAction); }
/** * Map treatment for hashed table to routing v4 action. */
Map treatment for hashed table to routing v4 action
testNextTreatmentHashedRoutingV4
{ "repo_name": "kuujo/onos", "path": "pipelines/fabric/src/test/java/org/onosproject/pipelines/fabric/FabricInterpreterTest.java", "license": "apache-2.0", "size": 9040 }
[ "com.google.common.collect.ImmutableList", "org.junit.Assert", "org.onosproject.net.flow.DefaultTrafficTreatment", "org.onosproject.net.flow.TrafficTreatment", "org.onosproject.net.pi.runtime.PiAction", "org.onosproject.net.pi.runtime.PiActionParam" ]
import com.google.common.collect.ImmutableList; import org.junit.Assert; import org.onosproject.net.flow.DefaultTrafficTreatment; import org.onosproject.net.flow.TrafficTreatment; import org.onosproject.net.pi.runtime.PiAction; import org.onosproject.net.pi.runtime.PiActionParam;
import com.google.common.collect.*; import org.junit.*; import org.onosproject.net.flow.*; import org.onosproject.net.pi.runtime.*;
[ "com.google.common", "org.junit", "org.onosproject.net" ]
com.google.common; org.junit; org.onosproject.net;
2,690,987
public void addBusinessFilterToIncomingLinks(ViewerFilter filter);
void function(ViewerFilter filter);
/** * Adds the given filter to the incomingLinks edition editor. * * @param filter * a viewer filter * @see org.eclipse.jface.viewers.StructuredViewer#addFilter(ViewerFilter) * */
Adds the given filter to the incomingLinks edition editor
addBusinessFilterToIncomingLinks
{ "repo_name": "prabushi/devstudio-tooling-esb", "path": "plugins/org.wso2.developerstudio.eclipse.gmf.esb.edit/src-gen/org/wso2/developerstudio/eclipse/gmf/esb/parts/CloudConnectorInputConnectorPropertiesEditionPart.java", "license": "apache-2.0", "size": 1637 }
[ "org.eclipse.jface.viewers.ViewerFilter" ]
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.jface.viewers.*;
[ "org.eclipse.jface" ]
org.eclipse.jface;
1,541,613
private void writeObject(ObjectOutputStream s) throws IOException { s.defaultWriteObject(); for (E e : this) { if (e instanceof Serializable) { s.writeObject(e); } } s.writeObject(null); }
void function(ObjectOutputStream s) throws IOException { s.defaultWriteObject(); for (E e : this) { if (e instanceof Serializable) { s.writeObject(e); } } s.writeObject(null); }
/** * Write the listeners to a stream. */
Write the listeners to a stream
writeObject
{ "repo_name": "d2fn/passage", "path": "src/main/java/com/bbn/openmap/event/ListenerSupport.java", "license": "mit", "size": 3912 }
[ "java.io.IOException", "java.io.ObjectOutputStream", "java.io.Serializable" ]
import java.io.IOException; import java.io.ObjectOutputStream; import java.io.Serializable;
import java.io.*;
[ "java.io" ]
java.io;
2,454,619
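A self-contained sketch of the serialization idiom above, together with the matching readObject side that the record does not show: write only Serializable listeners, terminate with a null sentinel, and read back until null. The class and field names are illustrative, not OpenMap's.

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

class ListenerHolder<E> implements Serializable {
    private static final long serialVersionUID = 1L;
    private transient List<E> listeners = new ArrayList<>();

    private void writeObject(ObjectOutputStream s) throws IOException {
        s.defaultWriteObject();
        for (E e : listeners) {
            if (e instanceof Serializable) {
                s.writeObject(e); // skip listeners that cannot be serialized
            }
        }
        s.writeObject(null); // sentinel marking the end of the listener list
    }

    @SuppressWarnings("unchecked")
    private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException {
        s.defaultReadObject();
        listeners = new ArrayList<>();
        Object o;
        while ((o = s.readObject()) != null) {
            listeners.add((E) o);
        }
    }
}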
public ResourceId getResourceId();
ResourceId function();
/** * Get the resource to rebalance * @return resource id */
Get the resource to rebalance
getResourceId
{ "repo_name": "AyolaJayamaha/apachehelix", "path": "helix-core/src/main/java/org/apache/helix/controller/rebalancer/config/RebalancerConfig.java", "license": "apache-2.0", "size": 1462 }
[ "org.apache.helix.api.id.ResourceId" ]
import org.apache.helix.api.id.ResourceId;
import org.apache.helix.api.id.*;
[ "org.apache.helix" ]
org.apache.helix;
1,782,343
Locale getFormattingLocale();
Locale getFormattingLocale();
/** * Get locale based on language and country */
Get locale based on language and country
getFormattingLocale
{ "repo_name": "momogentoo/ehour", "path": "eHour-common/src/main/java/net/rrm/ehour/config/EhourConfig.java", "license": "gpl-2.0", "size": 3382 }
[ "java.util.Locale" ]
import java.util.Locale;
import java.util.*;
[ "java.util" ]
java.util;
693,683
public List<Question> getQuickQuestionsByListCatetory(List<String> listCategoryId, boolean isNotYetAnswer) throws Exception;
List<Question> function(List<String> listCategoryId, boolean isNotYetAnswer) throws Exception;
/** * Gets quick questions from a list of categories. * * @param listCategoryId Id of the categories. * @param isNotYetAnswer If this value is "true", only quick questions which are not yet answered are returned. * @return Quick questions. * @throws Exception * @LevelAPI Platform */
Gets quick questions from a list of categories
getQuickQuestionsByListCatetory
{ "repo_name": "exoplatform/answers", "path": "service/src/main/java/org/exoplatform/faq/service/FAQService.java", "license": "lgpl-3.0", "size": 36675 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,007,679
public Observable<ServiceResponse<Page<EventSubscriptionInner>>> listRegionalByResourceGroupNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); }
Observable<ServiceResponse<Page<EventSubscriptionInner>>> function(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException(STR); }
/** * List all regional event subscriptions under an Azure subscription and resource group. * List all event subscriptions from the given location under a specific Azure subscription and resource group. * ServiceResponse<PageImpl<EventSubscriptionInner>> * @param nextPageLink The NextLink from the previous successful call to List operation. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the PagedList&lt;EventSubscriptionInner&gt; object wrapped in {@link ServiceResponse} if successful. */
List all regional event subscriptions under an Azure subscription and resource group. List all event subscriptions from the given location under a specific Azure subscription and resource group
listRegionalByResourceGroupNextSinglePageAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/eventgrid/mgmt-v2019_06_01/src/main/java/com/microsoft/azure/management/eventgrid/v2019_06_01/implementation/EventSubscriptionsInner.java", "license": "mit", "size": 345238 }
[ "com.microsoft.azure.Page", "com.microsoft.rest.ServiceResponse" ]
import com.microsoft.azure.Page; import com.microsoft.rest.ServiceResponse;
import com.microsoft.azure.*; import com.microsoft.rest.*;
[ "com.microsoft.azure", "com.microsoft.rest" ]
com.microsoft.azure; com.microsoft.rest;
1,321,248
boolean statusUpdate(TaskAttemptID taskId, TaskStatus taskStatus) throws IOException, InterruptedException;
boolean statusUpdate(TaskAttemptID taskId, TaskStatus taskStatus) throws IOException, InterruptedException;
/** * Report child's progress to parent. * * @param taskId * task-id of the child * @param taskStatus * status of the child * @throws IOException * @throws InterruptedException * @return True if the task is known */
Report child's progress to parent
statusUpdate
{ "repo_name": "dongpf/hadoop-0.19.1", "path": "src/mapred/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java", "license": "apache-2.0", "size": 6399 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
621,978
public ArrayList<ProductModel> getProductsInCategory(int category) { ArrayList<ProductModel> products; if (category == R.string.baby){ products = mBabyProducts; } else if (category == R.string.bakery) { products = mBakeryProducts; } else { products = new ArrayList<>(); } return products; }
ArrayList<ProductModel> function(int category) { ArrayList<ProductModel> products; if (category == R.string.baby){ products = mBabyProducts; } else if (category == R.string.bakery) { products = mBakeryProducts; } else { products = new ArrayList<>(); } return products; }
/** * Get products in a given category * @param category : int * @return ArrayList<ProductModel> */
Get products in a given category
getProductsInCategory
{ "repo_name": "hubjac1/smartShoppingList", "path": "app/src/main/java/hubjac1/mysmartshoppinglist/DAO/ProductDao.java", "license": "gpl-3.0", "size": 3376 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
885,738
public static ServiceInfo create(final Map<Fields, String> qualifiedNameMap, final int port, final int weight, final int priority, final boolean persistent, final Map<String, ?> props) { return new ServiceInfoImpl(qualifiedNameMap, port, weight, priority, persistent, props); }
static ServiceInfo function(final Map<Fields, String> qualifiedNameMap, final int port, final int weight, final int priority, final boolean persistent, final Map<String, ?> props) { return new ServiceInfoImpl(qualifiedNameMap, port, weight, priority, persistent, props); }
/** * Construct a service description for registering with JmDNS. The properties hashtable must map property names to either Strings or byte arrays describing the property values. * * @param qualifiedNameMap * dictionary of values to build the fully qualified service name. Mandatory keys are Application and Instance. The Domain default is local, the Protocol default is tcp and the subtype default is none. * @param port * the local port on which the service runs * @param weight * weight of the service * @param priority * priority of the service * @param persistent * if <code>true</code> ServiceListener.resolveService will be called whenever new information is received. * @param props * properties describing the service * @return new service info */
Construct a service description for registering with JmDNS. The properties hashtable must map property names to either Strings or byte arrays describing the property values
create
{ "repo_name": "thunderace/mpd-control", "path": "src/org/thunder/jmdns/ServiceInfo.java", "license": "apache-2.0", "size": 27316 }
[ "java.util.Map", "org.thunder.jmdns.impl.ServiceInfoImpl" ]
import java.util.Map; import org.thunder.jmdns.impl.ServiceInfoImpl;
import java.util.*; import org.thunder.jmdns.impl.*;
[ "java.util", "org.thunder.jmdns" ]
java.util; org.thunder.jmdns;
446,520
StopwatchSample getStopwatchSample(String name);
StopwatchSample getStopwatchSample(String name);
/** * Retrieves sample data object for a particular Stopwatch. * * @param name name of the Simon * @return sample object or null if Simon with entered name doesn't exist * @see org.javasimon.StopwatchSample */
Retrieves sample data object for a particular Stopwatch
getStopwatchSample
{ "repo_name": "virgo47/javasimon", "path": "core/src/main/java/org/javasimon/jmx/SimonManagerMXBean.java", "license": "bsd-3-clause", "size": 7042 }
[ "org.javasimon.StopwatchSample" ]
import org.javasimon.StopwatchSample;
import org.javasimon.*;
[ "org.javasimon" ]
org.javasimon;
1,997,493
public ModelCheckerResult computeReachProbs(CTMDP ctmdp, BitSet target, boolean min) throws PrismException { throw new PrismNotSupportedException("Not implemented yet"); }
ModelCheckerResult function(CTMDP ctmdp, BitSet target, boolean min) throws PrismException { throw new PrismNotSupportedException(STR); }
/** * Compute reachability probabilities. * i.e. compute the min/max probability of reaching a state in {@code target}. * @param ctmdp The CTMDP * @param target Target states * @param min Min or max probabilities (true=min, false=max) */
Compute reachability probabilities. i.e. compute the min/max probability of reaching a state in target
computeReachProbs
{ "repo_name": "nicodelpiano/prism", "path": "src/explicit/CTMDPModelChecker.java", "license": "gpl-2.0", "size": 10400 }
[ "java.util.BitSet" ]
import java.util.BitSet;
import java.util.*;
[ "java.util" ]
java.util;
2,535,391
ISetOrderCursor setOrderCursor(ExecutionContext context, ISchematicCursor cursor, List<IOrderBy> ordersInRequest) throws TddlException;
ISetOrderCursor setOrderCursor(ExecutionContext context, ISchematicCursor cursor, List<IOrderBy> ordersInRequest) throws TddlException;
/** * Set the request order by when the cursor's orderBy tableName is not equal to * the request orderBy tableName * * @param cursor * @param ordersInRequest * @return */
Set the request order by when the cursor's orderBy tableName is not equal to the request orderBy tableName
setOrderCursor
{ "repo_name": "sdgdsffdsfff/tddl", "path": "tddl-executor/src/main/java/com/taobao/tddl/executor/spi/ICursorFactory.java", "license": "apache-2.0", "size": 10664 }
[ "com.taobao.tddl.common.exception.TddlException", "com.taobao.tddl.executor.common.ExecutionContext", "com.taobao.tddl.executor.cursor.ISchematicCursor", "com.taobao.tddl.executor.cursor.ISetOrderCursor", "com.taobao.tddl.optimizer.core.expression.IOrderBy", "java.util.List" ]
import com.taobao.tddl.common.exception.TddlException; import com.taobao.tddl.executor.common.ExecutionContext; import com.taobao.tddl.executor.cursor.ISchematicCursor; import com.taobao.tddl.executor.cursor.ISetOrderCursor; import com.taobao.tddl.optimizer.core.expression.IOrderBy; import java.util.List;
import com.taobao.tddl.common.exception.*; import com.taobao.tddl.executor.common.*; import com.taobao.tddl.executor.cursor.*; import com.taobao.tddl.optimizer.core.expression.*; import java.util.*;
[ "com.taobao.tddl", "java.util" ]
com.taobao.tddl; java.util;
718,186
shell = new Shell(); composite = new Composite(shell, SWT.NORMAL); }
shell = new Shell(); composite = new Composite(shell, SWT.NORMAL); }
/** * Creating UI Elements to be used in the Tests. */
Creating UI Elements to be used in the Tests
setUpSWT
{ "repo_name": "test-editor/test-editor", "path": "ui/org.testeditor.ui.test/src/org/testeditor/ui/wizardpages/teamshare/TeamShareImportProjectWizardPageTest.java", "license": "epl-1.0", "size": 4630 }
[ "org.eclipse.swt.widgets.Composite", "org.eclipse.swt.widgets.Shell" ]
import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.*;
[ "org.eclipse.swt" ]
org.eclipse.swt;
2,279,883
public static List<DataflowPackage> stageClasspathElements( Collection<String> classpathElements, String stagingPath) { return stageClasspathElements(classpathElements, stagingPath, Sleeper.DEFAULT); }
static List<DataflowPackage> function( Collection<String> classpathElements, String stagingPath) { return stageClasspathElements(classpathElements, stagingPath, Sleeper.DEFAULT); }
/** * Transfers the classpath elements to the staging location. * * @param classpathElements The elements to stage. * @param stagingPath The base location to stage the elements to. * @return A list of cloud workflow packages, each representing a classpath element. */
Transfers the classpath elements to the staging location
stageClasspathElements
{ "repo_name": "yafengguo/Apache-beam", "path": "runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/util/PackageUtil.java", "license": "apache-2.0", "size": 13040 }
[ "com.google.api.client.util.Sleeper", "com.google.api.services.dataflow.model.DataflowPackage", "java.util.Collection", "java.util.List" ]
import com.google.api.client.util.Sleeper; import com.google.api.services.dataflow.model.DataflowPackage; import java.util.Collection; import java.util.List;
import com.google.api.client.util.*; import com.google.api.services.dataflow.model.*; import java.util.*;
[ "com.google.api", "java.util" ]
com.google.api; java.util;
2,685,321
public void updateUserInProject(final Project project, final User... users) { notNull(project, "project"); notNull(users, "users"); notEmpty(project.getId(), "project.id"); doPostProjectUsersUpdate(project, users); }
void function(final Project project, final User... users) { notNull(project, STR); notNull(users, "users"); notEmpty(project.getId(), STR); doPostProjectUsersUpdate(project, users); }
/** * Update user in the project * * @param project in which to update user * @param users to update * @throws ProjectUsersUpdateException in case of failure */
Update user in the project
updateUserInProject
{ "repo_name": "martiner/gooddata-java", "path": "gooddata-java/src/main/java/com/gooddata/sdk/service/project/ProjectService.java", "license": "bsd-3-clause", "size": 25129 }
[ "com.gooddata.sdk.common.util.Validate", "com.gooddata.sdk.model.project.Project", "com.gooddata.sdk.model.project.User" ]
import com.gooddata.sdk.common.util.Validate; import com.gooddata.sdk.model.project.Project; import com.gooddata.sdk.model.project.User;
import com.gooddata.sdk.common.util.*; import com.gooddata.sdk.model.project.*;
[ "com.gooddata.sdk" ]
com.gooddata.sdk;
1,190,103
public Map<String, Object> getMetadata() { return metadata; }
Map<String, Object> function() { return metadata; }
/** * Any additional metadata that was provided at authentication time. The set of keys will * vary according to the authenticating realm. */
Any additional metadata that was provided at authentication time. The set of keys will vary according to the authenticating realm
getMetadata
{ "repo_name": "ern/elasticsearch", "path": "x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UserRoleMapper.java", "license": "apache-2.0", "size": 9982 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
667,506
public List<RoutingTable> getRoutingTableList() { return routingTableList; }
List<RoutingTable> function() { return routingTableList; }
/** * Gets Routing Table List. * * @return the Routing Table List */
Gets Routing Table List
getRoutingTableList
{ "repo_name": "zsmartsystems/com.zsmartsystems.zigbee", "path": "com.zsmartsystems.zigbee/src/main/java/com/zsmartsystems/zigbee/zdo/command/ManagementRoutingResponse.java", "license": "epl-1.0", "size": 6627 }
[ "com.zsmartsystems.zigbee.zdo.field.RoutingTable", "java.util.List" ]
import com.zsmartsystems.zigbee.zdo.field.RoutingTable; import java.util.List;
import com.zsmartsystems.zigbee.zdo.field.*; import java.util.*;
[ "com.zsmartsystems.zigbee", "java.util" ]
com.zsmartsystems.zigbee; java.util;
715,356
public void setCity(String city) { if ((this.city == null)) { if ((city == null)) { return; } this.city = new City(); } this.city.setValue(city); }
void function(String city) { if ((this.city == null)) { if ((city == null)) { return; } this.city = new City(); } this.city.setValue(city); }
/** * The city of the post box. * * @param city the String. */
The city of the post box
setCity
{ "repo_name": "NABUCCO/org.nabucco.business.address", "path": "org.nabucco.business.address.facade.datatype/src/main/gen/org/nabucco/business/address/facade/datatype/PostBoxAddress.java", "license": "epl-1.0", "size": 14318 }
[ "org.nabucco.framework.base.facade.datatype.business.address.City" ]
import org.nabucco.framework.base.facade.datatype.business.address.City;
import org.nabucco.framework.base.facade.datatype.business.address.*;
[ "org.nabucco.framework" ]
org.nabucco.framework;
1,746,142
private List<ItemHistoryBean> readItemModel(ItemModel modelItems, String modelName) { List<ItemHistoryBean> beanList = new ArrayList<ItemHistoryBean>(); if(modelItems == null) return beanList; EList<ModelItem> modelList = modelItems.getItems(); for (ModelItem item : modelList) { ItemHistoryBean bean = new ItemHistoryBean(); if (item.getLabel() != null) { LabelSplitHelper label = new LabelSplitHelper(item.getLabel()); // start PROSERV String labelRaw = label.getLabel(); bean.label = getTransformedLabel(labelRaw); //bean.label = label.getLabel(); // end PROSERV bean.format = label.getFormat(); bean.units = label.getUnit(); } bean.icon = item.getIcon(); bean.name = item.getName(); if (item.getType() == null) bean.type = "GroupItem"; else bean.type = item.getType() + "Item"; bean.groups = new ArrayList<String>(); EList<String> groupList = item.getGroups(); for (String group : groupList) { bean.groups.add(group.toString()); } ModelRepository repo = HABminApplication.getModelRepository(); if (repo == null) return null; // Loop through all the registered persistence models and read their // data... bean.services = new ArrayList<String>(); for (Map.Entry<String, PersistenceService> service : HABminApplication.getPersistenceServices().entrySet()) { PersistenceModelHelper helper = new PersistenceModelHelper(service.getKey()); ItemPersistenceBean p = helper.getItemPersistence(item.getName(), item.getGroups()); if (p != null) bean.services.add(p.service); } // We're only interested in items with persistence enabled if (bean.services.size() > 0) beanList.add(bean); } return beanList; }
List<ItemHistoryBean> function(ItemModel modelItems, String modelName) { List<ItemHistoryBean> beanList = new ArrayList<ItemHistoryBean>(); if(modelItems == null) return beanList; EList<ModelItem> modelList = modelItems.getItems(); for (ModelItem item : modelList) { ItemHistoryBean bean = new ItemHistoryBean(); if (item.getLabel() != null) { LabelSplitHelper label = new LabelSplitHelper(item.getLabel()); String labelRaw = label.getLabel(); bean.label = getTransformedLabel(labelRaw); bean.format = label.getFormat(); bean.units = label.getUnit(); } bean.icon = item.getIcon(); bean.name = item.getName(); if (item.getType() == null) bean.type = STR; else bean.type = item.getType() + "Item"; bean.groups = new ArrayList<String>(); EList<String> groupList = item.getGroups(); for (String group : groupList) { bean.groups.add(group.toString()); } ModelRepository repo = HABminApplication.getModelRepository(); if (repo == null) return null; bean.services = new ArrayList<String>(); for (Map.Entry<String, PersistenceService> service : HABminApplication.getPersistenceServices().entrySet()) { PersistenceModelHelper helper = new PersistenceModelHelper(service.getKey()); ItemPersistenceBean p = helper.getItemPersistence(item.getName(), item.getGroups()); if (p != null) bean.services.add(p.service); } if (bean.services.size() > 0) beanList.add(bean); } return beanList; }
/** * Read through an items model. Get all the items and provide the * information that's of use for graphing/stats etc. Only items with * persistence services configured are returned. * * @param modelItems * the item model * @param modelName * the model name * @return */
Read through an items model. Get all the items and provide the information that's of use for graphing/stats etc. Only items with persistence services configured are returned
readItemModel
{ "repo_name": "jenskastensson/openhab", "path": "bundles/io/org.openhab.io.habmin/src/main/java/org/openhab/io/habmin/services/chart/PersistenceResource.java", "license": "epl-1.0", "size": 27684 }
[ "java.util.ArrayList", "java.util.List", "java.util.Map", "org.eclipse.emf.common.util.EList", "org.openhab.core.items.Item", "org.openhab.core.persistence.PersistenceService", "org.openhab.io.habmin.HABminApplication", "org.openhab.io.habmin.internal.resources.LabelSplitHelper", "org.openhab.io.habmin.services.persistence.ItemPersistenceBean", "org.openhab.io.habmin.services.persistence.PersistenceModelHelper", "org.openhab.model.core.ModelRepository", "org.openhab.model.items.ItemModel", "org.openhab.model.items.ModelItem" ]
import java.util.ArrayList; import java.util.List; import java.util.Map; import org.eclipse.emf.common.util.EList; import org.openhab.core.items.Item; import org.openhab.core.persistence.PersistenceService; import org.openhab.io.habmin.HABminApplication; import org.openhab.io.habmin.internal.resources.LabelSplitHelper; import org.openhab.io.habmin.services.persistence.ItemPersistenceBean; import org.openhab.io.habmin.services.persistence.PersistenceModelHelper; import org.openhab.model.core.ModelRepository; import org.openhab.model.items.ItemModel; import org.openhab.model.items.ModelItem;
import java.util.*; import org.eclipse.emf.common.util.*; import org.openhab.core.items.*; import org.openhab.core.persistence.*; import org.openhab.io.habmin.*; import org.openhab.io.habmin.internal.resources.*; import org.openhab.io.habmin.services.persistence.*; import org.openhab.model.core.*; import org.openhab.model.items.*;
[ "java.util", "org.eclipse.emf", "org.openhab.core", "org.openhab.io", "org.openhab.model" ]
java.util; org.eclipse.emf; org.openhab.core; org.openhab.io; org.openhab.model;
1,394,693
public static String decryptByAES(final String content, final String key) { try { final byte[] data = Hex.decodeHex(content.toCharArray()); final KeyGenerator kgen = KeyGenerator.getInstance("AES"); final SecureRandom secureRandom = SecureRandom.getInstance("SHA1PRNG"); secureRandom.setSeed(key.getBytes()); kgen.init(128, secureRandom); final SecretKey secretKey = kgen.generateKey(); final byte[] enCodeFormat = secretKey.getEncoded(); final SecretKeySpec keySpec = new SecretKeySpec(enCodeFormat, "AES"); final Cipher cipher = Cipher.getInstance("AES"); cipher.init(Cipher.DECRYPT_MODE, keySpec); final byte[] result = cipher.doFinal(data); return new String(result, StandardCharsets.UTF_8); } catch (final Exception e) { LOGGER.log(Level.WARN, "Decrypt failed"); return null; } } private Crypts() { }
static String function(final String content, final String key) { try { final byte[] data = Hex.decodeHex(content.toCharArray()); final KeyGenerator kgen = KeyGenerator.getInstance("AES"); final SecureRandom secureRandom = SecureRandom.getInstance(STR); secureRandom.setSeed(key.getBytes()); kgen.init(128, secureRandom); final SecretKey secretKey = kgen.generateKey(); final byte[] enCodeFormat = secretKey.getEncoded(); final SecretKeySpec keySpec = new SecretKeySpec(enCodeFormat, "AES"); final Cipher cipher = Cipher.getInstance("AES"); cipher.init(Cipher.DECRYPT_MODE, keySpec); final byte[] result = cipher.doFinal(data); return new String(result, StandardCharsets.UTF_8); } catch (final Exception e) { LOGGER.log(Level.WARN, STR); return null; } } private Crypts() { }
/** * Decrypts by AES. * * @param content the specified content to decrypt * @param key the specified key * @return original content * @see #encryptByAES(java.lang.String, java.lang.String) */
Decrypts by AES
decryptByAES
{ "repo_name": "b3log/latke", "path": "latke-core/src/main/java/org/b3log/latke/util/Crypts.java", "license": "apache-2.0", "size": 4567 }
[ "java.nio.charset.StandardCharsets", "java.security.SecureRandom", "javax.crypto.Cipher", "javax.crypto.KeyGenerator", "javax.crypto.SecretKey", "javax.crypto.spec.SecretKeySpec", "org.apache.commons.codec.binary.Hex", "org.b3log.latke.logging.Level" ]
import java.nio.charset.StandardCharsets; import java.security.SecureRandom; import javax.crypto.Cipher; import javax.crypto.KeyGenerator; import javax.crypto.SecretKey; import javax.crypto.spec.SecretKeySpec; import org.apache.commons.codec.binary.Hex; import org.b3log.latke.logging.Level;
import java.nio.charset.*; import java.security.*; import javax.crypto.*; import javax.crypto.spec.*; import org.apache.commons.codec.binary.*; import org.b3log.latke.logging.*;
[ "java.nio", "java.security", "javax.crypto", "org.apache.commons", "org.b3log.latke" ]
java.nio; java.security; javax.crypto; org.apache.commons; org.b3log.latke;
143,781
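A hedged round-trip sketch for the Crypts.decryptByAES record above. The companion encryptByAES(String, String) is taken from the doc's @see tag and is assumed to return the hex-encoded ciphertext this method expects; the string literals are illustrative only.

import org.b3log.latke.util.Crypts;

String cipherHex = Crypts.encryptByAES("hello latke", "shared-seed-key"); // hex ciphertext (assumed companion method)
String recovered = Crypts.decryptByAES(cipherHex, "shared-seed-key");     // "hello latke" on success, null on failure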
private void sendResult(NginxLookupRequest req, String addr, String port, String authUser) throws UnknownHostException { ZimbraLog.nginxlookup.debug("mailhost=" + addr); ZimbraLog.nginxlookup.debug("port=" + port); HttpServletResponse resp = req.httpResp; resp.setStatus(HttpServletResponse.SC_OK); resp.addHeader(AUTH_STATUS, "OK"); resp.addHeader(AUTH_SERVER, addr); resp.addHeader(AUTH_PORT, port); if (authUser != null) { ZimbraLog.nginxlookup.debug("rewrite " + AUTH_USER + " to: " + authUser); authUser = authUser.replace(" ", "%20"); authUser = authUser.replace("%", "%25"); resp.addHeader(AUTH_USER, authUser); } if (req.authMethod.equalsIgnoreCase(AUTHMETH_GSSAPI)) { // For GSSAPI, we also need to send back the overriden authenticating ID and the auth-token as password resp.addHeader(AUTH_ID, req.cuser); resp.addHeader(AUTH_PASS, req.pass); } else if (req.authMethod.equalsIgnoreCase(AUTHMETH_CERTAUTH)) { // For CERTAUTH, we also need to send back the auth-token as password resp.addHeader(AUTH_PASS, req.pass); } }
void function(NginxLookupRequest req, String addr, String port, String authUser) throws UnknownHostException { ZimbraLog.nginxlookup.debug(STR + addr); ZimbraLog.nginxlookup.debug("port=" + port); HttpServletResponse resp = req.httpResp; resp.setStatus(HttpServletResponse.SC_OK); resp.addHeader(AUTH_STATUS, "OK"); resp.addHeader(AUTH_SERVER, addr); resp.addHeader(AUTH_PORT, port); if (authUser != null) { ZimbraLog.nginxlookup.debug(STR + AUTH_USER + STR + authUser); authUser = authUser.replace(" ", "%20"); authUser = authUser.replace("%", "%25"); resp.addHeader(AUTH_USER, authUser); } if (req.authMethod.equalsIgnoreCase(AUTHMETH_GSSAPI)) { resp.addHeader(AUTH_ID, req.cuser); resp.addHeader(AUTH_PASS, req.pass); } else if (req.authMethod.equalsIgnoreCase(AUTHMETH_CERTAUTH)) { resp.addHeader(AUTH_PASS, req.pass); } }
/** * Send the routing information HTTP response back to the NGINX IMAP proxy * @param req The HTTP request object * @param mailhost The requested mail server name * @param port The requested mail server port * @param authUser If not null, then this value is sent back to override the login * user name, (usually) with a domain suffix added */
Send the routing information HTTP response back to the NGINX IMAP proxy
sendResult
{ "repo_name": "nico01f/z-pec", "path": "ZimbraNginxLookup/src/java/com/zimbra/cs/nginx/NginxLookupExtension.java", "license": "mit", "size": 70407 }
[ "com.zimbra.common.util.ZimbraLog", "java.net.UnknownHostException", "javax.servlet.http.HttpServletResponse" ]
import com.zimbra.common.util.ZimbraLog; import java.net.UnknownHostException; import javax.servlet.http.HttpServletResponse;
import com.zimbra.common.util.*; import java.net.*; import javax.servlet.http.*;
[ "com.zimbra.common", "java.net", "javax.servlet" ]
com.zimbra.common; java.net; javax.servlet;
2,389,998
void respondHead( Resource resource, Response response, Request request );
void respondHead( Resource resource, Response response, Request request );
/** * Generate a HEAD response * * @param resource * @param response * @param request */
Generate a HEAD response
respondHead
{ "repo_name": "FullMetal210/milton2", "path": "milton-server/src/main/java/io/milton/http/http11/Http11ResponseHandler.java", "license": "agpl-3.0", "size": 5459 }
[ "io.milton.http.Request", "io.milton.http.Response", "io.milton.resource.Resource" ]
import io.milton.http.Request; import io.milton.http.Response; import io.milton.resource.Resource;
import io.milton.http.*; import io.milton.resource.*;
[ "io.milton.http", "io.milton.resource" ]
io.milton.http; io.milton.resource;
1,427,579
@Generated @StructureField(order = 2, isGetter = false) public native void setDestinationRowOffset(int value);
@StructureField(order = 2, isGetter = false) native void function(int value);
/** * Offset to start of destination region to read, in rows */
Offset to start of destination region to read, in rows
setDestinationRowOffset
{ "repo_name": "multi-os-engine/moe-core", "path": "moe.apple/moe.platform.ios/src/main/java/apple/metalperformanceshaders/struct/MPSMatrixCopyOffsets.java", "license": "apache-2.0", "size": 2618 }
[ "org.moe.natj.c.ann.StructureField" ]
import org.moe.natj.c.ann.StructureField;
import org.moe.natj.c.ann.*;
[ "org.moe.natj" ]
org.moe.natj;
2,885,764
@ServiceMethod(returns = ReturnType.SINGLE) public void delete(String resourceGroupName, String attestationName) { deleteAsync(resourceGroupName, attestationName).block(); }
@ServiceMethod(returns = ReturnType.SINGLE) void function(String resourceGroupName, String attestationName) { deleteAsync(resourceGroupName, attestationName).block(); }
/** * Deletes an existing attestation at resource group scope. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param attestationName The name of the attestation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */
Deletes an existing attestation at resource group scope
delete
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/policyinsights/azure-resourcemanager-policyinsights/src/main/java/com/azure/resourcemanager/policyinsights/implementation/AttestationsClientImpl.java", "license": "mit", "size": 124635 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.*;
[ "com.azure.core" ]
com.azure.core;
82,839
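A minimal call sketch for the resource-group-scope delete documented above; how the attestations client instance is obtained from the policy-insights manager is assumed and not shown.

attestationsClient.delete("my-resource-group", "my-attestation"); // blocks until the attestation is removed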
private static File getLogFile(String name) { return new File(System.getProperty("hadoop.log.dir"), name); }
static File function(String name) { return new File(System.getProperty(STR), name); }
/** * Returns a File for the given name inside the log directory. * * @param name String file name * @return File for given name inside log directory */
Returns a File for the given name inside the log directory
getLogFile
{ "repo_name": "jaypatil/hadoop", "path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestMetaSave.java", "license": "gpl-3.0", "size": 8832 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
2,515,089
@Override protected InputStream doGetInputStream() throws Exception { // VFS-210: zip allows to gather an input stream even from a directory and will // return -1 on the first read. getType should not be expensive and keeps the tests // running if (!getType().hasContent()) { throw new FileSystemException("vfs.provider/read-not-file.error", getName()); } return ((CpioFileSystem)getFileSystem()).getInputStream(entry); }
InputStream function() throws Exception { if (!getType().hasContent()) { throw new FileSystemException(STR, getName()); } return ((CpioFileSystem)getFileSystem()).getInputStream(entry); }
/** * Creates an input stream to read the file content from. Is only called * if {@link #doGetType} returns {@link FileType#FILE}. The input stream * returned by this method is guaranteed to be closed before this * method is called again. */
Creates an input stream to read the file content from. Is only called if <code>#doGetType</code> returns <code>FileType#FILE</code>. The input stream returned by this method is guaranteed to be closed before this method is called again
doGetInputStream
{ "repo_name": "ysb33r/groovy-vfs", "path": "cpio-provider/src/main/java/org/ysb33r/groovy/vfsplugin/cpio/CpioFileObject.java", "license": "apache-2.0", "size": 5764 }
[ "java.io.InputStream", "org.apache.commons.vfs2.FileSystemException" ]
import java.io.InputStream; import org.apache.commons.vfs2.FileSystemException;
import java.io.*; import org.apache.commons.vfs2.*;
[ "java.io", "org.apache.commons" ]
java.io; org.apache.commons;
656,854
public static JSONObject getJSONObject(String jsonData, String key, JSONObject defaultValue) { if (StringUtils.isEmpty(jsonData)) { return defaultValue; } try { JSONObject jsonObject = new JSONObject(jsonData); return getJSONObject(jsonObject, key, defaultValue); } catch (JSONException e) { if (isPrintException) { e.printStackTrace(); } return defaultValue; } }
static JSONObject function(String jsonData, String key, JSONObject defaultValue) { if (StringUtils.isEmpty(jsonData)) { return defaultValue; } try { JSONObject jsonObject = new JSONObject(jsonData); return getJSONObject(jsonObject, key, defaultValue); } catch (JSONException e) { if (isPrintException) { e.printStackTrace(); } return defaultValue; } }
/** * get JSONObject from jsonData * * @param jsonData * @param key * @param defaultValue * @return <ul> * <li>if jsonData is null, return defaultValue</li> * <li>if constructing a {@link JSONObject#JSONObject(String)} from jsonData throws an exception, return defaultValue</li> * <li>otherwise return {@link JSONUtils#getJSONObject(JSONObject, String, JSONObject)}</li> * </ul> */
get JSONObject from jsonData
getJSONObject
{ "repo_name": "zhangjining9517/zhanglibrary", "path": "zhangLibrary/src/com/zhang/zhanglibrary/util/JSONUtils.java", "license": "gpl-2.0", "size": 23381 }
[ "org.json.JSONException", "org.json.JSONObject" ]
import org.json.JSONException; import org.json.JSONObject;
import org.json.*;
[ "org.json" ]
org.json;
2,661,954
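A short sketch of the fallback behaviour described in the getJSONObject record above; the JSON literals are made up, and com.zhang.zhanglibrary.util.JSONUtils is assumed to be on the classpath.

import org.json.JSONObject;

JSONObject fallback = new JSONObject();
JSONObject user    = JSONUtils.getJSONObject("{\"user\":{\"name\":\"bob\"}}", "user", fallback); // the nested object, per the doc
JSONObject invalid = JSONUtils.getJSONObject("not valid json", "user", fallback);                // parse fails, fallback returned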
private void registerEvaluator(CMISScope scope, CMISActionEvaluator<? extends Object> evaluator) { Map<CMISAllowedActionEnum, CMISActionEvaluator<? extends Object>> evaluators = actionEvaluators.get(scope); if (evaluators == null) { evaluators = new LinkedHashMap<CMISAllowedActionEnum, CMISActionEvaluator<? extends Object>>(); actionEvaluators.put(scope, evaluators); } if (evaluators.get(evaluator.getAction()) != null) { throw new AlfrescoRuntimeException("Already registered Action Evaluator " + evaluator.getAction() + " for scope " + scope); } evaluators.put(evaluator.getAction(), evaluator); if (logger.isDebugEnabled()) logger.debug("Registered Action Evaluator: scope=" + scope + ", evaluator=" + evaluator); }
void function(CMISScope scope, CMISActionEvaluator<? extends Object> evaluator) { Map<CMISAllowedActionEnum, CMISActionEvaluator<? extends Object>> evaluators = actionEvaluators.get(scope); if (evaluators == null) { evaluators = new LinkedHashMap<CMISAllowedActionEnum, CMISActionEvaluator<? extends Object>>(); actionEvaluators.put(scope, evaluators); } if (evaluators.get(evaluator.getAction()) != null) { throw new AlfrescoRuntimeException(STR + evaluator.getAction() + STR + scope); } evaluators.put(evaluator.getAction(), evaluator); if (logger.isDebugEnabled()) logger.debug(STR + scope + STR + evaluator); }
/** * Register an Action Evaluator * * @param scope * @param evaluator */
Register an Action Evaluator
registerEvaluator
{ "repo_name": "loftuxab/community-edition-old", "path": "projects/repository/source/java/org/alfresco/cmis/mapping/CMISMapping.java", "license": "lgpl-3.0", "size": 46480 }
[ "java.util.LinkedHashMap", "java.util.Map", "org.alfresco.cmis.CMISActionEvaluator", "org.alfresco.cmis.CMISAllowedActionEnum", "org.alfresco.cmis.CMISScope", "org.alfresco.error.AlfrescoRuntimeException" ]
import java.util.LinkedHashMap; import java.util.Map; import org.alfresco.cmis.CMISActionEvaluator; import org.alfresco.cmis.CMISAllowedActionEnum; import org.alfresco.cmis.CMISScope; import org.alfresco.error.AlfrescoRuntimeException;
import java.util.*; import org.alfresco.cmis.*; import org.alfresco.error.*;
[ "java.util", "org.alfresco.cmis", "org.alfresco.error" ]
java.util; org.alfresco.cmis; org.alfresco.error;
747,045
public TfvcChangeset getChangeset( final UUID project, final int id, final Integer maxChangeCount, final Boolean includeDetails, final Boolean includeWorkItems, final Integer maxCommentLength, final Boolean includeSourceRename, final Integer skip, final Integer top, final String orderby, final TfvcChangesetSearchCriteria searchCriteria) { final UUID locationId = UUID.fromString("0bc8f0a4-6bfb-42a9-ba84-139da7b99c49"); //$NON-NLS-1$ final ApiResourceVersion apiVersion = new ApiResourceVersion("3.1-preview.2"); //$NON-NLS-1$ final Map<String, Object> routeValues = new HashMap<String, Object>(); routeValues.put("project", project); //$NON-NLS-1$ routeValues.put("id", id); //$NON-NLS-1$ final NameValueCollection queryParameters = new NameValueCollection(); queryParameters.addIfNotNull("maxChangeCount", maxChangeCount); //$NON-NLS-1$ queryParameters.addIfNotNull("includeDetails", includeDetails); //$NON-NLS-1$ queryParameters.addIfNotNull("includeWorkItems", includeWorkItems); //$NON-NLS-1$ queryParameters.addIfNotNull("maxCommentLength", maxCommentLength); //$NON-NLS-1$ queryParameters.addIfNotNull("includeSourceRename", includeSourceRename); //$NON-NLS-1$ queryParameters.addIfNotNull("$skip", skip); //$NON-NLS-1$ queryParameters.addIfNotNull("$top", top); //$NON-NLS-1$ queryParameters.addIfNotEmpty("$orderby", orderby); //$NON-NLS-1$ addModelAsQueryParams(queryParameters, searchCriteria); final VssRestRequest httpRequest = super.createRequest(HttpMethod.GET, locationId, routeValues, apiVersion, queryParameters, VssMediaTypes.APPLICATION_JSON_TYPE); return super.sendRequest(httpRequest, TfvcChangeset.class); }
TfvcChangeset function( final UUID project, final int id, final Integer maxChangeCount, final Boolean includeDetails, final Boolean includeWorkItems, final Integer maxCommentLength, final Boolean includeSourceRename, final Integer skip, final Integer top, final String orderby, final TfvcChangesetSearchCriteria searchCriteria) { final UUID locationId = UUID.fromString(STR); final ApiResourceVersion apiVersion = new ApiResourceVersion(STR); final Map<String, Object> routeValues = new HashMap<String, Object>(); routeValues.put(STR, project); routeValues.put("id", id); final NameValueCollection queryParameters = new NameValueCollection(); queryParameters.addIfNotNull(STR, maxChangeCount); queryParameters.addIfNotNull(STR, includeDetails); queryParameters.addIfNotNull(STR, includeWorkItems); queryParameters.addIfNotNull(STR, maxCommentLength); queryParameters.addIfNotNull(STR, includeSourceRename); queryParameters.addIfNotNull("$skip", skip); queryParameters.addIfNotNull("$top", top); queryParameters.addIfNotEmpty(STR, orderby); addModelAsQueryParams(queryParameters, searchCriteria); final VssRestRequest httpRequest = super.createRequest(HttpMethod.GET, locationId, routeValues, apiVersion, queryParameters, VssMediaTypes.APPLICATION_JSON_TYPE); return super.sendRequest(httpRequest, TfvcChangeset.class); }
/** * [Preview API 3.1-preview.2] Retrieve a Tfvc Changeset * * @param project * Project ID * @param id * * @param maxChangeCount * * @param includeDetails * * @param includeWorkItems * * @param maxCommentLength * * @param includeSourceRename * * @param skip * * @param top * * @param orderby * * @param searchCriteria * * @return TfvcChangeset */
[Preview API 3.1-preview.2] Retrieve a Tfvc Changeset
getChangeset
{ "repo_name": "Microsoft/vso-httpclient-java", "path": "Rest/alm-tfs-client/src/main/generated/com/microsoft/alm/teamfoundation/sourcecontrol/webapi/TfvcHttpClientBase.java", "license": "mit", "size": 99570 }
[ "com.microsoft.alm.client.HttpMethod", "com.microsoft.alm.client.VssMediaTypes", "com.microsoft.alm.client.VssRestRequest", "com.microsoft.alm.client.model.NameValueCollection", "com.microsoft.alm.teamfoundation.sourcecontrol.webapi.TfvcChangeset", "com.microsoft.alm.teamfoundation.sourcecontrol.webapi.TfvcChangesetSearchCriteria", "com.microsoft.alm.visualstudio.services.webapi.ApiResourceVersion", "java.util.HashMap", "java.util.Map", "java.util.UUID" ]
import com.microsoft.alm.client.HttpMethod; import com.microsoft.alm.client.VssMediaTypes; import com.microsoft.alm.client.VssRestRequest; import com.microsoft.alm.client.model.NameValueCollection; import com.microsoft.alm.teamfoundation.sourcecontrol.webapi.TfvcChangeset; import com.microsoft.alm.teamfoundation.sourcecontrol.webapi.TfvcChangesetSearchCriteria; import com.microsoft.alm.visualstudio.services.webapi.ApiResourceVersion; import java.util.HashMap; import java.util.Map; import java.util.UUID;
import com.microsoft.alm.client.*; import com.microsoft.alm.client.model.*; import com.microsoft.alm.teamfoundation.sourcecontrol.webapi.*; import com.microsoft.alm.visualstudio.services.webapi.*; import java.util.*;
[ "com.microsoft.alm", "java.util" ]
com.microsoft.alm; java.util;
1,896,298
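A hedged call sketch for the TFVC changeset lookup above; tfvcClient is an assumed TfvcHttpClientBase-derived instance and the project GUID and changeset id are placeholders. Passing null for the optional arguments simply leaves them unset.

UUID projectId = UUID.fromString("00000000-0000-0000-0000-000000000001"); // placeholder project id
TfvcChangeset changeset = tfvcClient.getChangeset(
        projectId, 42,          // changeset id
        null, null, null, null, // maxChangeCount, includeDetails, includeWorkItems, maxCommentLength
        null, null, null,       // includeSourceRename, skip, top
        null, null);            // orderby, searchCriteria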
public final void findMinMaxBoundaries() { int a; // fixed_t int b; min_x = min_y = MAXINT; max_x = max_y = -MAXINT; for (int i = 0; i < LL.numvertexes; i++) { if (LL.vertexes[i].x < min_x) min_x = LL.vertexes[i].x; else if (LL.vertexes[i].x > max_x) max_x = LL.vertexes[i].x; if (LL.vertexes[i].y < min_y) min_y = LL.vertexes[i].y; else if (LL.vertexes[i].y > max_y) max_y = LL.vertexes[i].y; } max_w = max_x - min_x; max_h = max_y - min_y; min_w = 2 * PLAYERRADIUS; // const? never changed? min_h = 2 * PLAYERRADIUS; a = FixedDiv(f_w << FRACBITS, max_w); b = FixedDiv(f_h << FRACBITS, max_h); min_scale_mtof = a < b ? a : b; if (min_scale_mtof < 0) { // MAES: safeguard against negative scaling e.g. in Europe.wad // This seems to be the limit. min_scale_mtof = MINIMUM_VIABLE_SCALE; } max_scale_mtof = FixedDiv(f_h << FRACBITS, 2 * PLAYERRADIUS); }
final void function() { int a; int b; min_x = min_y = MAXINT; max_x = max_y = -MAXINT; for (int i = 0; i < LL.numvertexes; i++) { if (LL.vertexes[i].x < min_x) min_x = LL.vertexes[i].x; else if (LL.vertexes[i].x > max_x) max_x = LL.vertexes[i].x; if (LL.vertexes[i].y < min_y) min_y = LL.vertexes[i].y; else if (LL.vertexes[i].y > max_y) max_y = LL.vertexes[i].y; } max_w = max_x - min_x; max_h = max_y - min_y; min_w = 2 * PLAYERRADIUS; min_h = 2 * PLAYERRADIUS; a = FixedDiv(f_w << FRACBITS, max_w); b = FixedDiv(f_h << FRACBITS, max_h); min_scale_mtof = a < b ? a : b; if (min_scale_mtof < 0) { min_scale_mtof = MINIMUM_VIABLE_SCALE; } max_scale_mtof = FixedDiv(f_h << FRACBITS, 2 * PLAYERRADIUS); }
/** * Determines bounding box of all vertices, sets global variables * controlling zoom range. */
Determines bounding box of all vertices, sets global variables controlling zoom range
findMinMaxBoundaries
{ "repo_name": "jendave/mochadoom", "path": "src/main/java/net/sourceforge/mochadoom/automap/Map.java", "license": "gpl-3.0", "size": 50228 }
[ "net.sourceforge.mochadoom.menu.fixed_t.FixedDiv" ]
import net.sourceforge.mochadoom.menu.fixed_t.FixedDiv;
import net.sourceforge.mochadoom.menu.fixed_t.*;
[ "net.sourceforge.mochadoom" ]
net.sourceforge.mochadoom;
334,189
public void setReporter(Processor reporter) { this.reporter = reporter; } /** * Specifies to only retain the first n'th number of received {@link Exchange}s. * <p/> * This is used when testing with big data, to reduce memory consumption by not storing * copies of every {@link Exchange} this mock endpoint receives. * <p/> * <b>Important:</b> When using this limitation, then the {@link #getReceivedCounter()}
void function(Processor reporter) { this.reporter = reporter; } /** * Specifies to only retain the first n'th number of received {@link Exchange}s. * <p/> * This is used when testing with big data, to reduce memory consumption by not storing * copies of every {@link Exchange} this mock endpoint receives. * <p/> * <b>Important:</b> When using this limitation, then the {@link #getReceivedCounter()}
/** * Allows a processor to be added to the endpoint to report on progress of the test */
Allows a processor to be added to the endpoint to report on progress of the test
setReporter
{ "repo_name": "logzio/camel", "path": "camel-core/src/main/java/org/apache/camel/component/mock/MockEndpoint.java", "license": "apache-2.0", "size": 55338 }
[ "org.apache.camel.Exchange", "org.apache.camel.Processor" ]
import org.apache.camel.Exchange; import org.apache.camel.Processor;
import org.apache.camel.*;
[ "org.apache.camel" ]
org.apache.camel;
1,739,130
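A sketch of hooking a progress reporter onto a mock endpoint in a Camel test; the endpoint URI, the camelContext reference and the println are illustrative assumptions.

MockEndpoint mock = camelContext.getEndpoint("mock:result", MockEndpoint.class);
mock.setReporter(exchange -> System.out.println("mock received: " + exchange.getIn().getBody()));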
@Test public void test3686b() throws Exception { HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TABLE_NAME); Scan scan = new Scan(); scan.setCaching(SCANNER_CACHING); // Set a very high timeout, we want to test what happens when a RS // fails but the region is recovered before the lease times out. // Since the RS is already created, this conf is client-side only for // this new table Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); conf.setInt( HConstants.HBASE_REGIONSERVER_LEASE_PERIOD_KEY, SCANNER_TIMEOUT*100); HTable higherScanTimeoutTable = new HTable(conf, TABLE_NAME); ResultScanner r = higherScanTimeoutTable.getScanner(scan); int count = 1; r.next(); // Sleep, allowing the scan to timeout on the server but not on the client. Thread.sleep(SCANNER_TIMEOUT+2000); while(r.next() != null) { count ++; } assertEquals(NB_ROWS, count); r.close(); }
void function() throws Exception { HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(TABLE_NAME); Scan scan = new Scan(); scan.setCaching(SCANNER_CACHING); Configuration conf = new Configuration(TEST_UTIL.getConfiguration()); conf.setInt( HConstants.HBASE_REGIONSERVER_LEASE_PERIOD_KEY, SCANNER_TIMEOUT*100); HTable higherScanTimeoutTable = new HTable(conf, TABLE_NAME); ResultScanner r = higherScanTimeoutTable.getScanner(scan); int count = 1; r.next(); Thread.sleep(SCANNER_TIMEOUT+2000); while(r.next() != null) { count ++; } assertEquals(NB_ROWS, count); r.close(); }
/** * Make sure that no rows are lost if the scanner timeout is longer on the * client than the server, and the scan times out on the server but not the * client. * @throws Exception */
Make sure that no rows are lost if the scanner timeout is longer on the client than the server, and the scan times out on the server but not the client
test3686b
{ "repo_name": "Shmuma/hbase", "path": "src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java", "license": "apache-2.0", "size": 6512 }
[ "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.hbase.HConstants", "org.apache.hadoop.hbase.regionserver.HRegionServer", "org.junit.Assert" ]
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.junit.Assert;
import org.apache.hadoop.conf.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.regionserver.*; import org.junit.*;
[ "org.apache.hadoop", "org.junit" ]
org.apache.hadoop; org.junit;
81,646
public void setProgressBarValue(final int value) { if (SwingUtilities.isEventDispatchThread()) { progressBar.setValue(value); } else { SwingUtilities.invokeLater(() -> progressBar.setValue(value)); } }
void function(final int value) { if (SwingUtilities.isEventDispatchThread()) { progressBar.setValue(value); } else { SwingUtilities.invokeLater(() -> progressBar.setValue(value)); } }
/** * Sets the current value of the progress bar. * <p> * If not called on the event dispatch thread, this method uses * SwingUtilities.invokeLater() to do the actual operation on the EDT. */
Sets the current value of the progress bar. If not called on the event dispatch thread, this method uses SwingUtilities.invokeLater() to do the actual operation on the EDT
setProgressBarValue
{ "repo_name": "RodrigoRubino/DC-UFSCar-ES2-201601-Grupo-Brainstorm", "path": "src/main/java/net/sf/jabref/gui/JabRefFrame.java", "license": "gpl-2.0", "size": 95021 }
[ "javax.swing.SwingUtilities" ]
import javax.swing.SwingUtilities;
import javax.swing.*;
[ "javax.swing" ]
javax.swing;
287,347
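A sketch of driving the EDT-safe setter above from a worker thread; the frame reference (a JabRefFrame) and the loop are assumptions for illustration.

new Thread(() -> {
    for (int i = 0; i <= 100; i += 10) {
        frame.setProgressBarValue(i); // safe off the EDT: the method re-dispatches via invokeLater
    }
}).start();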
public String encodeImage(Drawable drawable) { Bitmap bitmap = ((BitmapDrawable) drawable).getBitmap(); ByteArrayOutputStream outStream = new ByteArrayOutputStream(); bitmap.compress(Bitmap.CompressFormat.PNG, COMPRESSION_LEVEL, outStream); byte[] bitmapContent = outStream.toByteArray(); String encodedImage = Base64.encodeToString(bitmapContent, Base64.NO_WRAP); StreamHandler.closeOutputStream(outStream, TAG); return encodedImage; }
String function(Drawable drawable) { Bitmap bitmap = ((BitmapDrawable) drawable).getBitmap(); ByteArrayOutputStream outStream = new ByteArrayOutputStream(); bitmap.compress(Bitmap.CompressFormat.PNG, COMPRESSION_LEVEL, outStream); byte[] bitmapContent = outStream.toByteArray(); String encodedImage = Base64.encodeToString(bitmapContent, Base64.NO_WRAP); StreamHandler.closeOutputStream(outStream, TAG); return encodedImage; }
/** * Returns a base64 encoded string for a particular image. * * @param drawable - Image as a Drawable object. * @return - Base64 encoded value of the drawable. */
Returns a base64 encoded string for a particular image
encodeImage
{ "repo_name": "Malintha/product-emm", "path": "modules/mobile-agents/android/app-catalog/app/src/main/java/org/wso2/app/catalog/api/ApplicationManager.java", "license": "apache-2.0", "size": 19506 }
[ "android.graphics.Bitmap", "android.graphics.drawable.BitmapDrawable", "android.graphics.drawable.Drawable", "android.util.Base64", "java.io.ByteArrayOutputStream", "org.wso2.app.catalog.utils.StreamHandler" ]
import android.graphics.Bitmap; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.util.Base64; import java.io.ByteArrayOutputStream; import org.wso2.app.catalog.utils.StreamHandler;
import android.graphics.*; import android.graphics.drawable.*; import android.util.*; import java.io.*; import org.wso2.app.catalog.utils.*;
[ "android.graphics", "android.util", "java.io", "org.wso2.app" ]
android.graphics; android.util; java.io; org.wso2.app;
29,937
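A hedged usage sketch for the Drawable-to-Base64 helper above; applicationManager and the icon lookup are assumptions. The drawable must be bitmap-backed, since the method casts to BitmapDrawable.

Drawable icon = ContextCompat.getDrawable(context, R.drawable.ic_launcher); // assumed lookup via the support library
String encoded = applicationManager.encodeImage(icon);                      // Base64 (NO_WRAP) of the PNG-compressed bitmap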
public String collectDriverStatistics(List<QueryStatistic> statisticsList, String queryId) { String sql_parse_time = ""; String load_meta_time = ""; String load_blocks_time = ""; String block_allocation_time = ""; String block_identification_time = ""; long driver_part_time_tmp = 0L; long driver_part_time_tmp2 = 0L; long load_blocks_time_tmp = 0L; String splitChar = " "; try { // get statistic time from the QueryStatistic for (QueryStatistic statistic : statisticsList) { switch (statistic.getMessage()) { case QueryStatisticsConstants.SQL_PARSE: sql_parse_time += statistic.getTimeTaken() + splitChar; driver_part_time_tmp += statistic.getTimeTaken(); break; case QueryStatisticsConstants.LOAD_META: load_meta_time += statistic.getTimeTaken() + splitChar; driver_part_time_tmp += statistic.getTimeTaken(); break; case QueryStatisticsConstants.LOAD_BLOCKS_DRIVER: // multi segments will generate multi load_blocks_time load_blocks_time_tmp += statistic.getTimeTaken(); driver_part_time_tmp += statistic.getTimeTaken(); driver_part_time_tmp2 += statistic.getTimeTaken(); break; case QueryStatisticsConstants.BLOCK_ALLOCATION: block_allocation_time += statistic.getTimeTaken() + splitChar; driver_part_time_tmp += statistic.getTimeTaken(); driver_part_time_tmp2 += statistic.getTimeTaken(); break; case QueryStatisticsConstants.BLOCK_IDENTIFICATION: block_identification_time += statistic.getTimeTaken() + splitChar; driver_part_time_tmp += statistic.getTimeTaken(); driver_part_time_tmp2 += statistic.getTimeTaken(); break; default: break; } } load_blocks_time = load_blocks_time_tmp + splitChar; String driver_part_time = driver_part_time_tmp + splitChar; // structure the query statistics info table StringBuilder tableInfo = new StringBuilder(); int len1 = 8; int len2 = 20; int len3 = 21; int len4 = 24; String line = "+" + printLine("-", len1) + "+" + printLine("-", len2) + "+" + printLine("-", len3) + "+" + printLine("-", len4) + "+"; String line2 = "|" + printLine(" ", len1) + "+" + printLine("-", len2) + "+" + printLine(" ", len3) + "+" + printLine("-", len4) + "+"; // table header tableInfo.append(line).append("\n"); tableInfo.append("|" + printLine(" ", (len1 - "Module".length())) + "Module" + "|" + printLine(" ", (len2 - "Operation Step".length())) + "Operation Step" + "|" + printLine(" ", (len3 - "Total Query Cost".length())) + "Total Query Cost" + "|" + printLine(" ", (len4 - "Query Cost".length())) + "Query Cost" + "|" + "\n"); tableInfo.append(line).append("\n"); // print sql_parse_t,load_meta_t,block_allocation_t,block_identification_t if (!StringUtils.isEmpty(sql_parse_time) && !StringUtils.isEmpty(load_meta_time) && !StringUtils.isEmpty(block_allocation_time) && !StringUtils.isEmpty(block_identification_time)) { tableInfo.append("|" + printLine(" ", len1) + "|" + printLine(" ", (len2 - "SQL parse".length())) + "SQL parse" + "|" + printLine(" ", len3) + "|" + printLine(" ", (len4 - sql_parse_time.length())) + sql_parse_time + "|" + "\n"); tableInfo.append(line2).append("\n"); tableInfo.append("|" + printLine(" ", (len1 - "Driver".length())) + "Driver" + "|" + printLine(" ", (len2 - "Load meta data".length())) + "Load meta data" + "|" + printLine(" ", (len3 - driver_part_time.length())) + driver_part_time + "|" + printLine(" ", (len4 - load_meta_time.length())) + load_meta_time + "|" + "\n"); tableInfo.append(line2).append("\n"); tableInfo.append("|" + printLine(" ", (len1 - "Part".length())) + "Part" + "|" + printLine(" ", (len2 - "Load blocks driver".length())) + "Load blocks driver" + "|" + 
printLine(" ", len3) + "|" + printLine(" ", (len4 - load_blocks_time.length())) + load_blocks_time + "|" + "\n"); tableInfo.append(line2).append("\n"); tableInfo.append("|" + printLine(" ", len1) + "|" + printLine(" ", (len2 - "Block allocation".length())) + "Block allocation" + "|" + printLine(" ", len3) + "|" + printLine(" ", (len4 - block_allocation_time.length())) + block_allocation_time + "|" + "\n"); tableInfo.append(line2).append("\n"); tableInfo.append("|" + printLine(" ", len1) + "|" + printLine(" ", (len2 - "Block identification".length())) + "Block identification" + "|" + printLine(" ", len3) + "|" + printLine(" ", (len4 - block_identification_time.length())) + block_identification_time + "|" + "\n"); tableInfo.append(line).append("\n"); // show query statistic as "query id" + "table" return "Print query statistic for query id: " + queryId + "\n" + tableInfo.toString(); } else if (!StringUtils.isEmpty(block_allocation_time) && !StringUtils.isEmpty(block_identification_time)) { // when we can't get sql parse time, we only print the last two driver_part_time = driver_part_time_tmp2 + splitChar; tableInfo.append("|" + printLine(" ", (len1 - "Driver".length())) + "Driver" + "|" + printLine(" ", (len2 - "Load blocks driver".length())) + "Load blocks driver" + "|" + printLine(" ", len3) + "|" + printLine(" ", (len4 - load_blocks_time.length())) + load_blocks_time + "|" + "\n"); tableInfo.append(line2).append("\n"); tableInfo.append("|" + printLine(" ", (len1 - "Part".length())) + "Part" + "|" + printLine(" ", (len2 - "Block allocation".length())) + "Block allocation" + "|" + printLine(" ", (len3 - driver_part_time.length())) + driver_part_time + "|" + printLine(" ", (len4 - block_allocation_time.length())) + block_allocation_time + "|" + "\n"); tableInfo.append(line2).append("\n"); tableInfo.append("|" + printLine(" ", len1) + "|" + printLine(" ", (len2 - "Block identification".length())) + "Block identification" + "|" + printLine(" ", len3) + "|" + printLine(" ", (len4 - block_identification_time.length())) + block_identification_time + "|" + "\n"); tableInfo.append(line).append("\n"); // show query statistic as "query id" + "table" return "Print query statistic for query id: " + queryId + "\n" + tableInfo.toString(); } return null; } catch (Exception ex) { return "Put statistics into table failed, catch exception: " + ex.getMessage(); } }
String function(List<QueryStatistic> statisticsList, String queryId) { String sql_parse_time = STRSTRSTRSTRSTR STR+STR-STR+STR-STR+STR-STR+STR-STR+STR STR STR+STR-STR+STR STR+STR-STR+STR\nSTR STR STRModuleSTRModuleSTR STR STROperation StepSTROperation StepSTR STR STRTotal Query CostSTRTotal Query CostSTR STR STRQuery CostSTRQuery CostSTR STR\nSTR\nSTR STR STR STR STRSQL parseSTRSQL parseSTR STR STR STR STR STR\nSTR\nSTR STR STRDriverSTRDriverSTR STR STRLoad meta dataSTRLoad meta dataSTR STR STR STR STR STR\nSTR\nSTR STR STRPartSTRPartSTR STR STRLoad blocks driverSTRLoad blocks driverSTR STR STR STR STR STR\nSTR\nSTR STR STR STR STRBlock allocationSTRBlock allocationSTR STR STR STR STR STR\nSTR\nSTR STR STR STR STRBlock identificationSTRBlock identificationSTR STR STR STR STR STR\nSTR\nSTRPrint query statistic for query id: STR\nSTR STR STRDriverSTRDriverSTR STR STRLoad blocks driverSTRLoad blocks driverSTR STR STR STR STR STR\nSTR\nSTR STR STRPartSTRPartSTR STR STRBlock allocationSTRBlock allocationSTR STR STR STR STR STR\nSTR\nSTR STR STR STR STRBlock identificationSTRBlock identificationSTR STR STR STR STR STR\nSTR\nSTRPrint query statistic for query id: STR\nSTRPut statistics into table failed, catch exception: " + ex.getMessage(); } }
/** * Parses queryStatisticsMap and puts the recorded times into a formatted table */
Parses queryStatisticsMap and puts the recorded times into a formatted table
collectDriverStatistics
{ "repo_name": "manishgupta88/carbondata", "path": "core/src/main/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImpl.java", "license": "apache-2.0", "size": 11730 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,522,647
@Deprecated public NearCacheConfiguration<K, V> setNearEvictionPolicy(EvictionPolicy<K, V> nearEvictPlc) { this.nearEvictPlc = nearEvictPlc; return this; } /** * Gets cache eviction policy factory. By default, returns {@code null}
NearCacheConfiguration<K, V> function(EvictionPolicy<K, V> nearEvictPlc) { this.nearEvictPlc = nearEvictPlc; return this; } /** * Gets cache eviction policy factory. By default, returns {@code null}
/** * Sets near eviction policy. * * @param nearEvictPlc Near eviction policy. * @return {@code this} for chaining. * * @deprecated Use {@link #setNearEvictionPolicyFactory(Factory)} instead. */
Sets near eviction policy
setNearEvictionPolicy
{ "repo_name": "ilantukh/ignite", "path": "modules/core/src/main/java/org/apache/ignite/configuration/NearCacheConfiguration.java", "license": "apache-2.0", "size": 5073 }
[ "org.apache.ignite.cache.eviction.EvictionPolicy" ]
import org.apache.ignite.cache.eviction.EvictionPolicy;
import org.apache.ignite.cache.eviction.*;
[ "org.apache.ignite" ]
org.apache.ignite;
1,217,736
public int isValid() { // Return true if we have a hash. if (this.completed) { // Check if we are configured to assume validity for a period of time. if (this.assumedValidityDelay > 0) { // Check if we should assume validitity. if (System.currentTimeMillis() < this.assumedValidityTime) { // Assume the cache is valid with out rechecking another validity object. return SourceValidity.VALID; } } return SourceValidity.UNKNOWN; } else { // This is an error, state. We are being asked whether we are valid before // we have been initialized. return SourceValidity.INVALID; } }
int function() { if (this.completed) { if (this.assumedValidityDelay > 0) { if (System.currentTimeMillis() < this.assumedValidityTime) { return SourceValidity.VALID; } } return SourceValidity.UNKNOWN; } else { return SourceValidity.INVALID; } }
/** * Determine if the cache is still valid */
Determine if the cache is still valid
isValid
{ "repo_name": "jamie-dryad/dryad-repo", "path": "dspace-xmlui/dspace-xmlui-api/src/main/java/org/dspace/app/xmlui/utils/DSpaceValidity.java", "license": "bsd-3-clause", "size": 17442 }
[ "org.apache.excalibur.source.SourceValidity" ]
import org.apache.excalibur.source.SourceValidity;
import org.apache.excalibur.source.*;
[ "org.apache.excalibur" ]
org.apache.excalibur;
867,743
public InstructionHandle getExceptionThrower() { return exceptionThrower; }
InstructionHandle function() { return exceptionThrower; }
/** * Get the instruction for which this block is an exception thrower. * * @return the instruction, or null if this block is not an exception * thrower */
Get the instruction for which this block is an exception thrower
getExceptionThrower
{ "repo_name": "sewe/spotbugs", "path": "spotbugs/src/main/java/edu/umd/cs/findbugs/ba/BasicBlock.java", "license": "lgpl-2.1", "size": 14741 }
[ "org.apache.bcel.generic.InstructionHandle" ]
import org.apache.bcel.generic.InstructionHandle;
import org.apache.bcel.generic.*;
[ "org.apache.bcel" ]
org.apache.bcel;
216,669
protected String extractUserId(ServletRequest request) { return null; }
String function(ServletRequest request) { return null; }
/** * Returns the user id, used to track this connection. * This SHOULD be overridden by subclasses. * * @param request * @return a unique user id, if logged in; otherwise null. */
Returns the user id, used to track this connection. This SHOULD be overridden by subclasses
extractUserId
{ "repo_name": "jamiepg1/jetty.project", "path": "jetty-servlets/src/main/java/org/eclipse/jetty/servlets/DoSFilter.java", "license": "apache-2.0", "size": 35196 }
[ "javax.servlet.ServletRequest" ]
import javax.servlet.ServletRequest;
import javax.servlet.*;
[ "javax.servlet" ]
javax.servlet;
662,545
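A minimal override sketch showing the subclassing pattern the doc above calls for; the session attribute name and the HttpServletRequest cast are assumptions, not part of the DoSFilter contract.

import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.eclipse.jetty.servlets.DoSFilter;

public class PerUserDoSFilter extends DoSFilter {
    @Override
    protected String extractUserId(ServletRequest request) {
        HttpSession session = ((HttpServletRequest) request).getSession(false);
        return session == null ? null : (String) session.getAttribute("userId"); // null => connection tracked anonymously
    }
}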
public static boolean getIsJavaRecordWriter(JobConf conf) { return conf.getBoolean(Submitter.IS_JAVA_RW, false); }
static boolean function(JobConf conf) { return conf.getBoolean(Submitter.IS_JAVA_RW, false); }
/** * Will the reduce use a Java RecordWriter? * @param conf the configuration to check * @return true, if the output of the job will be written by Java */
Will the reduce use a Java RecordWriter
getIsJavaRecordWriter
{ "repo_name": "apache/hadoop-mapreduce", "path": "src/java/org/apache/hadoop/mapred/pipes/Submitter.java", "license": "apache-2.0", "size": 19474 }
[ "org.apache.hadoop.mapred.JobConf" ]
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
338,851
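A tiny sketch of reading the flag documented above on a fresh configuration; it defaults to false until the pipes job is configured to use a Java RecordWriter.

JobConf conf = new JobConf();
boolean javaWriter = Submitter.getIsJavaRecordWriter(conf); // false here: IS_JAVA_RW is unset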
private String popPath(String path) throws SAXException { final StringBuffer buffer = new StringBuffer(); final String last = (String) this.stack.pop(); if (path.toLowerCase().equals(last)) { for (int x = 0; x < this.stack.size(); x ++) buffer.append('/').append(this.stack.get(x)); return buffer.append('/').append(last).toString(); } throw new SAXException("Tag <" + path + "/> unbalanced at path \"" + pushPath(last) + "\""); }
String function(String path) throws SAXException { final StringBuffer buffer = new StringBuffer(); final String last = (String) this.stack.pop(); if (path.toLowerCase().equals(last)) { for (int x = 0; x < this.stack.size(); x ++) buffer.append('/').append(this.stack.get(x)); return buffer.append('/').append(last).toString(); } throw new SAXException(STR + path + STRSTR\""); }
/** * <p>Pop the last element name from the pseudo-XPath lookup stack.</p> * * @return a {@link String} like <code>/element/element/element</code>. */
Pop the last element name from the pseudo-XPath lookup stack
popPath
{ "repo_name": "dkulp/couldit-autoexport", "path": "src/main/java/it/could/util/http/WebDavClient.java", "license": "bsd-3-clause", "size": 38649 }
[ "org.xml.sax.SAXException" ]
import org.xml.sax.SAXException;
import org.xml.sax.*;
[ "org.xml.sax" ]
org.xml.sax;
700,943
public Room roomFromRoomSummary(RoomSummary roomSummary) { // sanity check if ((null == roomSummary) || (null == roomSummary.getMatrixId())) { return null; } MXSession session = Matrix.getMXSession(mContext, roomSummary.getMatrixId()); // check if the session is active if ((null == session) || (!session.isAlive())) { return null; } return Matrix.getMXSession(mContext, roomSummary.getMatrixId()).getDataHandler().getStore().getRoom(roomSummary.getRoomId()); }
Room function(RoomSummary roomSummary) { if ((null == roomSummary) (null == roomSummary.getMatrixId())) { return null; } MXSession session = Matrix.getMXSession(mContext, roomSummary.getMatrixId()); if ((null == session) (!session.isAlive())) { return null; } return Matrix.getMXSession(mContext, roomSummary.getMatrixId()).getDataHandler().getStore().getRoom(roomSummary.getRoomId()); }
/** * Retrieve a Room from a room summary * @param roomSummary the room summary used to look up the room. * @return the Room. */
Retrieve a Room from a room summary
roomFromRoomSummary
{ "repo_name": "matrix-org/matrix-android-console", "path": "console/src/main/java/org/matrix/console/adapters/ConsoleRoomSummaryAdapter.java", "license": "apache-2.0", "size": 4530 }
[ "org.matrix.androidsdk.MXSession", "org.matrix.androidsdk.data.Room", "org.matrix.androidsdk.data.RoomSummary", "org.matrix.console.Matrix" ]
import org.matrix.androidsdk.MXSession; import org.matrix.androidsdk.data.Room; import org.matrix.androidsdk.data.RoomSummary; import org.matrix.console.Matrix;
import org.matrix.androidsdk.*; import org.matrix.androidsdk.data.*; import org.matrix.console.*;
[ "org.matrix.androidsdk", "org.matrix.console" ]
org.matrix.androidsdk; org.matrix.console;
762,344
public static Optional<SamlRegisteredServiceServiceProviderMetadataFacade> get(final SamlRegisteredServiceCachingMetadataResolver resolver, final SamlRegisteredService registeredService, final String entityID) { return get(resolver, registeredService, entityID, new CriteriaSet()); }
static Optional<SamlRegisteredServiceServiceProviderMetadataFacade> function(final SamlRegisteredServiceCachingMetadataResolver resolver, final SamlRegisteredService registeredService, final String entityID) { return get(resolver, registeredService, entityID, new CriteriaSet()); }
/** * Adapt saml metadata and parse. Acts as a facade. * * @param resolver the resolver * @param registeredService the service * @param entityID the entity id * @return the saml metadata adaptor */
Adapt saml metadata and parse. Acts as a facade
get
{ "repo_name": "robertoschwald/cas", "path": "support/cas-server-support-saml-idp-core/src/main/java/org/apereo/cas/support/saml/services/idp/metadata/SamlRegisteredServiceServiceProviderMetadataFacade.java", "license": "apache-2.0", "size": 11268 }
[ "java.util.Optional", "net.shibboleth.utilities.java.support.resolver.CriteriaSet", "org.apereo.cas.support.saml.services.SamlRegisteredService", "org.apereo.cas.support.saml.services.idp.metadata.cache.SamlRegisteredServiceCachingMetadataResolver" ]
import java.util.Optional; import net.shibboleth.utilities.java.support.resolver.CriteriaSet; import org.apereo.cas.support.saml.services.SamlRegisteredService; import org.apereo.cas.support.saml.services.idp.metadata.cache.SamlRegisteredServiceCachingMetadataResolver;
import java.util.*; import net.shibboleth.utilities.java.support.resolver.*; import org.apereo.cas.support.saml.services.*; import org.apereo.cas.support.saml.services.idp.metadata.cache.*;
[ "java.util", "net.shibboleth.utilities", "org.apereo.cas" ]
java.util; net.shibboleth.utilities; org.apereo.cas;
1,532,785
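A hedged lookup sketch for the facade factory above; the resolver, the registered service and the entity id literal are assumed to come from the surrounding CAS SAML IdP context.

Optional<SamlRegisteredServiceServiceProviderMetadataFacade> facade =
        SamlRegisteredServiceServiceProviderMetadataFacade.get(resolver, registeredService,
                "https://sp.example.org/shibboleth");
if (facade.isPresent()) {
    // work with the resolved service-provider metadata adaptor here
}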