column                 type            min     max
method                 stringlengths   13      441k
clean_method           stringlengths   7       313k
doc                    stringlengths   17      17.3k
comment                stringlengths   3       1.42k
method_name            stringlengths   1       273
extra                  dict            -       -
imports                sequence        -       -
imports_info           stringlengths   19      34.8k
cluster_imports_info   stringlengths   15      3.66k
libraries              sequence        -       -
libraries_info         stringlengths   6       661
id                     int64           0       2.92M
(min/max are string lengths; for id they are the value range)
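Each record below follows this schema. As a minimal sketch only, one way such a row could be modeled in Java, assuming the rows are available as JSON objects with exactly these field names; the class, the Gson-based parsing, and the fromJson helper are illustrative additions, not part of the dataset itself:

// Illustrative model for one row of the corpus (field names taken from the schema above).
// Gson is assumed here purely as an example JSON parser.
import com.google.gson.Gson;
import java.util.List;

public class MethodRecord {
    // Raw Java method and its normalized form (in clean_method the method name is
    // rewritten to "function" and string literals are masked as STR, as seen in the rows below).
    String method;
    String clean_method;
    // Full Javadoc block and its shortened summary sentence.
    String doc;
    String comment;
    String method_name;
    // Repository metadata: repo_name, path, license, size.
    Extra extra;
    // Fully qualified imports, their import statements, the wildcard-collapsed
    // variants, and the top-level library prefixes.
    List<String> imports;
    String imports_info;
    String cluster_imports_info;
    List<String> libraries;
    String libraries_info;
    long id;

    static class Extra {
        String repo_name;
        String path;
        String license;
        long size;
    }

    // Example: parse a single JSON-encoded row (hypothetical input string).
    static MethodRecord fromJson(String jsonRow) {
        return new Gson().fromJson(jsonRow, MethodRecord.class);
    }
}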
@SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { java.awt.GridBagConstraints gridBagConstraints; jTagsPanel = new javax.swing.JPanel(); jTagTablePanel = new org.jdesktop.swingx.JXPanel(); infoPanel = new org.jdesktop.swingx.JXCollapsiblePane(); jXLabel1 = new org.jdesktop.swingx.JXLabel(); jScrollPane4 = new javax.swing.JScrollPane(); villageListPanel = new javax.swing.JPanel(); jScrollPane5 = new javax.swing.JScrollPane(); jVillageList = new javax.swing.JList(); jAlwaysOnTopBox = new javax.swing.JCheckBox(); jTagPanel = new org.jdesktop.swingx.JXPanel(); capabilityInfoPanel1 = new de.tor.tribes.ui.components.CapabilityInfoPanel(); jTagsPanel.setLayout(new java.awt.BorderLayout(10, 0)); jTagTablePanel.setLayout(new java.awt.BorderLayout()); infoPanel.setCollapsed(true); infoPanel.setInheritAlpha(false);
@SuppressWarnings(STR) void function() { java.awt.GridBagConstraints gridBagConstraints; jTagsPanel = new javax.swing.JPanel(); jTagTablePanel = new org.jdesktop.swingx.JXPanel(); infoPanel = new org.jdesktop.swingx.JXCollapsiblePane(); jXLabel1 = new org.jdesktop.swingx.JXLabel(); jScrollPane4 = new javax.swing.JScrollPane(); villageListPanel = new javax.swing.JPanel(); jScrollPane5 = new javax.swing.JScrollPane(); jVillageList = new javax.swing.JList(); jAlwaysOnTopBox = new javax.swing.JCheckBox(); jTagPanel = new org.jdesktop.swingx.JXPanel(); capabilityInfoPanel1 = new de.tor.tribes.ui.components.CapabilityInfoPanel(); jTagsPanel.setLayout(new java.awt.BorderLayout(10, 0)); jTagTablePanel.setLayout(new java.awt.BorderLayout()); infoPanel.setCollapsed(true); infoPanel.setInheritAlpha(false);
/** This method is called from within the constructor to * initialize the form. * WARNING: Do NOT modify this code. The content of this method is * always regenerated by the Form Editor. */
This method is called from within the constructor to initialize the form. always regenerated by the Form Editor
initComponents
{ "repo_name": "Akeshihiro/dsworkbench", "path": "Core/src/main/java/de/tor/tribes/ui/views/DSWorkbenchTagFrame.java", "license": "apache-2.0", "size": 33841 }
[ "java.awt.BorderLayout" ]
import java.awt.BorderLayout;
import java.awt.*;
[ "java.awt" ]
java.awt;
426,396
public static void deleteCrash(User user, Long crashId) { Crash crash = lookupCrashByUserAndId(user, crashId); // FIXME: async deletion via taskomatic? if (crash.getStoragePath() != null) { File storageDir = new File(Config.get().getString("web.mount_point"), crash.getStoragePath()); for (CrashFile cf : crash.getCrashFiles()) { File crashFile = new File(storageDir, cf.getFilename()); if (crashFile.exists() && crashFile.isFile()) { crashFile.delete(); } } storageDir.delete(); } CrashFactory.delete(crash); }
static void function(User user, Long crashId) { Crash crash = lookupCrashByUserAndId(user, crashId); if (crash.getStoragePath() != null) { File storageDir = new File(Config.get().getString(STR), crash.getStoragePath()); for (CrashFile cf : crash.getCrashFiles()) { File crashFile = new File(storageDir, cf.getFilename()); if (crashFile.exists() && crashFile.isFile()) { crashFile.delete(); } } storageDir.delete(); } CrashFactory.delete(crash); }
/** * Delete a crash from database and filer. * @param user User to check the permissions for. * @param crashId The id of the crash to delete. */
Delete a crash from database and filer
deleteCrash
{ "repo_name": "xkollar/spacewalk", "path": "java/code/src/com/redhat/rhn/manager/system/CrashManager.java", "license": "gpl-2.0", "size": 4807 }
[ "com.redhat.rhn.common.conf.Config", "com.redhat.rhn.domain.server.Crash", "com.redhat.rhn.domain.server.CrashFactory", "com.redhat.rhn.domain.server.CrashFile", "com.redhat.rhn.domain.user.User", "java.io.File" ]
import com.redhat.rhn.common.conf.Config; import com.redhat.rhn.domain.server.Crash; import com.redhat.rhn.domain.server.CrashFactory; import com.redhat.rhn.domain.server.CrashFile; import com.redhat.rhn.domain.user.User; import java.io.File;
import com.redhat.rhn.common.conf.*; import com.redhat.rhn.domain.server.*; import com.redhat.rhn.domain.user.*; import java.io.*;
[ "com.redhat.rhn", "java.io" ]
com.redhat.rhn; java.io;
1,327,470
ServiceCall<Void> beginDeleteAsyncRelativeRetryNoStatusAsync(final ServiceCallback<Void> serviceCallback) throws IllegalArgumentException;
ServiceCall<Void> beginDeleteAsyncRelativeRetryNoStatusAsync(final ServiceCallback<Void> serviceCallback) throws IllegalArgumentException;
/** * Long running delete request, service returns a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if callback is null * @return the {@link ServiceCall} object */
Long running delete request, service returns a 202 to the initial request. Poll the endpoint indicated in the Azure-AsyncOperation header for operation status
beginDeleteAsyncRelativeRetryNoStatusAsync
{ "repo_name": "yaqiyang/autorest", "path": "src/generator/AutoRest.Java.Azure.Tests/src/main/java/fixtures/lro/LROSADs.java", "license": "mit", "size": 104951 }
[ "com.microsoft.rest.ServiceCall", "com.microsoft.rest.ServiceCallback" ]
import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
2,216,154
@Test public void testSelect() { assertAndLog("$('div').select();", jsStatement.chain(EventsHelper.select()).render()); } /** * Test method for * {@link org.odlabs.wiquery.core.javascript.helper.EventsHelper#select(org.odlabs.wiquery.core.javascript.JsScope)}
void function() { assertAndLog(STR, jsStatement.chain(EventsHelper.select()).render()); } /** * Test method for * {@link org.odlabs.wiquery.core.javascript.helper.EventsHelper#select(org.odlabs.wiquery.core.javascript.JsScope)}
/** * Test method for * {@link org.odlabs.wiquery.core.javascript.helper.EventsHelper#select()}. */
Test method for <code>org.odlabs.wiquery.core.javascript.helper.EventsHelper#select()</code>
testSelect
{ "repo_name": "openengsb-attic/forks-org.odlabs.wiquery", "path": "src/test/java/org/odlabs/wiquery/core/javascript/helper/EventsHelperTestCase.java", "license": "mit", "size": 18755 }
[ "org.junit.Test", "org.odlabs.wiquery.core.javascript.JsScope" ]
import org.junit.Test; import org.odlabs.wiquery.core.javascript.JsScope;
import org.junit.*; import org.odlabs.wiquery.core.javascript.*;
[ "org.junit", "org.odlabs.wiquery" ]
org.junit; org.odlabs.wiquery;
447,215
@Message(id = Message.NONE, value = "Added user '%s' with groups %s to file '%s'") String addedGroups(String username, String groups, String fileName);
@Message(id = Message.NONE, value = STR) String addedGroups(String username, String groups, String fileName);
/** * Message to inform user that the new user has been added to the groups file identified. * * @param username - The new username. * @param groups - The new groups. * @param fileName - The file the user has been added to. * * @return a {@link String} for the message. */
Message to inform user that the new user has been added to the groups file identified
addedGroups
{ "repo_name": "luck3y/wildfly-core", "path": "domain-management/src/main/java/org/jboss/as/domain/management/logging/DomainManagementLogger.java", "license": "lgpl-2.1", "size": 66410 }
[ "org.jboss.logging.annotations.Message" ]
import org.jboss.logging.annotations.Message;
import org.jboss.logging.annotations.*;
[ "org.jboss.logging" ]
org.jboss.logging;
707,536
public Observable<ServiceResponse<Void>> putEmptyWithServiceResponseAsync(DictionaryWrapper complexBody) { if (complexBody == null) { throw new IllegalArgumentException("Parameter complexBody is required and cannot be null."); }
Observable<ServiceResponse<Void>> function(DictionaryWrapper complexBody) { if (complexBody == null) { throw new IllegalArgumentException(STR); }
/** * Put complex types with dictionary property which is empty. * * @param complexBody Please put an empty dictionary * @return the {@link ServiceResponse} object if successful. */
Put complex types with dictionary property which is empty
putEmptyWithServiceResponseAsync
{ "repo_name": "matthchr/autorest", "path": "src/generator/AutoRest.Java.Tests/src/main/java/fixtures/bodycomplex/implementation/DictionarysImpl.java", "license": "mit", "size": 20041 }
[ "com.microsoft.rest.ServiceResponse" ]
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
224,981
protected void enterProductions(Token node) throws ParseException { }
void function(Token node) throws ParseException { }
/** * Called when entering a parse tree node. * * @param node the node being entered * * @throws ParseException if the node analysis discovered errors */
Called when entering a parse tree node
enterProductions
{ "repo_name": "runner-mei/mibble", "path": "src/main/java/net/percederberg/grammatica/GrammarAnalyzer.java", "license": "gpl-2.0", "size": 36326 }
[ "net.percederberg.grammatica.parser.ParseException", "net.percederberg.grammatica.parser.Token" ]
import net.percederberg.grammatica.parser.ParseException; import net.percederberg.grammatica.parser.Token;
import net.percederberg.grammatica.parser.*;
[ "net.percederberg.grammatica" ]
net.percederberg.grammatica;
2,618,256
public void deserialize(ByteBuf buf) { // User settings setActive(buf.readBoolean()); setInductorEngaged(buf.readBoolean(), false); setVentStatus(s_VentStatuses[buf.readInt()], false); setMaxIntakeRate(buf.readInt()); // Basic data setEnergyStored(buf.readFloat()); setRotorEnergy(buf.readFloat()); // Reportage energyGeneratedLastTick = buf.readFloat(); fluidConsumedLastTick = buf.readInt(); rotorEfficiencyLastTick = buf.readFloat(); // Fluid data int inputFluidID = buf.readInt(); int inputFluidAmt = buf.readInt(); int outputFluidID = buf.readInt(); int outputFluidAmt = buf.readInt(); if(inputFluidID == FLUID_NONE || inputFluidAmt <= 0) { tanks[TANK_INPUT].setFluid(null); } else { Fluid fluid = FluidRegistry.getFluid(inputFluidID); if(fluid == null) { BRLog.warning("[CLIENT] Multiblock Turbine received an unknown fluid of type %d, setting input tank to empty", inputFluidID); tanks[TANK_INPUT].setFluid(null); } else { tanks[TANK_INPUT].setFluid(new FluidStack(fluid, inputFluidAmt)); } } if(outputFluidID == FLUID_NONE || outputFluidAmt <= 0) { tanks[TANK_OUTPUT].setFluid(null); } else { Fluid fluid = FluidRegistry.getFluid(outputFluidID); if(fluid == null) { BRLog.warning("[CLIENT] Multiblock Turbine received an unknown fluid of type %d, setting output tank to empty", outputFluidID); tanks[TANK_OUTPUT].setFluid(null); } else { tanks[TANK_OUTPUT].setFluid(new FluidStack(fluid, outputFluidAmt)); } } }
void function(ByteBuf buf) { setActive(buf.readBoolean()); setInductorEngaged(buf.readBoolean(), false); setVentStatus(s_VentStatuses[buf.readInt()], false); setMaxIntakeRate(buf.readInt()); setEnergyStored(buf.readFloat()); setRotorEnergy(buf.readFloat()); energyGeneratedLastTick = buf.readFloat(); fluidConsumedLastTick = buf.readInt(); rotorEfficiencyLastTick = buf.readFloat(); int inputFluidID = buf.readInt(); int inputFluidAmt = buf.readInt(); int outputFluidID = buf.readInt(); int outputFluidAmt = buf.readInt(); if(inputFluidID == FLUID_NONE inputFluidAmt <= 0) { tanks[TANK_INPUT].setFluid(null); } else { Fluid fluid = FluidRegistry.getFluid(inputFluidID); if(fluid == null) { BRLog.warning(STR, inputFluidID); tanks[TANK_INPUT].setFluid(null); } else { tanks[TANK_INPUT].setFluid(new FluidStack(fluid, inputFluidAmt)); } } if(outputFluidID == FLUID_NONE outputFluidAmt <= 0) { tanks[TANK_OUTPUT].setFluid(null); } else { Fluid fluid = FluidRegistry.getFluid(outputFluidID); if(fluid == null) { BRLog.warning(STR, outputFluidID); tanks[TANK_OUTPUT].setFluid(null); } else { tanks[TANK_OUTPUT].setFluid(new FluidStack(fluid, outputFluidAmt)); } } }
/** * Used when a status packet arrives on the client. * @param buf ByteBuf containing serialized turbine data */
Used when a status packet arrives on the client
deserialize
{ "repo_name": "erogenousbeef/BigReactors", "path": "src/main/java/erogenousbeef/bigreactors/common/multiblock/MultiblockTurbine.java", "license": "mit", "size": 40143 }
[ "io.netty.buffer.ByteBuf", "net.minecraftforge.fluids.Fluid", "net.minecraftforge.fluids.FluidRegistry", "net.minecraftforge.fluids.FluidStack" ]
import io.netty.buffer.ByteBuf; import net.minecraftforge.fluids.Fluid; import net.minecraftforge.fluids.FluidRegistry; import net.minecraftforge.fluids.FluidStack;
import io.netty.buffer.*; import net.minecraftforge.fluids.*;
[ "io.netty.buffer", "net.minecraftforge.fluids" ]
io.netty.buffer; net.minecraftforge.fluids;
2,417,972
public void testupdateByRearrangeNodes() throws Exception { String testName = "updateByRearrangeNodes"; if (logger.isInfoEnabled()) { logger.info("\n\t\tRunning Test: " + testName); } // 1. find a tree: PhyloTree tree = (PhyloTree) loadObject(PhyloTree.class); //PhyloTree tree = (PhyloTree) loadObject(PhyloTree.class, 41L); String newick = tree.getNewickString(); Long id = tree.getId(); logger.info("tree found: id= " + id + "\n" + newick); // 2. test use the same newick: getFixture().updateByRearrangeNodes(id, newick); // 3. verify tree = (PhyloTree) loadObject(PhyloTree.class, id); String newick2 = tree.getNewickString(); logger.info("new newick:\n" + newick2); assertTrue(newick.equalsIgnoreCase(newick2)); if (logger.isInfoEnabled()) { logger.info(testName + " - end "); //$NON-NLS-1$ } }
void function() throws Exception { String testName = STR; if (logger.isInfoEnabled()) { logger.info(STR + testName); } PhyloTree tree = (PhyloTree) loadObject(PhyloTree.class); String newick = tree.getNewickString(); Long id = tree.getId(); logger.info(STR + id + "\n" + newick); getFixture().updateByRearrangeNodes(id, newick); tree = (PhyloTree) loadObject(PhyloTree.class, id); String newick2 = tree.getNewickString(); logger.info(STR + newick2); assertTrue(newick.equalsIgnoreCase(newick2)); if (logger.isInfoEnabled()) { logger.info(testName + STR); } }
/** * Test updateByRearrangeNodes */
Test updateByRearrangeNodes
testupdateByRearrangeNodes
{ "repo_name": "TreeBASE/treebasetest", "path": "treebase-core/src/test/java/org/cipres/treebase/service/tree/PhyloTreeServiceImplTest.java", "license": "bsd-3-clause", "size": 4998 }
[ "org.cipres.treebase.domain.tree.PhyloTree" ]
import org.cipres.treebase.domain.tree.PhyloTree;
import org.cipres.treebase.domain.tree.*;
[ "org.cipres.treebase" ]
org.cipres.treebase;
1,362,040
public List<String> getCallResults(String issueId, String contactId) { contactId = sanitizeContactId(contactId); Cursor c = getReadableDatabase().rawQuery("SELECT " + CallsColumns.RESULT + " FROM " + CALLS_TABLE_NAME + " WHERE " + CallsColumns.ISSUE_ID + " = ? AND " + CallsColumns.CONTACT_ID + " = ? GROUP BY " + CallsColumns.RESULT, new String[] {issueId, contactId}); List<String> result = new ArrayList<>(); while (c.moveToNext()) { result.add(c.getString(0)); } c.close(); return result; }
List<String> function(String issueId, String contactId) { contactId = sanitizeContactId(contactId); Cursor c = getReadableDatabase().rawQuery(STR + CallsColumns.RESULT + STR + CALLS_TABLE_NAME + STR + CallsColumns.ISSUE_ID + STR + CallsColumns.CONTACT_ID + STR + CallsColumns.RESULT, new String[] {issueId, contactId}); List<String> result = new ArrayList<>(); while (c.moveToNext()) { result.add(c.getString(0)); } c.close(); return result; }
/** * The types of calls made for a particular issue and contact. * @param issueId * @param contactId * @return A list of the call results for this issue and contact. */
The types of calls made for a particular issue and contact
getCallResults
{ "repo_name": "5calls/android", "path": "5calls/app/src/main/java/org/a5calls/android/a5calls/model/DatabaseHelper.java", "license": "mit", "size": 13134 }
[ "android.database.Cursor", "java.util.ArrayList", "java.util.List" ]
import android.database.Cursor; import java.util.ArrayList; import java.util.List;
import android.database.*; import java.util.*;
[ "android.database", "java.util" ]
android.database; java.util;
1,427,049
private void processForEnd() { byte eventProc = slaCalc.getEventProcessed(); LOG.debug("Job {0} has ended. endtime = [{1}]", slaCalc.getId(), slaCalc.getActualEnd()); if (isEndMiss()) { slaCalc.setSLAStatus(SLAStatus.MISS); } else { slaCalc.setSLAStatus(SLAStatus.MET); } if (eventProc != 8 && slaCalc.getActualStart() != null) { if ((eventProc & 1) == 0) { if (slaCalc.getExpectedStart() != null) { if (slaCalc.getExpectedStart().getTime() < slaCalc.getActualStart().getTime()) { slaCalc.setEventStatus(EventStatus.START_MISS); } else { slaCalc.setEventStatus(EventStatus.START_MET); } if (shouldAlert(slaCalc)) { queueEvent(new SLACalcStatus(slaCalc)); } } } slaCalc.setActualDuration(slaCalc.getActualEnd().getTime() - slaCalc.getActualStart().getTime()); if (((eventProc >> 1) & 1) == 0) { processDurationSLA(slaCalc.getExpectedDuration(), slaCalc.getActualDuration(), slaCalc); } } if (eventProc != 8 && eventProc < 4) { if (isEndMiss()) { slaCalc.setEventStatus(EventStatus.END_MISS); } else { slaCalc.setEventStatus(EventStatus.END_MET); } if (shouldAlert(slaCalc)) { queueEvent(new SLACalcStatus(slaCalc)); } } slaCalc.setEventProcessed(8); }
void function() { byte eventProc = slaCalc.getEventProcessed(); LOG.debug(STR, slaCalc.getId(), slaCalc.getActualEnd()); if (isEndMiss()) { slaCalc.setSLAStatus(SLAStatus.MISS); } else { slaCalc.setSLAStatus(SLAStatus.MET); } if (eventProc != 8 && slaCalc.getActualStart() != null) { if ((eventProc & 1) == 0) { if (slaCalc.getExpectedStart() != null) { if (slaCalc.getExpectedStart().getTime() < slaCalc.getActualStart().getTime()) { slaCalc.setEventStatus(EventStatus.START_MISS); } else { slaCalc.setEventStatus(EventStatus.START_MET); } if (shouldAlert(slaCalc)) { queueEvent(new SLACalcStatus(slaCalc)); } } } slaCalc.setActualDuration(slaCalc.getActualEnd().getTime() - slaCalc.getActualStart().getTime()); if (((eventProc >> 1) & 1) == 0) { processDurationSLA(slaCalc.getExpectedDuration(), slaCalc.getActualDuration(), slaCalc); } } if (eventProc != 8 && eventProc < 4) { if (isEndMiss()) { slaCalc.setEventStatus(EventStatus.END_MISS); } else { slaCalc.setEventStatus(EventStatus.END_MET); } if (shouldAlert(slaCalc)) { queueEvent(new SLACalcStatus(slaCalc)); } } slaCalc.setEventProcessed(8); }
/** * Process for end. */
Process for end
processForEnd
{ "repo_name": "cbaenziger/oozie", "path": "core/src/main/java/org/apache/oozie/command/sla/SLAJobEventXCommand.java", "license": "apache-2.0", "size": 10161 }
[ "org.apache.oozie.client.event.SLAEvent", "org.apache.oozie.sla.SLACalcStatus" ]
import org.apache.oozie.client.event.SLAEvent; import org.apache.oozie.sla.SLACalcStatus;
import org.apache.oozie.client.event.*; import org.apache.oozie.sla.*;
[ "org.apache.oozie" ]
org.apache.oozie;
442,603
private int processReceipt(SDataCfd cfd, double dSubsidy, double dTax, final int nAction) { boolean annuled = false; try { double dXmlSubsidy = this.getSubsidyFromXml(cfd.getDocXml()); double dXmlTax = this.getTaxFromXml(cfd.getDocXml()); int iValid = this.validateSubsidyAndTax(dSubsidy, dTax, dXmlSubsidy, dXmlTax); if (iValid == ERROR) { return ERROR; } int receiptKey [] = new int[] { cfd.getFkPayrollReceiptPayrollId_n(), cfd.getFkPayrollReceiptEmployeeId_n(), cfd.getFkPayrollReceiptIssueId_n() }; SDbPayrollReceipt payrollReceipt = new SDbPayrollReceipt(); payrollReceipt.read(miClient.getSession(), receiptKey); SDbPayrollReceiptIssue issue = payrollReceipt.getChildPayrollReceiptIssue(); int number = SHrsUtils.getPayrollReceiptNextNumber(miClient.getSession(), issue.getNumberSeries()); SDbPayrollReceiptIssue issuen = issue.clone(); writeXml(cfd.getUuid(), cfd.getDocXml()); System.out.println(cfd.getUuid()); //anular if (! this.annulCfd(cfd, issue)) { System.out.println("ERROR, NO ANULADO"); } else { System.out.println("ANULADO"); } if (nAction == CANCEL_RECEIPTS) { return ANNULED; } annuled = true; switch (iValid) { case CHANGE_CAUSED_SUBSIDY: break; case ADD_OTHER_SUBSIDY: dXmlSubsidy = 0.01d; issuen.setPayment_r(issuen.getPayment_r() + dXmlSubsidy); issuen.setUuidRelated(cfd.getUuid()); issuen.setDateOfIssue(new Date()); break; } //rIssue issuen.setPkIssueId(0); issuen.setRegistryNew(true); issuen.setNumber(number); issuen.setFkReceiptStatusId(SDataConstantsSys.TRNS_ST_DPS_EMITED); issuen.save(miClient.getSession()); receiptKey[2] = issuen.getPkIssueId(); //timbrar y enviar this.computeSignCfdi(miClient.getSession(), receiptKey, dSubsidy, dXmlSubsidy, cfd.getUuid()); return SUCCESS; } catch (Exception ex) { Logger.getLogger(SReceiptsR.class.getName()).log(Level.SEVERE, null, ex); if (annuled) { return ANNULED; } return ERROR; } }
int function(SDataCfd cfd, double dSubsidy, double dTax, final int nAction) { boolean annuled = false; try { double dXmlSubsidy = this.getSubsidyFromXml(cfd.getDocXml()); double dXmlTax = this.getTaxFromXml(cfd.getDocXml()); int iValid = this.validateSubsidyAndTax(dSubsidy, dTax, dXmlSubsidy, dXmlTax); if (iValid == ERROR) { return ERROR; } int receiptKey [] = new int[] { cfd.getFkPayrollReceiptPayrollId_n(), cfd.getFkPayrollReceiptEmployeeId_n(), cfd.getFkPayrollReceiptIssueId_n() }; SDbPayrollReceipt payrollReceipt = new SDbPayrollReceipt(); payrollReceipt.read(miClient.getSession(), receiptKey); SDbPayrollReceiptIssue issue = payrollReceipt.getChildPayrollReceiptIssue(); int number = SHrsUtils.getPayrollReceiptNextNumber(miClient.getSession(), issue.getNumberSeries()); SDbPayrollReceiptIssue issuen = issue.clone(); writeXml(cfd.getUuid(), cfd.getDocXml()); System.out.println(cfd.getUuid()); if (! this.annulCfd(cfd, issue)) { System.out.println(STR); } else { System.out.println(STR); } if (nAction == CANCEL_RECEIPTS) { return ANNULED; } annuled = true; switch (iValid) { case CHANGE_CAUSED_SUBSIDY: break; case ADD_OTHER_SUBSIDY: dXmlSubsidy = 0.01d; issuen.setPayment_r(issuen.getPayment_r() + dXmlSubsidy); issuen.setUuidRelated(cfd.getUuid()); issuen.setDateOfIssue(new Date()); break; } issuen.setPkIssueId(0); issuen.setRegistryNew(true); issuen.setNumber(number); issuen.setFkReceiptStatusId(SDataConstantsSys.TRNS_ST_DPS_EMITED); issuen.save(miClient.getSession()); receiptKey[2] = issuen.getPkIssueId(); this.computeSignCfdi(miClient.getSession(), receiptKey, dSubsidy, dXmlSubsidy, cfd.getUuid()); return SUCCESS; } catch (Exception ex) { Logger.getLogger(SReceiptsR.class.getName()).log(Level.SEVERE, null, ex); if (annuled) { return ANNULED; } return ERROR; } }
/** * annul and issue the cfd * * @param cfd * @param dSubsidy in file * @param dTax in file * * @return integer: SUCCESS if the cfd was annuled and issued ANNULED if the process only was annuled but not issued ERROR */
annul and issue the cfd
processReceipt
{ "repo_name": "swaplicado/siie32", "path": "src/erp/mod/hrs/utils/SReceiptsR.java", "license": "mit", "size": 29177 }
[ "java.util.Date", "java.util.logging.Level", "java.util.logging.Logger" ]
import java.util.Date; import java.util.logging.Level; import java.util.logging.Logger;
import java.util.*; import java.util.logging.*;
[ "java.util" ]
java.util;
1,444,770
public synchronized DefaultExtractorsFactory setTsExtractorMode(@TsExtractor.Mode int mode) { tsMode = mode; return this; }
synchronized DefaultExtractorsFactory function(@TsExtractor.Mode int mode) { tsMode = mode; return this; }
/** * Sets the mode for {@link TsExtractor} instances created by the factory. * * @see TsExtractor#TsExtractor(int, TimestampAdjuster, TsPayloadReader.Factory) * @param mode The mode to use. * @return The factory, for convenience. */
Sets the mode for <code>TsExtractor</code> instances created by the factory
setTsExtractorMode
{ "repo_name": "DrKLO/Telegram", "path": "TMessagesProj/src/main/java/com/google/android/exoplayer2/extractor/DefaultExtractorsFactory.java", "license": "gpl-2.0", "size": 10081 }
[ "com.google.android.exoplayer2.extractor.ts.TsExtractor" ]
import com.google.android.exoplayer2.extractor.ts.TsExtractor;
import com.google.android.exoplayer2.extractor.ts.*;
[ "com.google.android" ]
com.google.android;
2,689,105
public boolean save(@NonNull final OptionalUpdate optionalUpdate) { if (StringUtils.isBlank(optionalUpdate.getOptionalVersion())) { return false; } return prefs.edit().putString(KEY_OPTIONAL_UPDATE, optionalUpdate.getOptionalVersion()).commit(); }
boolean function(@NonNull final OptionalUpdate optionalUpdate) { if (StringUtils.isBlank(optionalUpdate.getOptionalVersion())) { return false; } return prefs.edit().putString(KEY_OPTIONAL_UPDATE, optionalUpdate.getOptionalVersion()).commit(); }
/** * Saves the provided {@link OptionalUpdate} to shared preferences. * * @param optionalUpdate the provided current optional update information * @return {@code true} if {@code optionalUpdate} was successfully saved */
Saves the provided <code>OptionalUpdate</code> to shared preferences
save
{ "repo_name": "btkelly/gandalf", "path": "gandalf/src/main/java/io/github/btkelly/gandalf/checker/DefaultHistoryChecker.java", "license": "apache-2.0", "size": 3582 }
[ "androidx.annotation.NonNull", "io.github.btkelly.gandalf.models.OptionalUpdate", "io.github.btkelly.gandalf.utils.StringUtils" ]
import androidx.annotation.NonNull; import io.github.btkelly.gandalf.models.OptionalUpdate; import io.github.btkelly.gandalf.utils.StringUtils;
import androidx.annotation.*; import io.github.btkelly.gandalf.models.*; import io.github.btkelly.gandalf.utils.*;
[ "androidx.annotation", "io.github.btkelly" ]
androidx.annotation; io.github.btkelly;
1,557,349
public static Collection<ClusterNode> aliveRemoteServerNodesWithCaches(final GridCacheSharedContext ctx, AffinityTopologyVersion topOrder) { return ctx.discovery().aliveRemoteServerNodesWithCaches(topOrder); }
static Collection<ClusterNode> function(final GridCacheSharedContext ctx, AffinityTopologyVersion topOrder) { return ctx.discovery().aliveRemoteServerNodesWithCaches(topOrder); }
/** * Gets alive remote nodes with at least one cache configured. * * @param ctx Cache context. * @param topOrder Maximum allowed node order. * @return Affinity nodes. */
Gets alive remote nodes with at least one cache configured
aliveRemoteServerNodesWithCaches
{ "repo_name": "DoudTechData/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/GridCacheUtils.java", "license": "apache-2.0", "size": 59639 }
[ "java.util.Collection", "org.apache.ignite.cluster.ClusterNode", "org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion" ]
import java.util.Collection; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import java.util.*; import org.apache.ignite.cluster.*; import org.apache.ignite.internal.processors.affinity.*;
[ "java.util", "org.apache.ignite" ]
java.util; org.apache.ignite;
50,061
private void addDummiesAndEdges(ICrossMinimizerGraph graph) { int dummyCtr = 0; Set<ISugiyamaEdge> edges = graph.getEdgeSet(); Set<ISugiyamaEdge> replacedEdges = new HashSet<>(); for(ISugiyamaEdge edge : edges){ ISugiyamaVertex source = edge.getSource(); ISugiyamaVertex target = edge.getTarget(); if (Objects.equals(source.getID(), target.getID())) { continue; } int lowerLayer = source.getLayer(); int upperLayer = target.getLayer(); int diff = upperLayer - lowerLayer; assert(diff >= 0); //diff must not be lower than one, both vertices must not be on the same layer! assert(graph.getLayer(lowerLayer).contains(source)); assert(graph.getLayer(upperLayer).contains(target)); if(diff>1){ //need to add #diff-1 dummy vertices List<ISugiyamaVertex> dummies = new LinkedList<>(); List<ISugiyamaEdge> supplementEdges = new LinkedList<>(); replacedEdges.add(edge); // the distance of both vertices of this edge is greater than 1 so it must be replaced ISugiyamaVertex nv = null; // through dummy vertices and supplement edges. add it here to remove it later from the original edge set. ISugiyamaEdge ne; int c = 0; for(int l = lowerLayer + 1; l <= upperLayer; l++){ c++; ISugiyamaVertex dummy = null; if(l==lowerLayer+1){ nv = graph.createDummy("d"+c+ '(' +source.getName()+"->"+target.getName()+ ')', "", lowerLayer + 1); //first dummy vertex created dummyCtr++; dummy = nv; ne = graph.createSupplementEdge(edge.getName()+ '(' +c+ ')', "", source, nv); //first dummy edge created supplementEdges.add(ne); graph.assignToLayer(nv, l); }else if(l==upperLayer){ ne = graph.createSupplementEdge(edge.getName() + "(e" + c + ')', "", nv, target); supplementEdges.add(ne); }else{ ISugiyamaVertex temp = nv; //temporary ISugiyamaVertex so that the new created vertex is always the one with the variable nv nv = graph.createDummy("d"+c+ '(' +source.getName()+"->"+target.getName()+ ')', "", c); dummyCtr++; dummy = nv; ne = graph.createSupplementEdge(edge.getName()+ '(' +c+ ')', "", temp, nv); supplementEdges.add(ne); graph.assignToLayer(nv, l); } if (dummy != null) { dummies.add(dummy); } } graph.createSupplementPath(edge, dummies, supplementEdges); } } logger.info("created " + dummyCtr + " dummy vertices"); edges.removeAll(replacedEdges); //remove all replaced edges from the original edge set }
void function(ICrossMinimizerGraph graph) { int dummyCtr = 0; Set<ISugiyamaEdge> edges = graph.getEdgeSet(); Set<ISugiyamaEdge> replacedEdges = new HashSet<>(); for(ISugiyamaEdge edge : edges){ ISugiyamaVertex source = edge.getSource(); ISugiyamaVertex target = edge.getTarget(); if (Objects.equals(source.getID(), target.getID())) { continue; } int lowerLayer = source.getLayer(); int upperLayer = target.getLayer(); int diff = upperLayer - lowerLayer; assert(diff >= 0); assert(graph.getLayer(lowerLayer).contains(source)); assert(graph.getLayer(upperLayer).contains(target)); if(diff>1){ List<ISugiyamaVertex> dummies = new LinkedList<>(); List<ISugiyamaEdge> supplementEdges = new LinkedList<>(); replacedEdges.add(edge); ISugiyamaVertex nv = null; ISugiyamaEdge ne; int c = 0; for(int l = lowerLayer + 1; l <= upperLayer; l++){ c++; ISugiyamaVertex dummy = null; if(l==lowerLayer+1){ nv = graph.createDummy("dSTR->"+target.getName()+ ')', STRSTR(eSTRSTRdSTR->STRSTRSTRcreated STR dummy vertices"); edges.removeAll(replacedEdges); }
/** * This method adds dummy vertices between the two vertices of an edge, on every layer that this edge skips. * the dummy vertices are connected through supplement edges with each other and the source and target vertex of the edge. * * @param graph input graph to add dummy vertices and supplement edges to */
This method adds dummy vertices between the two vertices of an edge, on every layer that this edge skips. the dummy vertices are connected through supplement edges with each other and the source and target vertex of the edge
addDummiesAndEdges
{ "repo_name": "HiWiGAns/GraphVonAnsicht", "path": "plugins/sugiyama/src/main/java/edu/kit/student/sugiyama/steps/CrossMinimizer.java", "license": "lgpl-3.0", "size": 14378 }
[ "edu.kit.student.sugiyama.graph.ICrossMinimizerGraph", "edu.kit.student.sugiyama.graph.ISugiyamaEdge", "edu.kit.student.sugiyama.graph.ISugiyamaVertex", "java.util.HashSet", "java.util.LinkedList", "java.util.List", "java.util.Objects", "java.util.Set" ]
import edu.kit.student.sugiyama.graph.ICrossMinimizerGraph; import edu.kit.student.sugiyama.graph.ISugiyamaEdge; import edu.kit.student.sugiyama.graph.ISugiyamaVertex; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Objects; import java.util.Set;
import edu.kit.student.sugiyama.graph.*; import java.util.*;
[ "edu.kit.student", "java.util" ]
edu.kit.student; java.util;
1,996,721
public static nonBroadcastIndFlagType fromPerAligned(byte[] encodedBytes) { nonBroadcastIndFlagType result = new nonBroadcastIndFlagType(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; }
static nonBroadcastIndFlagType function(byte[] encodedBytes) { nonBroadcastIndFlagType result = new nonBroadcastIndFlagType(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; }
/** * Creates a new nonBroadcastIndFlagType from encoded stream. */
Creates a new nonBroadcastIndFlagType from encoded stream
fromPerAligned
{ "repo_name": "google/supl-client", "path": "src/main/java/com/google/location/suplclient/asn1/supl2/rrlp_components_ver12/GANSSNavModel.java", "license": "apache-2.0", "size": 10343 }
[ "com.google.location.suplclient.asn1.base.BitStreamReader" ]
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.*;
[ "com.google.location" ]
com.google.location;
2,040,754
public void enableEvents() { m_eventsEnabled = true; } private Object[] m_array = new Object[AssignmentField.MAX_VALUE]; private boolean m_eventsEnabled = true; private TimephasedWorkData m_timephasedWork; private List<TimephasedCost> m_timephasedCost; private TimephasedWorkData m_timephasedActualWork; private List<TimephasedCost> m_timephasedActualCost; private TimephasedWorkData m_timephasedOvertimeWork; private TimephasedWorkData m_timephasedActualOvertimeWork; private List<FieldListener> m_listeners; private TimephasedWorkData[] m_timephasedBaselineWork = new TimephasedWorkData[11]; private TimephasedCostData[] m_timephasedBaselineCost = new TimephasedCostData[11]; private Task m_task; private ResourceAssignmentWorkgroupFields m_workgroup; public static final Double DEFAULT_UNITS = Double.valueOf(100); private static final AssignmentField[] BASELINE_COSTS = { AssignmentField.BASELINE1_COST, AssignmentField.BASELINE2_COST, AssignmentField.BASELINE3_COST, AssignmentField.BASELINE4_COST, AssignmentField.BASELINE5_COST, AssignmentField.BASELINE6_COST, AssignmentField.BASELINE7_COST, AssignmentField.BASELINE8_COST, AssignmentField.BASELINE9_COST, AssignmentField.BASELINE10_COST }; private static final AssignmentField[] BASELINE_WORKS = { AssignmentField.BASELINE1_WORK, AssignmentField.BASELINE2_WORK, AssignmentField.BASELINE3_WORK, AssignmentField.BASELINE4_WORK, AssignmentField.BASELINE5_WORK, AssignmentField.BASELINE6_WORK, AssignmentField.BASELINE7_WORK, AssignmentField.BASELINE8_WORK, AssignmentField.BASELINE9_WORK, AssignmentField.BASELINE10_WORK }; private static final AssignmentField[] BASELINE_STARTS = { AssignmentField.BASELINE1_START, AssignmentField.BASELINE2_START, AssignmentField.BASELINE3_START, AssignmentField.BASELINE4_START, AssignmentField.BASELINE5_START, AssignmentField.BASELINE6_START, AssignmentField.BASELINE7_START, AssignmentField.BASELINE8_START, AssignmentField.BASELINE9_START, AssignmentField.BASELINE10_START }; private static final AssignmentField[] BASELINE_FINISHES = { AssignmentField.BASELINE1_FINISH, AssignmentField.BASELINE2_FINISH, AssignmentField.BASELINE3_FINISH, AssignmentField.BASELINE4_FINISH, AssignmentField.BASELINE5_FINISH, AssignmentField.BASELINE6_FINISH, AssignmentField.BASELINE7_FINISH, AssignmentField.BASELINE8_FINISH, AssignmentField.BASELINE9_FINISH, AssignmentField.BASELINE10_FINISH }; private static final AssignmentField[] BASELINE_BUDGET_COSTS = { AssignmentField.BASELINE1_BUDGET_COST, AssignmentField.BASELINE2_BUDGET_COST, AssignmentField.BASELINE3_BUDGET_COST, AssignmentField.BASELINE4_BUDGET_COST, AssignmentField.BASELINE5_BUDGET_COST, AssignmentField.BASELINE6_BUDGET_COST, AssignmentField.BASELINE7_BUDGET_COST, AssignmentField.BASELINE8_BUDGET_COST, AssignmentField.BASELINE9_BUDGET_COST, AssignmentField.BASELINE10_BUDGET_COST }; private static final AssignmentField[] BASELINE_BUDGET_WORKS = { AssignmentField.BASELINE1_BUDGET_WORK, AssignmentField.BASELINE2_BUDGET_WORK, AssignmentField.BASELINE3_BUDGET_WORK, AssignmentField.BASELINE4_BUDGET_WORK, AssignmentField.BASELINE5_BUDGET_WORK, AssignmentField.BASELINE6_BUDGET_WORK, AssignmentField.BASELINE7_BUDGET_WORK, AssignmentField.BASELINE8_BUDGET_WORK, AssignmentField.BASELINE9_BUDGET_WORK, AssignmentField.BASELINE10_BUDGET_WORK }; private static final AssignmentField[] CUSTOM_TEXT = { AssignmentField.TEXT1, AssignmentField.TEXT2, AssignmentField.TEXT3, AssignmentField.TEXT4, AssignmentField.TEXT5, AssignmentField.TEXT6, AssignmentField.TEXT7, AssignmentField.TEXT8, AssignmentField.TEXT9, 
AssignmentField.TEXT10, AssignmentField.TEXT11, AssignmentField.TEXT12, AssignmentField.TEXT13, AssignmentField.TEXT14, AssignmentField.TEXT15, AssignmentField.TEXT16, AssignmentField.TEXT17, AssignmentField.TEXT18, AssignmentField.TEXT19, AssignmentField.TEXT20, AssignmentField.TEXT21, AssignmentField.TEXT22, AssignmentField.TEXT23, AssignmentField.TEXT24, AssignmentField.TEXT25, AssignmentField.TEXT26, AssignmentField.TEXT27, AssignmentField.TEXT28, AssignmentField.TEXT29, AssignmentField.TEXT30 }; private static final AssignmentField[] CUSTOM_START = { AssignmentField.START1, AssignmentField.START2, AssignmentField.START3, AssignmentField.START4, AssignmentField.START5, AssignmentField.START6, AssignmentField.START7, AssignmentField.START8, AssignmentField.START9, AssignmentField.START10 }; private static final AssignmentField[] CUSTOM_FINISH = { AssignmentField.FINISH1, AssignmentField.FINISH2, AssignmentField.FINISH3, AssignmentField.FINISH4, AssignmentField.FINISH5, AssignmentField.FINISH6, AssignmentField.FINISH7, AssignmentField.FINISH8, AssignmentField.FINISH9, AssignmentField.FINISH10 }; private static final AssignmentField[] CUSTOM_DATE = { AssignmentField.DATE1, AssignmentField.DATE2, AssignmentField.DATE3, AssignmentField.DATE4, AssignmentField.DATE5, AssignmentField.DATE6, AssignmentField.DATE7, AssignmentField.DATE8, AssignmentField.DATE9, AssignmentField.DATE10 }; private static final AssignmentField[] CUSTOM_NUMBER = { AssignmentField.NUMBER1, AssignmentField.NUMBER2, AssignmentField.NUMBER3, AssignmentField.NUMBER4, AssignmentField.NUMBER5, AssignmentField.NUMBER6, AssignmentField.NUMBER7, AssignmentField.NUMBER8, AssignmentField.NUMBER9, AssignmentField.NUMBER10, AssignmentField.NUMBER11, AssignmentField.NUMBER12, AssignmentField.NUMBER13, AssignmentField.NUMBER14, AssignmentField.NUMBER15, AssignmentField.NUMBER16, AssignmentField.NUMBER17, AssignmentField.NUMBER18, AssignmentField.NUMBER19, AssignmentField.NUMBER20 }; private static final AssignmentField[] CUSTOM_DURATION = { AssignmentField.DURATION1, AssignmentField.DURATION2, AssignmentField.DURATION3, AssignmentField.DURATION4, AssignmentField.DURATION5, AssignmentField.DURATION6, AssignmentField.DURATION7, AssignmentField.DURATION8, AssignmentField.DURATION9, AssignmentField.DURATION10 }; private static final AssignmentField[] CUSTOM_COST = { AssignmentField.COST1, AssignmentField.COST2, AssignmentField.COST3, AssignmentField.COST4, AssignmentField.COST5, AssignmentField.COST6, AssignmentField.COST7, AssignmentField.COST8, AssignmentField.COST9, AssignmentField.COST10 }; private static final AssignmentField[] CUSTOM_FLAG = { AssignmentField.FLAG1, AssignmentField.FLAG2, AssignmentField.FLAG3, AssignmentField.FLAG4, AssignmentField.FLAG5, AssignmentField.FLAG6, AssignmentField.FLAG7, AssignmentField.FLAG8, AssignmentField.FLAG9, AssignmentField.FLAG10, AssignmentField.FLAG11, AssignmentField.FLAG12, AssignmentField.FLAG13, AssignmentField.FLAG14, AssignmentField.FLAG15, AssignmentField.FLAG16, AssignmentField.FLAG17, AssignmentField.FLAG18, AssignmentField.FLAG19, AssignmentField.FLAG20 }; private static final AssignmentField[] ENTERPRISE_COST = { AssignmentField.ENTERPRISE_COST1, AssignmentField.ENTERPRISE_COST2, AssignmentField.ENTERPRISE_COST3, AssignmentField.ENTERPRISE_COST4, AssignmentField.ENTERPRISE_COST5, AssignmentField.ENTERPRISE_COST6, AssignmentField.ENTERPRISE_COST7, AssignmentField.ENTERPRISE_COST8, AssignmentField.ENTERPRISE_COST9, AssignmentField.ENTERPRISE_COST10 }; private static final 
AssignmentField[] ENTERPRISE_DATE = { AssignmentField.ENTERPRISE_DATE1, AssignmentField.ENTERPRISE_DATE2, AssignmentField.ENTERPRISE_DATE3, AssignmentField.ENTERPRISE_DATE4, AssignmentField.ENTERPRISE_DATE5, AssignmentField.ENTERPRISE_DATE6, AssignmentField.ENTERPRISE_DATE7, AssignmentField.ENTERPRISE_DATE8, AssignmentField.ENTERPRISE_DATE9, AssignmentField.ENTERPRISE_DATE10, AssignmentField.ENTERPRISE_DATE11, AssignmentField.ENTERPRISE_DATE12, AssignmentField.ENTERPRISE_DATE13, AssignmentField.ENTERPRISE_DATE14, AssignmentField.ENTERPRISE_DATE15, AssignmentField.ENTERPRISE_DATE16, AssignmentField.ENTERPRISE_DATE17, AssignmentField.ENTERPRISE_DATE18, AssignmentField.ENTERPRISE_DATE19, AssignmentField.ENTERPRISE_DATE20, AssignmentField.ENTERPRISE_DATE21, AssignmentField.ENTERPRISE_DATE22, AssignmentField.ENTERPRISE_DATE23, AssignmentField.ENTERPRISE_DATE24, AssignmentField.ENTERPRISE_DATE25, AssignmentField.ENTERPRISE_DATE26, AssignmentField.ENTERPRISE_DATE27, AssignmentField.ENTERPRISE_DATE28, AssignmentField.ENTERPRISE_DATE29, AssignmentField.ENTERPRISE_DATE30 }; private static final AssignmentField[] ENTERPRISE_DURATION = { AssignmentField.ENTERPRISE_DURATION1, AssignmentField.ENTERPRISE_DURATION2, AssignmentField.ENTERPRISE_DURATION3, AssignmentField.ENTERPRISE_DURATION4, AssignmentField.ENTERPRISE_DURATION5, AssignmentField.ENTERPRISE_DURATION6, AssignmentField.ENTERPRISE_DURATION7, AssignmentField.ENTERPRISE_DURATION8, AssignmentField.ENTERPRISE_DURATION9, AssignmentField.ENTERPRISE_DURATION10 }; private static final AssignmentField[] ENTERPRISE_FLAG = { AssignmentField.ENTERPRISE_FLAG1, AssignmentField.ENTERPRISE_FLAG2, AssignmentField.ENTERPRISE_FLAG3, AssignmentField.ENTERPRISE_FLAG4, AssignmentField.ENTERPRISE_FLAG5, AssignmentField.ENTERPRISE_FLAG6, AssignmentField.ENTERPRISE_FLAG7, AssignmentField.ENTERPRISE_FLAG8, AssignmentField.ENTERPRISE_FLAG9, AssignmentField.ENTERPRISE_FLAG10, AssignmentField.ENTERPRISE_FLAG11, AssignmentField.ENTERPRISE_FLAG12, AssignmentField.ENTERPRISE_FLAG13, AssignmentField.ENTERPRISE_FLAG14, AssignmentField.ENTERPRISE_FLAG15, AssignmentField.ENTERPRISE_FLAG16, AssignmentField.ENTERPRISE_FLAG17, AssignmentField.ENTERPRISE_FLAG18, AssignmentField.ENTERPRISE_FLAG19, AssignmentField.ENTERPRISE_FLAG20 }; private static final AssignmentField[] ENTERPRISE_NUMBER = { AssignmentField.ENTERPRISE_NUMBER1, AssignmentField.ENTERPRISE_NUMBER2, AssignmentField.ENTERPRISE_NUMBER3, AssignmentField.ENTERPRISE_NUMBER4, AssignmentField.ENTERPRISE_NUMBER5, AssignmentField.ENTERPRISE_NUMBER6, AssignmentField.ENTERPRISE_NUMBER7, AssignmentField.ENTERPRISE_NUMBER8, AssignmentField.ENTERPRISE_NUMBER9, AssignmentField.ENTERPRISE_NUMBER10, AssignmentField.ENTERPRISE_NUMBER11, AssignmentField.ENTERPRISE_NUMBER12, AssignmentField.ENTERPRISE_NUMBER13, AssignmentField.ENTERPRISE_NUMBER14, AssignmentField.ENTERPRISE_NUMBER15, AssignmentField.ENTERPRISE_NUMBER16, AssignmentField.ENTERPRISE_NUMBER17, AssignmentField.ENTERPRISE_NUMBER18, AssignmentField.ENTERPRISE_NUMBER19, AssignmentField.ENTERPRISE_NUMBER20, AssignmentField.ENTERPRISE_NUMBER21, AssignmentField.ENTERPRISE_NUMBER22, AssignmentField.ENTERPRISE_NUMBER23, AssignmentField.ENTERPRISE_NUMBER24, AssignmentField.ENTERPRISE_NUMBER25, AssignmentField.ENTERPRISE_NUMBER26, AssignmentField.ENTERPRISE_NUMBER27, AssignmentField.ENTERPRISE_NUMBER28, AssignmentField.ENTERPRISE_NUMBER29, AssignmentField.ENTERPRISE_NUMBER30, AssignmentField.ENTERPRISE_NUMBER31, AssignmentField.ENTERPRISE_NUMBER32, AssignmentField.ENTERPRISE_NUMBER33, 
AssignmentField.ENTERPRISE_NUMBER34, AssignmentField.ENTERPRISE_NUMBER35, AssignmentField.ENTERPRISE_NUMBER36, AssignmentField.ENTERPRISE_NUMBER37, AssignmentField.ENTERPRISE_NUMBER38, AssignmentField.ENTERPRISE_NUMBER39, AssignmentField.ENTERPRISE_NUMBER40 }; private static final AssignmentField[] ENTERPRISE_TEXT = { AssignmentField.ENTERPRISE_TEXT1, AssignmentField.ENTERPRISE_TEXT2, AssignmentField.ENTERPRISE_TEXT3, AssignmentField.ENTERPRISE_TEXT4, AssignmentField.ENTERPRISE_TEXT5, AssignmentField.ENTERPRISE_TEXT6, AssignmentField.ENTERPRISE_TEXT7, AssignmentField.ENTERPRISE_TEXT8, AssignmentField.ENTERPRISE_TEXT9, AssignmentField.ENTERPRISE_TEXT10, AssignmentField.ENTERPRISE_TEXT11, AssignmentField.ENTERPRISE_TEXT12, AssignmentField.ENTERPRISE_TEXT13, AssignmentField.ENTERPRISE_TEXT14, AssignmentField.ENTERPRISE_TEXT15, AssignmentField.ENTERPRISE_TEXT16, AssignmentField.ENTERPRISE_TEXT17, AssignmentField.ENTERPRISE_TEXT18, AssignmentField.ENTERPRISE_TEXT19, AssignmentField.ENTERPRISE_TEXT20, AssignmentField.ENTERPRISE_TEXT21, AssignmentField.ENTERPRISE_TEXT22, AssignmentField.ENTERPRISE_TEXT23, AssignmentField.ENTERPRISE_TEXT24, AssignmentField.ENTERPRISE_TEXT25, AssignmentField.ENTERPRISE_TEXT26, AssignmentField.ENTERPRISE_TEXT27, AssignmentField.ENTERPRISE_TEXT28, AssignmentField.ENTERPRISE_TEXT29, AssignmentField.ENTERPRISE_TEXT30, AssignmentField.ENTERPRISE_TEXT31, AssignmentField.ENTERPRISE_TEXT32, AssignmentField.ENTERPRISE_TEXT33, AssignmentField.ENTERPRISE_TEXT34, AssignmentField.ENTERPRISE_TEXT35, AssignmentField.ENTERPRISE_TEXT36, AssignmentField.ENTERPRISE_TEXT37, AssignmentField.ENTERPRISE_TEXT38, AssignmentField.ENTERPRISE_TEXT39, AssignmentField.ENTERPRISE_TEXT40 }; private static final AssignmentField[] ENTERPRISE_CUSTOM_FIELD = { AssignmentField.ENTERPRISE_CUSTOM_FIELD1, AssignmentField.ENTERPRISE_CUSTOM_FIELD2, AssignmentField.ENTERPRISE_CUSTOM_FIELD3, AssignmentField.ENTERPRISE_CUSTOM_FIELD4, AssignmentField.ENTERPRISE_CUSTOM_FIELD5, AssignmentField.ENTERPRISE_CUSTOM_FIELD6, AssignmentField.ENTERPRISE_CUSTOM_FIELD7, AssignmentField.ENTERPRISE_CUSTOM_FIELD8, AssignmentField.ENTERPRISE_CUSTOM_FIELD9, AssignmentField.ENTERPRISE_CUSTOM_FIELD10, AssignmentField.ENTERPRISE_CUSTOM_FIELD11, AssignmentField.ENTERPRISE_CUSTOM_FIELD12, AssignmentField.ENTERPRISE_CUSTOM_FIELD13, AssignmentField.ENTERPRISE_CUSTOM_FIELD14, AssignmentField.ENTERPRISE_CUSTOM_FIELD15, AssignmentField.ENTERPRISE_CUSTOM_FIELD16, AssignmentField.ENTERPRISE_CUSTOM_FIELD17, AssignmentField.ENTERPRISE_CUSTOM_FIELD18, AssignmentField.ENTERPRISE_CUSTOM_FIELD19, AssignmentField.ENTERPRISE_CUSTOM_FIELD20, AssignmentField.ENTERPRISE_CUSTOM_FIELD21, AssignmentField.ENTERPRISE_CUSTOM_FIELD22, AssignmentField.ENTERPRISE_CUSTOM_FIELD23, AssignmentField.ENTERPRISE_CUSTOM_FIELD24, AssignmentField.ENTERPRISE_CUSTOM_FIELD25, AssignmentField.ENTERPRISE_CUSTOM_FIELD26, AssignmentField.ENTERPRISE_CUSTOM_FIELD27, AssignmentField.ENTERPRISE_CUSTOM_FIELD28, AssignmentField.ENTERPRISE_CUSTOM_FIELD29, AssignmentField.ENTERPRISE_CUSTOM_FIELD30, AssignmentField.ENTERPRISE_CUSTOM_FIELD31, AssignmentField.ENTERPRISE_CUSTOM_FIELD32, AssignmentField.ENTERPRISE_CUSTOM_FIELD33, AssignmentField.ENTERPRISE_CUSTOM_FIELD34, AssignmentField.ENTERPRISE_CUSTOM_FIELD35, AssignmentField.ENTERPRISE_CUSTOM_FIELD36, AssignmentField.ENTERPRISE_CUSTOM_FIELD37, AssignmentField.ENTERPRISE_CUSTOM_FIELD38, AssignmentField.ENTERPRISE_CUSTOM_FIELD39, AssignmentField.ENTERPRISE_CUSTOM_FIELD40, AssignmentField.ENTERPRISE_CUSTOM_FIELD41, 
AssignmentField.ENTERPRISE_CUSTOM_FIELD42, AssignmentField.ENTERPRISE_CUSTOM_FIELD43, AssignmentField.ENTERPRISE_CUSTOM_FIELD44, AssignmentField.ENTERPRISE_CUSTOM_FIELD45, AssignmentField.ENTERPRISE_CUSTOM_FIELD46, AssignmentField.ENTERPRISE_CUSTOM_FIELD47, AssignmentField.ENTERPRISE_CUSTOM_FIELD48, AssignmentField.ENTERPRISE_CUSTOM_FIELD49, AssignmentField.ENTERPRISE_CUSTOM_FIELD50 };
void function() { m_eventsEnabled = true; } private Object[] m_array = new Object[AssignmentField.MAX_VALUE]; private boolean m_eventsEnabled = true; private TimephasedWorkData m_timephasedWork; private List<TimephasedCost> m_timephasedCost; private TimephasedWorkData m_timephasedActualWork; private List<TimephasedCost> m_timephasedActualCost; private TimephasedWorkData m_timephasedOvertimeWork; private TimephasedWorkData m_timephasedActualOvertimeWork; private List<FieldListener> m_listeners; private TimephasedWorkData[] m_timephasedBaselineWork = new TimephasedWorkData[11]; private TimephasedCostData[] m_timephasedBaselineCost = new TimephasedCostData[11]; private Task m_task; private ResourceAssignmentWorkgroupFields m_workgroup; public static final Double DEFAULT_UNITS = Double.valueOf(100); private static final AssignmentField[] BASELINE_COSTS = { AssignmentField.BASELINE1_COST, AssignmentField.BASELINE2_COST, AssignmentField.BASELINE3_COST, AssignmentField.BASELINE4_COST, AssignmentField.BASELINE5_COST, AssignmentField.BASELINE6_COST, AssignmentField.BASELINE7_COST, AssignmentField.BASELINE8_COST, AssignmentField.BASELINE9_COST, AssignmentField.BASELINE10_COST }; private static final AssignmentField[] BASELINE_WORKS = { AssignmentField.BASELINE1_WORK, AssignmentField.BASELINE2_WORK, AssignmentField.BASELINE3_WORK, AssignmentField.BASELINE4_WORK, AssignmentField.BASELINE5_WORK, AssignmentField.BASELINE6_WORK, AssignmentField.BASELINE7_WORK, AssignmentField.BASELINE8_WORK, AssignmentField.BASELINE9_WORK, AssignmentField.BASELINE10_WORK }; private static final AssignmentField[] BASELINE_STARTS = { AssignmentField.BASELINE1_START, AssignmentField.BASELINE2_START, AssignmentField.BASELINE3_START, AssignmentField.BASELINE4_START, AssignmentField.BASELINE5_START, AssignmentField.BASELINE6_START, AssignmentField.BASELINE7_START, AssignmentField.BASELINE8_START, AssignmentField.BASELINE9_START, AssignmentField.BASELINE10_START }; private static final AssignmentField[] BASELINE_FINISHES = { AssignmentField.BASELINE1_FINISH, AssignmentField.BASELINE2_FINISH, AssignmentField.BASELINE3_FINISH, AssignmentField.BASELINE4_FINISH, AssignmentField.BASELINE5_FINISH, AssignmentField.BASELINE6_FINISH, AssignmentField.BASELINE7_FINISH, AssignmentField.BASELINE8_FINISH, AssignmentField.BASELINE9_FINISH, AssignmentField.BASELINE10_FINISH }; private static final AssignmentField[] BASELINE_BUDGET_COSTS = { AssignmentField.BASELINE1_BUDGET_COST, AssignmentField.BASELINE2_BUDGET_COST, AssignmentField.BASELINE3_BUDGET_COST, AssignmentField.BASELINE4_BUDGET_COST, AssignmentField.BASELINE5_BUDGET_COST, AssignmentField.BASELINE6_BUDGET_COST, AssignmentField.BASELINE7_BUDGET_COST, AssignmentField.BASELINE8_BUDGET_COST, AssignmentField.BASELINE9_BUDGET_COST, AssignmentField.BASELINE10_BUDGET_COST }; private static final AssignmentField[] BASELINE_BUDGET_WORKS = { AssignmentField.BASELINE1_BUDGET_WORK, AssignmentField.BASELINE2_BUDGET_WORK, AssignmentField.BASELINE3_BUDGET_WORK, AssignmentField.BASELINE4_BUDGET_WORK, AssignmentField.BASELINE5_BUDGET_WORK, AssignmentField.BASELINE6_BUDGET_WORK, AssignmentField.BASELINE7_BUDGET_WORK, AssignmentField.BASELINE8_BUDGET_WORK, AssignmentField.BASELINE9_BUDGET_WORK, AssignmentField.BASELINE10_BUDGET_WORK }; private static final AssignmentField[] CUSTOM_TEXT = { AssignmentField.TEXT1, AssignmentField.TEXT2, AssignmentField.TEXT3, AssignmentField.TEXT4, AssignmentField.TEXT5, AssignmentField.TEXT6, AssignmentField.TEXT7, AssignmentField.TEXT8, AssignmentField.TEXT9, 
AssignmentField.TEXT10, AssignmentField.TEXT11, AssignmentField.TEXT12, AssignmentField.TEXT13, AssignmentField.TEXT14, AssignmentField.TEXT15, AssignmentField.TEXT16, AssignmentField.TEXT17, AssignmentField.TEXT18, AssignmentField.TEXT19, AssignmentField.TEXT20, AssignmentField.TEXT21, AssignmentField.TEXT22, AssignmentField.TEXT23, AssignmentField.TEXT24, AssignmentField.TEXT25, AssignmentField.TEXT26, AssignmentField.TEXT27, AssignmentField.TEXT28, AssignmentField.TEXT29, AssignmentField.TEXT30 }; private static final AssignmentField[] CUSTOM_START = { AssignmentField.START1, AssignmentField.START2, AssignmentField.START3, AssignmentField.START4, AssignmentField.START5, AssignmentField.START6, AssignmentField.START7, AssignmentField.START8, AssignmentField.START9, AssignmentField.START10 }; private static final AssignmentField[] CUSTOM_FINISH = { AssignmentField.FINISH1, AssignmentField.FINISH2, AssignmentField.FINISH3, AssignmentField.FINISH4, AssignmentField.FINISH5, AssignmentField.FINISH6, AssignmentField.FINISH7, AssignmentField.FINISH8, AssignmentField.FINISH9, AssignmentField.FINISH10 }; private static final AssignmentField[] CUSTOM_DATE = { AssignmentField.DATE1, AssignmentField.DATE2, AssignmentField.DATE3, AssignmentField.DATE4, AssignmentField.DATE5, AssignmentField.DATE6, AssignmentField.DATE7, AssignmentField.DATE8, AssignmentField.DATE9, AssignmentField.DATE10 }; private static final AssignmentField[] CUSTOM_NUMBER = { AssignmentField.NUMBER1, AssignmentField.NUMBER2, AssignmentField.NUMBER3, AssignmentField.NUMBER4, AssignmentField.NUMBER5, AssignmentField.NUMBER6, AssignmentField.NUMBER7, AssignmentField.NUMBER8, AssignmentField.NUMBER9, AssignmentField.NUMBER10, AssignmentField.NUMBER11, AssignmentField.NUMBER12, AssignmentField.NUMBER13, AssignmentField.NUMBER14, AssignmentField.NUMBER15, AssignmentField.NUMBER16, AssignmentField.NUMBER17, AssignmentField.NUMBER18, AssignmentField.NUMBER19, AssignmentField.NUMBER20 }; private static final AssignmentField[] CUSTOM_DURATION = { AssignmentField.DURATION1, AssignmentField.DURATION2, AssignmentField.DURATION3, AssignmentField.DURATION4, AssignmentField.DURATION5, AssignmentField.DURATION6, AssignmentField.DURATION7, AssignmentField.DURATION8, AssignmentField.DURATION9, AssignmentField.DURATION10 }; private static final AssignmentField[] CUSTOM_COST = { AssignmentField.COST1, AssignmentField.COST2, AssignmentField.COST3, AssignmentField.COST4, AssignmentField.COST5, AssignmentField.COST6, AssignmentField.COST7, AssignmentField.COST8, AssignmentField.COST9, AssignmentField.COST10 }; private static final AssignmentField[] CUSTOM_FLAG = { AssignmentField.FLAG1, AssignmentField.FLAG2, AssignmentField.FLAG3, AssignmentField.FLAG4, AssignmentField.FLAG5, AssignmentField.FLAG6, AssignmentField.FLAG7, AssignmentField.FLAG8, AssignmentField.FLAG9, AssignmentField.FLAG10, AssignmentField.FLAG11, AssignmentField.FLAG12, AssignmentField.FLAG13, AssignmentField.FLAG14, AssignmentField.FLAG15, AssignmentField.FLAG16, AssignmentField.FLAG17, AssignmentField.FLAG18, AssignmentField.FLAG19, AssignmentField.FLAG20 }; private static final AssignmentField[] ENTERPRISE_COST = { AssignmentField.ENTERPRISE_COST1, AssignmentField.ENTERPRISE_COST2, AssignmentField.ENTERPRISE_COST3, AssignmentField.ENTERPRISE_COST4, AssignmentField.ENTERPRISE_COST5, AssignmentField.ENTERPRISE_COST6, AssignmentField.ENTERPRISE_COST7, AssignmentField.ENTERPRISE_COST8, AssignmentField.ENTERPRISE_COST9, AssignmentField.ENTERPRISE_COST10 }; private static final 
AssignmentField[] ENTERPRISE_DATE = { AssignmentField.ENTERPRISE_DATE1, AssignmentField.ENTERPRISE_DATE2, AssignmentField.ENTERPRISE_DATE3, AssignmentField.ENTERPRISE_DATE4, AssignmentField.ENTERPRISE_DATE5, AssignmentField.ENTERPRISE_DATE6, AssignmentField.ENTERPRISE_DATE7, AssignmentField.ENTERPRISE_DATE8, AssignmentField.ENTERPRISE_DATE9, AssignmentField.ENTERPRISE_DATE10, AssignmentField.ENTERPRISE_DATE11, AssignmentField.ENTERPRISE_DATE12, AssignmentField.ENTERPRISE_DATE13, AssignmentField.ENTERPRISE_DATE14, AssignmentField.ENTERPRISE_DATE15, AssignmentField.ENTERPRISE_DATE16, AssignmentField.ENTERPRISE_DATE17, AssignmentField.ENTERPRISE_DATE18, AssignmentField.ENTERPRISE_DATE19, AssignmentField.ENTERPRISE_DATE20, AssignmentField.ENTERPRISE_DATE21, AssignmentField.ENTERPRISE_DATE22, AssignmentField.ENTERPRISE_DATE23, AssignmentField.ENTERPRISE_DATE24, AssignmentField.ENTERPRISE_DATE25, AssignmentField.ENTERPRISE_DATE26, AssignmentField.ENTERPRISE_DATE27, AssignmentField.ENTERPRISE_DATE28, AssignmentField.ENTERPRISE_DATE29, AssignmentField.ENTERPRISE_DATE30 }; private static final AssignmentField[] ENTERPRISE_DURATION = { AssignmentField.ENTERPRISE_DURATION1, AssignmentField.ENTERPRISE_DURATION2, AssignmentField.ENTERPRISE_DURATION3, AssignmentField.ENTERPRISE_DURATION4, AssignmentField.ENTERPRISE_DURATION5, AssignmentField.ENTERPRISE_DURATION6, AssignmentField.ENTERPRISE_DURATION7, AssignmentField.ENTERPRISE_DURATION8, AssignmentField.ENTERPRISE_DURATION9, AssignmentField.ENTERPRISE_DURATION10 }; private static final AssignmentField[] ENTERPRISE_FLAG = { AssignmentField.ENTERPRISE_FLAG1, AssignmentField.ENTERPRISE_FLAG2, AssignmentField.ENTERPRISE_FLAG3, AssignmentField.ENTERPRISE_FLAG4, AssignmentField.ENTERPRISE_FLAG5, AssignmentField.ENTERPRISE_FLAG6, AssignmentField.ENTERPRISE_FLAG7, AssignmentField.ENTERPRISE_FLAG8, AssignmentField.ENTERPRISE_FLAG9, AssignmentField.ENTERPRISE_FLAG10, AssignmentField.ENTERPRISE_FLAG11, AssignmentField.ENTERPRISE_FLAG12, AssignmentField.ENTERPRISE_FLAG13, AssignmentField.ENTERPRISE_FLAG14, AssignmentField.ENTERPRISE_FLAG15, AssignmentField.ENTERPRISE_FLAG16, AssignmentField.ENTERPRISE_FLAG17, AssignmentField.ENTERPRISE_FLAG18, AssignmentField.ENTERPRISE_FLAG19, AssignmentField.ENTERPRISE_FLAG20 }; private static final AssignmentField[] ENTERPRISE_NUMBER = { AssignmentField.ENTERPRISE_NUMBER1, AssignmentField.ENTERPRISE_NUMBER2, AssignmentField.ENTERPRISE_NUMBER3, AssignmentField.ENTERPRISE_NUMBER4, AssignmentField.ENTERPRISE_NUMBER5, AssignmentField.ENTERPRISE_NUMBER6, AssignmentField.ENTERPRISE_NUMBER7, AssignmentField.ENTERPRISE_NUMBER8, AssignmentField.ENTERPRISE_NUMBER9, AssignmentField.ENTERPRISE_NUMBER10, AssignmentField.ENTERPRISE_NUMBER11, AssignmentField.ENTERPRISE_NUMBER12, AssignmentField.ENTERPRISE_NUMBER13, AssignmentField.ENTERPRISE_NUMBER14, AssignmentField.ENTERPRISE_NUMBER15, AssignmentField.ENTERPRISE_NUMBER16, AssignmentField.ENTERPRISE_NUMBER17, AssignmentField.ENTERPRISE_NUMBER18, AssignmentField.ENTERPRISE_NUMBER19, AssignmentField.ENTERPRISE_NUMBER20, AssignmentField.ENTERPRISE_NUMBER21, AssignmentField.ENTERPRISE_NUMBER22, AssignmentField.ENTERPRISE_NUMBER23, AssignmentField.ENTERPRISE_NUMBER24, AssignmentField.ENTERPRISE_NUMBER25, AssignmentField.ENTERPRISE_NUMBER26, AssignmentField.ENTERPRISE_NUMBER27, AssignmentField.ENTERPRISE_NUMBER28, AssignmentField.ENTERPRISE_NUMBER29, AssignmentField.ENTERPRISE_NUMBER30, AssignmentField.ENTERPRISE_NUMBER31, AssignmentField.ENTERPRISE_NUMBER32, AssignmentField.ENTERPRISE_NUMBER33, 
AssignmentField.ENTERPRISE_NUMBER34, AssignmentField.ENTERPRISE_NUMBER35, AssignmentField.ENTERPRISE_NUMBER36, AssignmentField.ENTERPRISE_NUMBER37, AssignmentField.ENTERPRISE_NUMBER38, AssignmentField.ENTERPRISE_NUMBER39, AssignmentField.ENTERPRISE_NUMBER40 }; private static final AssignmentField[] ENTERPRISE_TEXT = { AssignmentField.ENTERPRISE_TEXT1, AssignmentField.ENTERPRISE_TEXT2, AssignmentField.ENTERPRISE_TEXT3, AssignmentField.ENTERPRISE_TEXT4, AssignmentField.ENTERPRISE_TEXT5, AssignmentField.ENTERPRISE_TEXT6, AssignmentField.ENTERPRISE_TEXT7, AssignmentField.ENTERPRISE_TEXT8, AssignmentField.ENTERPRISE_TEXT9, AssignmentField.ENTERPRISE_TEXT10, AssignmentField.ENTERPRISE_TEXT11, AssignmentField.ENTERPRISE_TEXT12, AssignmentField.ENTERPRISE_TEXT13, AssignmentField.ENTERPRISE_TEXT14, AssignmentField.ENTERPRISE_TEXT15, AssignmentField.ENTERPRISE_TEXT16, AssignmentField.ENTERPRISE_TEXT17, AssignmentField.ENTERPRISE_TEXT18, AssignmentField.ENTERPRISE_TEXT19, AssignmentField.ENTERPRISE_TEXT20, AssignmentField.ENTERPRISE_TEXT21, AssignmentField.ENTERPRISE_TEXT22, AssignmentField.ENTERPRISE_TEXT23, AssignmentField.ENTERPRISE_TEXT24, AssignmentField.ENTERPRISE_TEXT25, AssignmentField.ENTERPRISE_TEXT26, AssignmentField.ENTERPRISE_TEXT27, AssignmentField.ENTERPRISE_TEXT28, AssignmentField.ENTERPRISE_TEXT29, AssignmentField.ENTERPRISE_TEXT30, AssignmentField.ENTERPRISE_TEXT31, AssignmentField.ENTERPRISE_TEXT32, AssignmentField.ENTERPRISE_TEXT33, AssignmentField.ENTERPRISE_TEXT34, AssignmentField.ENTERPRISE_TEXT35, AssignmentField.ENTERPRISE_TEXT36, AssignmentField.ENTERPRISE_TEXT37, AssignmentField.ENTERPRISE_TEXT38, AssignmentField.ENTERPRISE_TEXT39, AssignmentField.ENTERPRISE_TEXT40 }; private static final AssignmentField[] ENTERPRISE_CUSTOM_FIELD = { AssignmentField.ENTERPRISE_CUSTOM_FIELD1, AssignmentField.ENTERPRISE_CUSTOM_FIELD2, AssignmentField.ENTERPRISE_CUSTOM_FIELD3, AssignmentField.ENTERPRISE_CUSTOM_FIELD4, AssignmentField.ENTERPRISE_CUSTOM_FIELD5, AssignmentField.ENTERPRISE_CUSTOM_FIELD6, AssignmentField.ENTERPRISE_CUSTOM_FIELD7, AssignmentField.ENTERPRISE_CUSTOM_FIELD8, AssignmentField.ENTERPRISE_CUSTOM_FIELD9, AssignmentField.ENTERPRISE_CUSTOM_FIELD10, AssignmentField.ENTERPRISE_CUSTOM_FIELD11, AssignmentField.ENTERPRISE_CUSTOM_FIELD12, AssignmentField.ENTERPRISE_CUSTOM_FIELD13, AssignmentField.ENTERPRISE_CUSTOM_FIELD14, AssignmentField.ENTERPRISE_CUSTOM_FIELD15, AssignmentField.ENTERPRISE_CUSTOM_FIELD16, AssignmentField.ENTERPRISE_CUSTOM_FIELD17, AssignmentField.ENTERPRISE_CUSTOM_FIELD18, AssignmentField.ENTERPRISE_CUSTOM_FIELD19, AssignmentField.ENTERPRISE_CUSTOM_FIELD20, AssignmentField.ENTERPRISE_CUSTOM_FIELD21, AssignmentField.ENTERPRISE_CUSTOM_FIELD22, AssignmentField.ENTERPRISE_CUSTOM_FIELD23, AssignmentField.ENTERPRISE_CUSTOM_FIELD24, AssignmentField.ENTERPRISE_CUSTOM_FIELD25, AssignmentField.ENTERPRISE_CUSTOM_FIELD26, AssignmentField.ENTERPRISE_CUSTOM_FIELD27, AssignmentField.ENTERPRISE_CUSTOM_FIELD28, AssignmentField.ENTERPRISE_CUSTOM_FIELD29, AssignmentField.ENTERPRISE_CUSTOM_FIELD30, AssignmentField.ENTERPRISE_CUSTOM_FIELD31, AssignmentField.ENTERPRISE_CUSTOM_FIELD32, AssignmentField.ENTERPRISE_CUSTOM_FIELD33, AssignmentField.ENTERPRISE_CUSTOM_FIELD34, AssignmentField.ENTERPRISE_CUSTOM_FIELD35, AssignmentField.ENTERPRISE_CUSTOM_FIELD36, AssignmentField.ENTERPRISE_CUSTOM_FIELD37, AssignmentField.ENTERPRISE_CUSTOM_FIELD38, AssignmentField.ENTERPRISE_CUSTOM_FIELD39, AssignmentField.ENTERPRISE_CUSTOM_FIELD40, AssignmentField.ENTERPRISE_CUSTOM_FIELD41, 
AssignmentField.ENTERPRISE_CUSTOM_FIELD42, AssignmentField.ENTERPRISE_CUSTOM_FIELD43, AssignmentField.ENTERPRISE_CUSTOM_FIELD44, AssignmentField.ENTERPRISE_CUSTOM_FIELD45, AssignmentField.ENTERPRISE_CUSTOM_FIELD46, AssignmentField.ENTERPRISE_CUSTOM_FIELD47, AssignmentField.ENTERPRISE_CUSTOM_FIELD48, AssignmentField.ENTERPRISE_CUSTOM_FIELD49, AssignmentField.ENTERPRISE_CUSTOM_FIELD50 };
/** * Enable events firing when fields are updated. This is the default state. */
Enable events firing when fields are updated. This is the default state
enableEvents
{ "repo_name": "tmyroadctfig/mpxj", "path": "net/sf/mpxj/ResourceAssignment.java", "license": "lgpl-2.1", "size": 94335 }
[ "java.util.List", "net.sf.mpxj.listener.FieldListener" ]
import java.util.List; import net.sf.mpxj.listener.FieldListener;
import java.util.*; import net.sf.mpxj.listener.*;
[ "java.util", "net.sf.mpxj" ]
java.util; net.sf.mpxj;
2,591,900
public static ColorUIResource getControlHighlight() { return metalTheme.getControlHighlight(); }
static ColorUIResource function() { return metalTheme.getControlHighlight(); }
/** * Return control highlight color * @return ColorUIResource color */
Return control highlight color
getControlHighlight
{ "repo_name": "freeVM/freeVM", "path": "enhanced/archive/classlib/java6/modules/swing/src/main/java/common/javax/swing/plaf/metal/MetalLookAndFeel.java", "license": "apache-2.0", "size": 80984 }
[ "javax.swing.plaf.ColorUIResource" ]
import javax.swing.plaf.ColorUIResource;
import javax.swing.plaf.*;
[ "javax.swing" ]
javax.swing;
1,744,521
//name, description etc, are now covered under {@link com.chrisdoyle.validation.tests.Test_Issues_23_27#checkPomNameDescriptionUrlLicenseEtcAreDefined} String[] search = new String[]{ //compile "<dependency>\\s*<groupId>org.apache.commons</groupId>\\s*<artifactId>commons-lang3</artifactId>\\s*<scope>compile</scope>", //test "<dependency>\\s*<groupId>junit</groupId>\\s*<artifactId>junit</artifactId>\\s*<scope>test</scope>" , //provided "<dependency>\\s*<groupId>commons-io</groupId>\\s*<artifactId>commons-io</artifactId>\\s*<scope>provided</scope>" }; for (int i = 0; i < Main.allPoms.length; i++) { if (Main.allPoms[i].contains("hello-world-lib/")) { File f = new File(Main.allPoms[i]); String str = FileUtils.readFileToString(f, "utf-8"); for (int k = 0; k < search.length; k++) { Pattern p = Pattern.compile(search[k]); Matcher matcher = p.matcher(str); Assert.assertTrue(search[k] + " not found in " + f.getAbsolutePath(), matcher.find()); } } } }
String[] search = new String[]{ STR, STR , STR }; for (int i = 0; i < Main.allPoms.length; i++) { if (Main.allPoms[i].contains(STR)) { File f = new File(Main.allPoms[i]); String str = FileUtils.readFileToString(f, "utf-8"); for (int k = 0; k < search.length; k++) { Pattern p = Pattern.compile(search[k]); Matcher matcher = p.matcher(str); Assert.assertTrue(search[k] + STR + f.getAbsolutePath(), matcher.find()); } } } }
/** * this test is commented out because gradle is inconsistent with provided/compile only dependencies * it's only supported with certain versions and on certain versions of the android plugin * @throws Exception */
this test is commented out because gradle is inconsistent with provided/compile-only dependencies; it's only supported with certain versions and on certain versions of the android plugin
pomScopeDependency
{ "repo_name": "gradle-fury/gradle-fury", "path": "gradle-fury-validation/src/main/java/com/chrisdoyle/validation/tests/Test_Issue46.java", "license": "apache-2.0", "size": 2232 }
[ "com.chrisdoyle.validation.Main", "java.io.File", "java.util.regex.Matcher", "java.util.regex.Pattern", "org.apache.commons.io.FileUtils", "org.junit.Assert" ]
import com.chrisdoyle.validation.Main; import java.io.File; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.io.FileUtils; import org.junit.Assert;
import com.chrisdoyle.validation.*; import java.io.*; import java.util.regex.*; import org.apache.commons.io.*; import org.junit.*;
[ "com.chrisdoyle.validation", "java.io", "java.util", "org.apache.commons", "org.junit" ]
com.chrisdoyle.validation; java.io; java.util; org.apache.commons; org.junit;
1,165,441
@Nullable public static Geocache loadCache(final String geocode, final EnumSet<LoadFlag> loadFlags) { if (StringUtils.isBlank(geocode)) { throw new IllegalArgumentException("geocode must not be empty"); } final Set<Geocache> caches = loadCaches(Collections.singleton(geocode), loadFlags); return caches.isEmpty() ? null : caches.iterator().next(); }
static Geocache function(final String geocode, final EnumSet<LoadFlag> loadFlags) { if (StringUtils.isBlank(geocode)) { throw new IllegalArgumentException(STR); } final Set<Geocache> caches = loadCaches(Collections.singleton(geocode), loadFlags); return caches.isEmpty() ? null : caches.iterator().next(); }
/** * Load a single Cache. * * @param geocode * The Geocode GCXXXX * @return the loaded cache (if found). Can be null */
Load a single Cache
loadCache
{ "repo_name": "lewurm/cgeo", "path": "main/src/cgeo/geocaching/DataStore.java", "license": "apache-2.0", "size": 128597 }
[ "java.util.Collections", "java.util.EnumSet", "java.util.Set", "org.apache.commons.lang3.StringUtils" ]
import java.util.Collections; import java.util.EnumSet; import java.util.Set; import org.apache.commons.lang3.StringUtils;
import java.util.*; import org.apache.commons.lang3.*;
[ "java.util", "org.apache.commons" ]
java.util; org.apache.commons;
2,713,169
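As a rough usage sketch for the record above: a caller passes a single geocode plus a set of load flags and checks the result for null. The geocode literal and the EnumSet.allOf choice are illustrative assumptions, not taken from the cgeo sources.
final EnumSet<LoadFlag> flags = EnumSet.allOf(LoadFlag.class); // assumption: load every detail
final Geocache cache = DataStore.loadCache("GC1234", flags);
if (cache != null) {
    // work with the loaded cache
}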
protected Object createDirMBean(String groupName, String groupOid, ObjectName groupObjname, MBeanServer server) { // Note that when using standard metadata, // the returned object must implement the "DirMBean" // interface. // if (server != null) return new Dir(this,server); else return new Dir(this); } // ------------------------------------------------------------ // // Initialization of the "General" group. // // ------------------------------------------------------------
Object function(String groupName, String groupOid, ObjectName groupObjname, MBeanServer server) { if (server != null) return new Dir(this,server); else return new Dir(this); }
/** * Factory method for "Dir" group MBean. * * You can redefine this method if you need to replace the default * generated MBean class with your own customized class. * * @param groupName Name of the group ("Dir") * @param groupOid OID of this group * @param groupObjname ObjectName for this group (may be null) * @param server MBeanServer for this group (may be null) * * @return An instance of the MBean class generated for the * "Dir" group (Dir) * * Note that when using standard metadata, * the returned object must implement the "DirMBean" * interface. **/
Factory method for "Dir" group MBean. You can redefine this method if you need to replace the default generated MBean class with your own customized class
createDirMBean
{ "repo_name": "jswrenn/xtreemfs", "path": "java/servers/src/org/xtreemfs/common/monitoring/generatedcode/XTREEMFS_MIB.java", "license": "bsd-3-clause", "size": 17586 }
[ "javax.management.MBeanServer", "javax.management.ObjectName" ]
import javax.management.MBeanServer; import javax.management.ObjectName;
import javax.management.*;
[ "javax.management" ]
javax.management;
1,107,165
@Test public void fetchesInstallScript() throws Exception { final Profile profile = new Profile.Fixed( new XMLDocument( Joiner.on(' ').join( "<p><entry key='f'><entry key='script'>hi</entry></entry>", "<entry key='install'><item>one</item><item>two</item>", "</entry></p>" ) ) ); MatcherAssert.assertThat( // @checkstyle MultipleStringLiterals (1 line) new DockerRun(profile, "/p/entry[@key='f']").script(), Matchers.equalTo("( 'one' ';' 'two' ';' 'hi' ';' )") ); }
void function() throws Exception { final Profile profile = new Profile.Fixed( new XMLDocument( Joiner.on(' ').join( STR, STR, STR ) ) ); MatcherAssert.assertThat( new DockerRun(profile, STR).script(), Matchers.equalTo(STR) ); }
/** * DockerRun can fetch script. * @throws Exception In case of error. * @since 1.22 */
DockerRun can fetch script
fetchesInstallScript
{ "repo_name": "dalifreire/rultor", "path": "src/test/java/com/rultor/agents/req/DockerRunTest.java", "license": "bsd-3-clause", "size": 10190 }
[ "com.google.common.base.Joiner", "com.jcabi.xml.XMLDocument", "com.rultor.spi.Profile", "org.hamcrest.MatcherAssert", "org.hamcrest.Matchers" ]
import com.google.common.base.Joiner; import com.jcabi.xml.XMLDocument; import com.rultor.spi.Profile; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers;
import com.google.common.base.*; import com.jcabi.xml.*; import com.rultor.spi.*; import org.hamcrest.*;
[ "com.google.common", "com.jcabi.xml", "com.rultor.spi", "org.hamcrest" ]
com.google.common; com.jcabi.xml; com.rultor.spi; org.hamcrest;
888,965
public void testGetPricipalStudyInvestigator1() { StudyOrganization organization = registerMockFor(StudyOrganization.class); StudyInvestigator studyInv = registerMockFor(StudyInvestigator.class); List<StudyInvestigator> studyInvs = new ArrayList<StudyInvestigator>(); studyInvs.add(studyInv); EasyMock.expect(organization.getStudyInvestigators()).andReturn( studyInvs); EasyMock.expect(studyInv.getRoleCode()).andReturn("Study Investigator"); organization.setStudy(simpleStudy); replayMocks(); simpleStudy.addStudyOrganization(organization); assertNull("principal study investigator not found", simpleStudy .getPrincipalStudyInvestigator()); verifyMocks(); }
void function() { StudyOrganization organization = registerMockFor(StudyOrganization.class); StudyInvestigator studyInv = registerMockFor(StudyInvestigator.class); List<StudyInvestigator> studyInvs = new ArrayList<StudyInvestigator>(); studyInvs.add(studyInv); EasyMock.expect(organization.getStudyInvestigators()).andReturn( studyInvs); EasyMock.expect(studyInv.getRoleCode()).andReturn(STR); organization.setStudy(simpleStudy); replayMocks(); simpleStudy.addStudyOrganization(organization); assertNull(STR, simpleStudy .getPrincipalStudyInvestigator()); verifyMocks(); }
/** * test get principal study investigator */
test get principal study investigator
testGetPricipalStudyInvestigator1
{ "repo_name": "NCIP/c3pr", "path": "codebase/projects/core/test/src/java/edu/duke/cabig/c3pr/domain/StudyTestCase.java", "license": "bsd-3-clause", "size": 75546 }
[ "java.util.ArrayList", "java.util.List", "org.easymock.classextension.EasyMock" ]
import java.util.ArrayList; import java.util.List; import org.easymock.classextension.EasyMock;
import java.util.*; import org.easymock.classextension.*;
[ "java.util", "org.easymock.classextension" ]
java.util; org.easymock.classextension;
36,044
@Test public void testTitleComparison() { mPref1.setTitle("value 1"); assertFalse("Compare non-null to null", mComparisonCallback.arePreferenceContentsTheSame(mPref1, mPref2)); assertFalse("Compare null to non-null", mComparisonCallback.arePreferenceContentsTheSame(mPref2, mPref1)); mPref2.setTitle("value 1"); assertTrue("Compare identical", mComparisonCallback.arePreferenceContentsTheSame(mPref1, mPref2)); mPref2.setTitle("value 2"); assertFalse("Compare different", mComparisonCallback.arePreferenceContentsTheSame(mPref1, mPref2)); }
void function() { mPref1.setTitle(STR); assertFalse(STR, mComparisonCallback.arePreferenceContentsTheSame(mPref1, mPref2)); assertFalse(STR, mComparisonCallback.arePreferenceContentsTheSame(mPref2, mPref1)); mPref2.setTitle(STR); assertTrue(STR, mComparisonCallback.arePreferenceContentsTheSame(mPref1, mPref2)); mPref2.setTitle(STR); assertFalse(STR, mComparisonCallback.arePreferenceContentsTheSame(mPref1, mPref2)); }
/** * Title differences should be detected */
Title differences should be detected
testTitleComparison
{ "repo_name": "AndroidX/androidx", "path": "preference/preference/src/androidTest/java/androidx/preference/tests/PreferenceComparisonCallbackTest.java", "license": "apache-2.0", "size": 9806 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
657,862
private void writeTokenType(XMLStreamWriter writer, URI uri) throws ProcessingException { StaxUtil.writeStartElement(writer, PREFIX, WSTrustConstants.TOKEN_TYPE, BASE_NAMESPACE); StaxUtil.writeCharacters(writer, uri.toASCIIString()); StaxUtil.writeEndElement(writer); }
void function(XMLStreamWriter writer, URI uri) throws ProcessingException { StaxUtil.writeStartElement(writer, PREFIX, WSTrustConstants.TOKEN_TYPE, BASE_NAMESPACE); StaxUtil.writeCharacters(writer, uri.toASCIIString()); StaxUtil.writeEndElement(writer); }
/** * Write Token Type * @param writer * @param uri * @throws ProcessingException */
Write Token Type
writeTokenType
{ "repo_name": "taylor-project/taylor-picketlink-2.0.3", "path": "federation/picketlink-fed-core/src/main/java/org/picketlink/identity/federation/core/wstrust/writers/WSTrustRSTWriter.java", "license": "gpl-2.0", "size": 18854 }
[ "javax.xml.stream.XMLStreamWriter", "org.picketlink.identity.federation.core.exceptions.ProcessingException", "org.picketlink.identity.federation.core.util.StaxUtil", "org.picketlink.identity.federation.core.wstrust.WSTrustConstants" ]
import javax.xml.stream.XMLStreamWriter; import org.picketlink.identity.federation.core.exceptions.ProcessingException; import org.picketlink.identity.federation.core.util.StaxUtil; import org.picketlink.identity.federation.core.wstrust.WSTrustConstants;
import javax.xml.stream.*; import org.picketlink.identity.federation.core.exceptions.*; import org.picketlink.identity.federation.core.util.*; import org.picketlink.identity.federation.core.wstrust.*;
[ "javax.xml", "org.picketlink.identity" ]
javax.xml; org.picketlink.identity;
2,037,687
void visualizeSpecialBlock(SpecialBlock sbb);
void visualizeSpecialBlock(SpecialBlock sbb);
/** * Visualize a SpecialBlock. * * @param sbb the special block */
Visualize a SpecialBlock
visualizeSpecialBlock
{ "repo_name": "damienmg/bazel", "path": "third_party/checker_framework_dataflow/java/org/checkerframework/dataflow/cfg/CFGVisualizer.java", "license": "apache-2.0", "size": 6202 }
[ "org.checkerframework.dataflow.cfg.block.SpecialBlock" ]
import org.checkerframework.dataflow.cfg.block.SpecialBlock;
import org.checkerframework.dataflow.cfg.block.*;
[ "org.checkerframework.dataflow" ]
org.checkerframework.dataflow;
961,540
@Override public void exitFunctionDeclaration(@NotNull BigDataScriptParser.FunctionDeclarationContext ctx) { }
@Override public void exitFunctionDeclaration(@NotNull BigDataScriptParser.FunctionDeclarationContext ctx) { }
/** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */
The default implementation does nothing
enterFunctionDeclaration
{ "repo_name": "leepc12/BigDataScript", "path": "src/org/bds/antlr/BigDataScriptBaseListener.java", "license": "apache-2.0", "size": 36363 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
449,971
static public ResultsContainer importThisControlFile(final String fName, final java.io.InputStream is) { // create the new handler final ASSETReaderWriter xr = new ASSETReaderWriter(); final Vector<ResultsContainer> resultsHolder = new Vector<ResultsContainer>(0, 1);
static ResultsContainer function(final String fName, final java.io.InputStream is) { final ASSETReaderWriter xr = new ASSETReaderWriter(); final Vector<ResultsContainer> resultsHolder = new Vector<ResultsContainer>(0, 1);
/** * handle the import of XML data, creating a new session for it * * @param fName * the filename to read from (largely ignored) * @param is * an input stream to read from * @return the output directory to write, if applicable */
handle the import of XML data, creating a new session for it
importThisControlFile
{ "repo_name": "theanuradha/debrief", "path": "org.mwc.asset.legacy/src/ASSET/Util/XML/ASSETReaderWriter.java", "license": "epl-1.0", "size": 15975 }
[ "java.util.Vector" ]
import java.util.Vector;
import java.util.*;
[ "java.util" ]
java.util;
347,637
@SuppressWarnings( "unchecked" ) @Override @Transactional( propagation = Propagation.REQUIRED, isolation = Isolation.READ_UNCOMMITTED ) public DAOResponse< Tab > getTabBySearchParams( SearchOption searchOption, RequestParams requestParams ) { String location = this.getClass( ).getCanonicalName( ) + "#getTabBySearchParams()"; List< Tab > tabList = new ArrayList<>( ); logger.debug( "Starting " + location ); DAOResponse< Tab > tabDAOResponse = new DAOResponse<>( ); ErrorContainer errorContainer = requestParams.isError( ) ? new ErrorContainer( ) : null; try { this.openDBTransaction( ); DetachedCriteria detachedCriteria = this.getDetachedCriteriaBySearchParams( searchOption ); tabList = detachedCriteria.getExecutableCriteria( session ).list( ); this.closeDBTransaction( ); tabDAOResponse.setRequestSuccess( Boolean.TRUE ); } catch ( HibernateException exception ) { this.handleExceptions( exception ); tabDAOResponse.setRequestSuccess( Boolean.FALSE ); if ( requestParams.isError( ) ) { errorContainer = fillErrorContainer( location, exception ); } logger.error( "Failed searching for tabs in database with search params" ); } tabDAOResponse.setResults( tabList ); tabDAOResponse.setCount( tabList.size( ) ); tabDAOResponse.setErrorContainer( errorContainer ); logger.debug( "Finishing " + location ); return tabDAOResponse; }
@SuppressWarnings( STR ) @Transactional( propagation = Propagation.REQUIRED, isolation = Isolation.READ_UNCOMMITTED ) DAOResponse< Tab > function( SearchOption searchOption, RequestParams requestParams ) { String location = this.getClass( ).getCanonicalName( ) + STR; List< Tab > tabList = new ArrayList<>( ); logger.debug( STR + location ); DAOResponse< Tab > tabDAOResponse = new DAOResponse<>( ); ErrorContainer errorContainer = requestParams.isError( ) ? new ErrorContainer( ) : null; try { this.openDBTransaction( ); DetachedCriteria detachedCriteria = this.getDetachedCriteriaBySearchParams( searchOption ); tabList = detachedCriteria.getExecutableCriteria( session ).list( ); this.closeDBTransaction( ); tabDAOResponse.setRequestSuccess( Boolean.TRUE ); } catch ( HibernateException exception ) { this.handleExceptions( exception ); tabDAOResponse.setRequestSuccess( Boolean.FALSE ); if ( requestParams.isError( ) ) { errorContainer = fillErrorContainer( location, exception ); } logger.error( STR ); } tabDAOResponse.setResults( tabList ); tabDAOResponse.setCount( tabList.size( ) ); tabDAOResponse.setErrorContainer( errorContainer ); logger.debug( STR + location ); return tabDAOResponse; }
/** * Gets tab by search params. * * @param searchOption the tab search option * @param requestParams the request params * @return the tab by search params */
Gets tab by search params
getTabBySearchParams
{ "repo_name": "arkoghosh11/bloom-test", "path": "bloom-dao/src/main/java/com/mana/innovative/dao/common/impl/TabDAOImpl.java", "license": "apache-2.0", "size": 18016 }
[ "com.mana.innovative.dao.response.DAOResponse", "com.mana.innovative.domain.common.SearchOption", "com.mana.innovative.domain.common.Tab", "com.mana.innovative.dto.request.RequestParams", "com.mana.innovative.exception.response.ErrorContainer", "java.util.ArrayList", "java.util.List", "org.hibernate.HibernateException", "org.hibernate.criterion.DetachedCriteria", "org.springframework.transaction.annotation.Isolation", "org.springframework.transaction.annotation.Propagation", "org.springframework.transaction.annotation.Transactional" ]
import com.mana.innovative.dao.response.DAOResponse; import com.mana.innovative.domain.common.SearchOption; import com.mana.innovative.domain.common.Tab; import com.mana.innovative.dto.request.RequestParams; import com.mana.innovative.exception.response.ErrorContainer; import java.util.ArrayList; import java.util.List; import org.hibernate.HibernateException; import org.hibernate.criterion.DetachedCriteria; import org.springframework.transaction.annotation.Isolation; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional;
import com.mana.innovative.dao.response.*; import com.mana.innovative.domain.common.*; import com.mana.innovative.dto.request.*; import com.mana.innovative.exception.response.*; import java.util.*; import org.hibernate.*; import org.hibernate.criterion.*; import org.springframework.transaction.annotation.*;
[ "com.mana.innovative", "java.util", "org.hibernate", "org.hibernate.criterion", "org.springframework.transaction" ]
com.mana.innovative; java.util; org.hibernate; org.hibernate.criterion; org.springframework.transaction;
1,752,995
@Test public void testDrawWhenDrawinTheTriangle() throws Exception { Shape shape1 = new TriangleShape(1); Paint paint = new Paint(iPrinter); paint.draw(shape1); String expectedValue = " /" + "\\" + "\n" + "/__\\"; verify(iPrinter).println(expectedValue); }
void function() throws Exception { Shape shape1 = new TriangleShape(1); Paint paint = new Paint(iPrinter); paint.draw(shape1); String expectedValue = STR + "\\" + "\n" + "/__\\"; verify(iPrinter).println(expectedValue); }
/** * Method to test the draw method when drawing the triangle. * @throws Exception Exception. */
Method to test the draw method when drawing the triangle
testDrawWhenDrawinTheTriangle
{ "repo_name": "evgenymatveev/Task", "path": "chapter_002/src/test/java/ru/ematveev/stratigypattern/PaintTest.java", "license": "apache-2.0", "size": 1499 }
[ "org.mockito.Mockito" ]
import org.mockito.Mockito;
import org.mockito.*;
[ "org.mockito" ]
org.mockito;
838,818
void addClassFilter(ReferenceType refType);
void addClassFilter(ReferenceType refType);
/** * Restricts the events generated by this request to those whose * location is in the given reference type or any of its subtypes. * An event will be generated for any location in a reference type * that can be safely cast to the given reference type. * * @param refType the reference type to filter on. * @throws InvalidRequestStateException if this request is currently * enabled or has been deleted. * Filters may be added only to disabled requests. */
Restricts the events generated by this request to those whose location is in the given reference type or any of its subtypes. An event will be generated for any location in a reference type that can be safely cast to the given reference type
addClassFilter
{ "repo_name": "md-5/jdk10", "path": "src/jdk.jdi/share/classes/com/sun/jdi/request/StepRequest.java", "license": "gpl-2.0", "size": 5196 }
[ "com.sun.jdi.ReferenceType" ]
import com.sun.jdi.ReferenceType;
import com.sun.jdi.*;
[ "com.sun.jdi" ]
com.sun.jdi;
878,430
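A hedged sketch of how a JDI client typically combines this filter with a step request; the vm, thread and refType variables are assumed to be obtained elsewhere, and the filter must be added while the request is still disabled.
EventRequestManager erm = vm.eventRequestManager();
StepRequest step = erm.createStepRequest(thread, StepRequest.STEP_LINE, StepRequest.STEP_OVER);
step.addClassFilter(refType); // only locations in refType or its subtypes generate events
step.enable();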
@Test public void testRemoveFeature() { Marker marker = new Marker(map); map.removeFeature(marker); assertEquals(0, map.Features().size()); }
void function() { Marker marker = new Marker(map); map.removeFeature(marker); assertEquals(0, map.Features().size()); }
/** * Tests that removing a feature from the map results in the feature list going back to its * original size. */
Tests that removing a feature from the map results in the feature list going back to its original size
testRemoveFeature
{ "repo_name": "Klomi/appinventor-sources", "path": "appinventor/components/tests/com/google/appinventor/components/runtime/MapTest.java", "license": "apache-2.0", "size": 10054 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
1,173,222
protected Export export; @ManyToOne( cascade = { CascadeType.PERSIST, CascadeType.MERGE }, fetch = FetchType.LAZY ) @org.hibernate.annotations.Cascade({org.hibernate.annotations.CascadeType.SAVE_UPDATE}) @Basic( optional = true ) @JoinColumn(name = "export_id", nullable = true ) public Export getExport() { return this.export; }
Export export; @ManyToOne( cascade = { CascadeType.PERSIST, CascadeType.MERGE }, fetch = FetchType.LAZY ) @org.hibernate.annotations.Cascade({org.hibernate.annotations.CascadeType.SAVE_UPDATE}) @Basic( optional = true ) @JoinColumn(name = STR, nullable = true ) public Export function() { return this.export; }
/** * Return the value associated with the column: export. * @return A Export object (this.export) */
Return the value associated with the column: export
getExport
{ "repo_name": "servinglynk/servinglynk-hmis", "path": "hmis-model-v2017/src/main/java/com/servinglynk/hmis/warehouse/model/v2017/Employment.java", "license": "mpl-2.0", "size": 13773 }
[ "javax.persistence.Basic", "javax.persistence.CascadeType", "javax.persistence.FetchType", "javax.persistence.JoinColumn", "javax.persistence.ManyToOne" ]
import javax.persistence.Basic; import javax.persistence.CascadeType; import javax.persistence.FetchType; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne;
import javax.persistence.*;
[ "javax.persistence" ]
javax.persistence;
220,695
@Schema(required = true, description = "Parent node path `/` if node is a root node (room)") public String getParentPath() { return parentPath; }
@Schema(required = true, description = STR) String function() { return parentPath; }
/** * Parent node path `/` if node is a root node (room) * @return parentPath **/
Parent node path `/` if node is a root node (room)
getParentPath
{ "repo_name": "iterate-ch/cyberduck", "path": "dracoon/src/main/java/ch/cyberduck/core/sds/io/swagger/client/model/DeletedNodeSummary.java", "license": "gpl-3.0", "size": 9879 }
[ "io.swagger.v3.oas.annotations.media.Schema" ]
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.media.*;
[ "io.swagger.v3" ]
io.swagger.v3;
405,362
protected List<String> getMatchingKeys(List messagePropertyList, String[] keysToMatch) { List<String> matchingKeys = new ArrayList<String>(); if (messagePropertyList != null && messagePropertyList.size() > 0) { for (Object keyAsObject : messagePropertyList) { String key = (String)keyAsObject; if (matchesGroup(key, keysToMatch)) { matchingKeys.add(key); } } } return matchingKeys; }
List<String> function(List messagePropertyList, String[] keysToMatch) { List<String> matchingKeys = new ArrayList<String>(); if (messagePropertyList != null && messagePropertyList.size() > 0) { for (Object keyAsObject : messagePropertyList) { String key = (String)keyAsObject; if (matchesGroup(key, keysToMatch)) { matchingKeys.add(key); } } } return matchingKeys; }
/** * Returns a list of all error keys that should be rendered * @param keysToMatch the keys that this group will match * @return a List of all error keys this group will match */
Returns a list of all error keys that should be rendered
getMatchingKeys
{ "repo_name": "Ariah-Group/Finance", "path": "af_webapp/src/main/java/org/kuali/kfs/sys/document/web/renderers/GroupErrorsRenderer.java", "license": "apache-2.0", "size": 12840 }
[ "java.util.ArrayList", "java.util.List" ]
import java.util.ArrayList; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,257,082
public List<Bmv2ModelRuntimeData> runtimeDatas() { return ImmutableList.copyOf(runtimeDatas.values()); }
List<Bmv2ModelRuntimeData> function() { return ImmutableList.copyOf(runtimeDatas.values()); }
/** * Returns an immutable list of runtime data for this action. * The list is ordered according to the values defined in the model. * * @return list of runtime data. */
Returns an immutable list of runtime data for this action. The list is ordered according to the values defined in the model
runtimeDatas
{ "repo_name": "Phaneendra-Huawei/demo", "path": "protocols/bmv2/src/main/java/org/onosproject/bmv2/api/model/Bmv2ModelAction.java", "license": "apache-2.0", "size": 3182 }
[ "com.google.common.collect.ImmutableList", "java.util.List" ]
import com.google.common.collect.ImmutableList; import java.util.List;
import com.google.common.collect.*; import java.util.*;
[ "com.google.common", "java.util" ]
com.google.common; java.util;
1,083,085
public static BeCarrier make(BeLabel label, BeRangeElement element) { return new BeCarrier(label, element); } public BeCarrier() { } public BeCarrier(Rcvr receiver) { super(receiver); }
static BeCarrier function(BeLabel label, BeRangeElement element) { return new BeCarrier(label, element); } public BeCarrier() { } public BeCarrier(Rcvr receiver) { super(receiver); }
/** * For editions only. */
For editions only
make
{ "repo_name": "jonesd/udanax-gold2java", "path": "abora-gold/src/generated-sources/translator/info/dgjones/abora/gold/be/basic/BeCarrier.java", "license": "mit", "size": 4906 }
[ "info.dgjones.abora.gold.be.basic.BeCarrier", "info.dgjones.abora.gold.be.basic.BeLabel", "info.dgjones.abora.gold.be.basic.BeRangeElement", "info.dgjones.abora.gold.xcvr.Rcvr" ]
import info.dgjones.abora.gold.be.basic.BeCarrier; import info.dgjones.abora.gold.be.basic.BeLabel; import info.dgjones.abora.gold.be.basic.BeRangeElement; import info.dgjones.abora.gold.xcvr.Rcvr;
import info.dgjones.abora.gold.be.basic.*; import info.dgjones.abora.gold.xcvr.*;
[ "info.dgjones.abora" ]
info.dgjones.abora;
326,233
public void setPrivateFor(List<String> privateFor) { this.privateFor = privateFor; }
void function(List<String> privateFor) { this.privateFor = privateFor; }
/** * A transaction privateFor nodes with public keys in a Quorum network */
A transaction privateFor nodes with public keys in a Quorum network
setPrivateFor
{ "repo_name": "DariusX/camel", "path": "components/camel-web3j/src/main/java/org/apache/camel/component/web3j/Web3jConfiguration.java", "license": "apache-2.0", "size": 12465 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,714,413
public static QDataSet decimate( QDataSet ds ) { return decimate( ds, 10 ); }
static QDataSet function( QDataSet ds ) { return decimate( ds, 10 ); }
/** * reduce the size of the data by keeping every 10th measurement. * @param ds a qube dataset. * @return a decimated qube dataset. * @see #decimate(org.das2.qds.QDataSet, int) */
reduce the size of the data by keeping every 10th measurement
decimate
{ "repo_name": "autoplot/app", "path": "QDataSet/src/org/das2/qds/ops/Ops.java", "license": "gpl-2.0", "size": 492716 }
[ "org.das2.qds.QDataSet" ]
import org.das2.qds.QDataSet;
import org.das2.qds.*;
[ "org.das2.qds" ]
org.das2.qds;
524,854
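A minimal sketch of the convenience overload above, assuming ds is an existing qube dataset obtained elsewhere; the ten-fold reduction comes from the delegated decimate(ds, 10) call.
QDataSet thinned = Ops.decimate(ds); // keeps every 10th measurement of ds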
Runnable acceptPutStatement(CommandStatementUpdate command, CallContext context, FlightStream flightStream, StreamListener<PutResult> ackStream);
Runnable acceptPutStatement(CommandStatementUpdate command, CallContext context, FlightStream flightStream, StreamListener<PutResult> ackStream);
/** * Accepts uploaded data for a particular SQL query based data stream. * <p>`PutResult`s must be in the form of a {@link DoPutUpdateResult}. * * @param command The sql command to generate the data stream. * @param context Per-call context. * @param flightStream The data stream being uploaded. * @param ackStream The result data stream. * @return A runnable to process the stream. */
Accepts uploaded data for a particular SQL query based data stream. `PutResult`s must be in the form of a <code>DoPutUpdateResult</code>
acceptPutStatement
{ "repo_name": "apache/arrow", "path": "java/flight/flight-sql/src/main/java/org/apache/arrow/flight/sql/FlightSqlProducer.java", "license": "apache-2.0", "size": 31807 }
[ "org.apache.arrow.flight.FlightStream", "org.apache.arrow.flight.PutResult", "org.apache.arrow.flight.sql.impl.FlightSql" ]
import org.apache.arrow.flight.FlightStream; import org.apache.arrow.flight.PutResult; import org.apache.arrow.flight.sql.impl.FlightSql;
import org.apache.arrow.flight.*; import org.apache.arrow.flight.sql.impl.*;
[ "org.apache.arrow" ]
org.apache.arrow;
1,405,688
public static String getXMLSignatureAlgorithmURI(String algo) { String xmlSignatureAlgo = null; if ("DSA".equalsIgnoreCase(algo)) { xmlSignatureAlgo = JBossSAMLConstants.SIGNATURE_SHA1_WITH_DSA.get(); } else if ("RSA".equalsIgnoreCase(algo)) { xmlSignatureAlgo = JBossSAMLConstants.SIGNATURE_SHA1_WITH_RSA.get(); } return xmlSignatureAlgo; }
static String function(String algo) { String xmlSignatureAlgo = null; if ("DSA".equalsIgnoreCase(algo)) { xmlSignatureAlgo = JBossSAMLConstants.SIGNATURE_SHA1_WITH_DSA.get(); } else if ("RSA".equalsIgnoreCase(algo)) { xmlSignatureAlgo = JBossSAMLConstants.SIGNATURE_SHA1_WITH_RSA.get(); } return xmlSignatureAlgo; }
/** * Get the XML Signature URI for the algo (RSA, DSA) * * @param algo * * @return */
Get the XML Signature URI for the algo (RSA, DSA)
getXMLSignatureAlgorithmURI
{ "repo_name": "mbaluch/keycloak", "path": "saml-core/src/main/java/org/keycloak/saml/processing/core/saml/v2/util/SignatureUtil.java", "license": "apache-2.0", "size": 11016 }
[ "org.keycloak.saml.common.constants.JBossSAMLConstants" ]
import org.keycloak.saml.common.constants.JBossSAMLConstants;
import org.keycloak.saml.common.constants.*;
[ "org.keycloak.saml" ]
org.keycloak.saml;
565,924
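A small sketch of the lookup above; any algorithm other than DSA or RSA yields null, so callers should guard for that. The "RSA" literal is just an example input.
String algoUri = SignatureUtil.getXMLSignatureAlgorithmURI("RSA"); // SHA1-with-RSA signature URI
if (algoUri == null) {
    throw new IllegalArgumentException("Unsupported signature algorithm");
}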
@Operation(opcode = Opcode.READ_BARRIERED) public static native byte readByte(Object object, int offset);
@Operation(opcode = Opcode.READ_BARRIERED) static native byte function(Object object, int offset);
/** * Reads the memory at address {@code (object + offset)}. The offset is in bytes. * * @param object the base object for the memory access * @param offset the signed offset for the memory access * @return the result of the memory access */
Reads the memory at address (object + offset). The offset is in bytes
readByte
{ "repo_name": "smarr/Truffle", "path": "compiler/src/org.graalvm.compiler.word/src/org/graalvm/compiler/word/BarrieredAccess.java", "license": "gpl-2.0", "size": 51471 }
[ "org.graalvm.compiler.word.Word" ]
import org.graalvm.compiler.word.Word;
import org.graalvm.compiler.word.*;
[ "org.graalvm.compiler" ]
org.graalvm.compiler;
2,056,825
public static JLabel addLabel(Container component, String text, int horizontalAlignment) { return addLabel(component, text, null, horizontalAlignment, null); }
static JLabel function(Container component, String text, int horizontalAlignment) { return addLabel(component, text, null, horizontalAlignment, null); }
/** * Add a new label to a given component * * @param component Component to add the label to * @param text Label's text * @param horizontalAlignment Label's horizontal alignment (e.g. JLabel.LEFT) * @return Created label */
Add a new label to a given component
addLabel
{ "repo_name": "mzmine/mzmine3", "path": "src/main/java/io/github/mzmine/util/GUIUtils.java", "license": "gpl-2.0", "size": 19563 }
[ "java.awt.Container", "javax.swing.JLabel" ]
import java.awt.Container; import javax.swing.JLabel;
import java.awt.*; import javax.swing.*;
[ "java.awt", "javax.swing" ]
java.awt; javax.swing;
2,378,360
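An illustrative call for the helper above, assuming a Swing container built by the caller; the panel layout and label text are arbitrary.
JPanel panel = new JPanel(new GridLayout(0, 2));
JLabel nameLabel = GUIUtils.addLabel(panel, "Name:", JLabel.LEFT); // added to panel and returned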
public boolean beginTrackingIfPossible(Application application) { if (mAutomaticTracker == null) { AutomaticTracker automaticTracker = AutomaticTracker.newInstanceIfPossible(application, this ); if (automaticTracker != null) { automaticTracker.register(); mAutomaticTracker = automaticTracker; return true; } } return false; }
boolean function(Application application) { if (mAutomaticTracker == null) { AutomaticTracker automaticTracker = AutomaticTracker.newInstanceIfPossible(application, this ); if (automaticTracker != null) { automaticTracker.register(); mAutomaticTracker = automaticTracker; return true; } } return false; }
/** * Start automatic tracking if we are running on ICS+. * * @return Automatic tracking has been started. No need to manually invoke {@link #add} or * {@link #remove} methods. */
Start automatic tracking if we are running on ICS+
beginTrackingIfPossible
{ "repo_name": "tryroach/stetho", "path": "stetho/src/main/java/com/facebook/stetho/inspector/elements/android/ActivityTracker.java", "license": "bsd-3-clause", "size": 5743 }
[ "android.app.Application" ]
import android.app.Application;
import android.app.*;
[ "android.app" ]
android.app;
1,429,165
IScheduledFuture<?> scheduleOnKeyOwner(Runnable command, Object key, long delay, TimeUnit unit);
IScheduledFuture<?> scheduleOnKeyOwner(Runnable command, Object key, long delay, TimeUnit unit);
/** * Creates and executes a one-shot action that becomes enabled * after the given delay on the partition owner of the given key. * * @param command the task to execute * @param key the key to identify the partition owner, which will execute the task * @param delay the time from now to delay execution * @param unit the time unit of the delay parameter * @return a ScheduledFuture representing pending completion of * the task and whose {@code get()} method will return * {@code null} upon completion * @throws RejectedExecutionException if the task cannot be * scheduled for execution * @throws NullPointerException if command is null */
Creates and executes a one-shot action that becomes enabled after the given delay on the partition owner of the given key
scheduleOnKeyOwner
{ "repo_name": "tombujok/hazelcast", "path": "hazelcast/src/main/java/com/hazelcast/scheduledexecutor/IScheduledExecutorService.java", "license": "apache-2.0", "size": 20453 }
[ "java.util.concurrent.TimeUnit" ]
import java.util.concurrent.TimeUnit;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
522,017
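A hedged usage sketch for the Hazelcast API above; the executor name, key, and CleanupTask runnable are assumptions, and the runnable has to be serializable so it can run on the key's partition owner.
IScheduledExecutorService scheduler = hazelcastInstance.getScheduledExecutorService("default");
IScheduledFuture<?> future = scheduler.scheduleOnKeyOwner(new CleanupTask(), "customer-42", 30, TimeUnit.SECONDS);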
public String getStringValue(String key){ try { return dictionary.get(key).toJavaObject().toString().trim(); } catch (NullPointerException e){ Main.log(key); Main.sendError(String.format("File %s is missing key %s, defaulting to 0",fileName,key),true,e); return ""; } }
String function(String key){ try { return dictionary.get(key).toJavaObject().toString().trim(); } catch (NullPointerException e){ Main.log(key); Main.sendError(String.format(STR,fileName,key),true,e); return ""; } }
/** * Returns the value in the dictionary based on the key provided * * @param key Key associated with the desired value * @return The value associated with the key in String form * @throws NullPointerException */
Returns the value in the dictionary based on the key provided
getStringValue
{ "repo_name": "FullMetalFalcons/RoboticsScoutingExcel", "path": "src/org/fullmetalfalcons/scouting/teams/Team.java", "license": "mit", "size": 3736 }
[ "org.fullmetalfalcons.scouting.main.Main" ]
import org.fullmetalfalcons.scouting.main.Main;
import org.fullmetalfalcons.scouting.main.*;
[ "org.fullmetalfalcons.scouting" ]
org.fullmetalfalcons.scouting;
1,845,300
private void requestCameraPermission() { Log.w(TAG, "Camera permission is not granted. Requesting permission"); final String[] permissions = new String[]{Manifest.permission.CAMERA}; if (!ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) { ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM); return; } final Activity thisActivity = this;
void function() { Log.w(TAG, STR); final String[] permissions = new String[]{Manifest.permission.CAMERA}; if (!ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) { ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM); return; } final Activity thisActivity = this;
/** * Handles the requesting of the camera permission. This includes * showing a "Snackbar" message of why the permission is needed then * sending the request. */
Handles the requesting of the camera permission. This includes showing a "Snackbar" message of why the permission is needed then sending the request
requestCameraPermission
{ "repo_name": "googlesamples/android-vision", "path": "visionSamples/ocr-codelab/ocr-reader-complete/app/src/main/java/com/google/android/gms/samples/vision/ocrreader/OcrCaptureActivity.java", "license": "apache-2.0", "size": 17277 }
[ "android.app.Activity", "android.support.v4.app.ActivityCompat", "android.util.Log" ]
import android.app.Activity; import android.support.v4.app.ActivityCompat; import android.util.Log;
import android.app.*; import android.support.v4.app.*; import android.util.*;
[ "android.app", "android.support", "android.util" ]
android.app; android.support; android.util;
649,959
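To complete the flow described above, the activity usually handles the callback for RC_HANDLE_CAMERA_PERM; the createCameraSource() follow-up call is an assumption about what happens once the permission is granted.
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    if (requestCode == RC_HANDLE_CAMERA_PERM && grantResults.length > 0
            && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(); // assumed follow-up once the camera permission is granted
        return;
    }
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}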
if (!(lhs instanceof LeftHandSideIdentifier)) throw new RuntimeCompilerException(lhs.getPosition(), "MESSAGE variables must be simple identifiers"); TIdentifier ident = ((LeftHandSideIdentifier) lhs).identifier; Variable variable = codeGen.getVariable(ident.getText()); if (variable instanceof MessageVariable) { return (MessageVariable) variable; } else { FieldReference mlmField = codeGen.createField(ident.getText(), ArdenValue.class, Modifier.PRIVATE); MessageVariable dv = new MessageVariable(ident, mlmField); codeGen.addVariable(dv); return dv; } }
if (!(lhs instanceof LeftHandSideIdentifier)) throw new RuntimeCompilerException(lhs.getPosition(), STR); TIdentifier ident = ((LeftHandSideIdentifier) lhs).identifier; Variable variable = codeGen.getVariable(ident.getText()); if (variable instanceof MessageVariable) { return (MessageVariable) variable; } else { FieldReference mlmField = codeGen.createField(ident.getText(), ArdenValue.class, Modifier.PRIVATE); MessageVariable dv = new MessageVariable(ident, mlmField); codeGen.addVariable(dv); return dv; } }
/** * Gets the MessageVariable for the LHSR, or creates it on demand. */
Gets the MessageVariable for the LHSR, or creates it on demand
getVariable
{ "repo_name": "Tetr4/arden2bytecode", "path": "src/arden/compiler/MessageVariable.java", "license": "gpl-3.0", "size": 3143 }
[ "java.lang.reflect.Modifier" ]
import java.lang.reflect.Modifier;
import java.lang.reflect.*;
[ "java.lang" ]
java.lang;
2,011,156
public List<Object>[] generateRows(BufferedImageContainer img) { return doGenerateRows(img); }
List<Object>[] function(BufferedImageContainer img) { return doGenerateRows(img); }
/** * Performs the actual feature generation. * * @param img the image to process * @return the generated features */
Performs the actual feature generation
generateRows
{ "repo_name": "waikato-datamining/adams-base", "path": "adams-imaging/src/main/java/adams/data/image/features/AbstractScript.java", "license": "gpl-3.0", "size": 2643 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,256,839
@ReturnsMutableObject ("Speed") @SuppressFBWarnings ("EI_EXPOSE_REP") @Nullable public byte [] directGet () { if (isInMemory ()) { _ensureCachedContentIsPresent (); return m_aCachedContent; } return SimpleFileIO.getAllFileBytes (m_aDFOS.getFile ()); }
@ReturnsMutableObject ("Speed") @SuppressFBWarnings (STR) byte [] function () { if (isInMemory ()) { _ensureCachedContentIsPresent (); return m_aCachedContent; } return SimpleFileIO.getAllFileBytes (m_aDFOS.getFile ()); }
/** * Returns the contents of the file as an array of bytes. If the contents of * the file were not yet cached in memory, they will be loaded from the disk * storage and cached. * * @return The contents of the file as an array of bytes. */
Returns the contents of the file as an array of bytes. If the contents of the file were not yet cached in memory, they will be loaded from the disk storage and cached
directGet
{ "repo_name": "phax/ph-web", "path": "ph-web/src/main/java/com/helger/web/fileupload/parse/DiskFileItem.java", "license": "apache-2.0", "size": 20311 }
[ "com.helger.commons.annotation.ReturnsMutableObject", "com.helger.commons.io.file.SimpleFileIO", "edu.umd.cs.findbugs.annotations.SuppressFBWarnings" ]
import com.helger.commons.annotation.ReturnsMutableObject; import com.helger.commons.io.file.SimpleFileIO; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import com.helger.commons.annotation.*; import com.helger.commons.io.file.*; import edu.umd.cs.findbugs.annotations.*;
[ "com.helger.commons", "edu.umd.cs" ]
com.helger.commons; edu.umd.cs;
498,958
List<? extends Name> getAllNames();
List<? extends Name> getAllNames();
/** * Returns all defined names. * * @return a list of the defined names. An empty list is returned if none is found. */
Returns all defined names
getAllNames
{ "repo_name": "lvweiwolf/poi-3.16", "path": "src/java/org/apache/poi/ss/usermodel/Workbook.java", "license": "apache-2.0", "size": 22642 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,369,381
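A brief sketch against the POI interface above; WorkbookFactory.create and the file name are assumptions, and the loop simply prints each defined name with the formula it refers to.
try (Workbook wb = WorkbookFactory.create(new File("report.xlsx"))) {
    for (Name name : wb.getAllNames()) {
        System.out.println(name.getNameName() + " -> " + name.getRefersToFormula());
    }
}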
public void setViewAdapter(WheelViewAdapter viewAdapter) { if (this.mViewAdapter != null) { this.mViewAdapter.unregisterDataSetObserver(mDataObserver); } this.mViewAdapter = viewAdapter; mCurrentItemIdx = 0; if (this.mViewAdapter != null) { this.mViewAdapter.registerDataSetObserver(mDataObserver); } invalidateItemsLayout(true); }
void function(WheelViewAdapter viewAdapter) { if (this.mViewAdapter != null) { this.mViewAdapter.unregisterDataSetObserver(mDataObserver); } this.mViewAdapter = viewAdapter; mCurrentItemIdx = 0; if (this.mViewAdapter != null) { this.mViewAdapter.registerDataSetObserver(mDataObserver); } invalidateItemsLayout(true); }
/** * Sets view adapter. Usually new adapters contain different views, so it * needs to rebuild view by calling measure(). * * @param viewAdapter * the view adapter */
Sets view adapter. Usually new adapters contain different views, so it needs to rebuild view by calling measure()
setViewAdapter
{ "repo_name": "Yndal/ArduPilot-SensorPlatform", "path": "Tower_with_3drservices/Android/src/org/droidplanner/android/widgets/spinnerWheel/AbstractWheel.java", "license": "mit", "size": 23477 }
[ "org.droidplanner.android.widgets.spinnerWheel.adapters.WheelViewAdapter" ]
import org.droidplanner.android.widgets.spinnerWheel.adapters.WheelViewAdapter;
import org.droidplanner.android.widgets.*;
[ "org.droidplanner.android" ]
org.droidplanner.android;
127,964
@SuppressWarnings("IfMayBeConditional") public boolean checkKeyIndexCondition(int masks[], int mask) { assert masks != null; assert masks.length > 0; if (keyAliasColId < 0) return (masks[QueryUtils.KEY_COL] & mask) != 0; else return (masks[QueryUtils.KEY_COL] & mask) != 0 || (masks[keyAliasColId] & mask) != 0; }
@SuppressWarnings(STR) boolean function(int masks[], int mask) { assert masks != null; assert masks.length > 0; if (keyAliasColId < 0) return (masks[QueryUtils.KEY_COL] & mask) != 0; else return (masks[QueryUtils.KEY_COL] & mask) != 0 || (masks[keyAliasColId] & mask) != 0; }
/** * Checks if provided index condition is allowed for key column or key alias column. * * @param masks Array containing Index Condition masks for each column. * @param mask Index Condition to check. * @return Result. */
Checks if provided index condition is allowed for key column or key alias column
checkKeyIndexCondition
{ "repo_name": "xtern/ignite", "path": "modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/opt/GridH2RowDescriptor.java", "license": "apache-2.0", "size": 12368 }
[ "org.apache.ignite.internal.processors.query.QueryUtils" ]
import org.apache.ignite.internal.processors.query.QueryUtils;
import org.apache.ignite.internal.processors.query.*;
[ "org.apache.ignite" ]
org.apache.ignite;
2,554,298
public ResultMatcher isNetworkAuthenticationRequired() { return matcher(HttpStatus.valueOf(511)); }
ResultMatcher function() { return matcher(HttpStatus.valueOf(511)); }
/** * Assert the response status code is {@code HttpStatus.NETWORK_AUTHENTICATION_REQUIRED} (511). */
Assert the response status code is HttpStatus.NETWORK_AUTHENTICATION_REQUIRED (511)
isNetworkAuthenticationRequired
{ "repo_name": "spring-projects/spring-framework", "path": "spring-test/src/main/java/org/springframework/test/web/servlet/result/StatusResultMatchers.java", "license": "apache-2.0", "size": 17758 }
[ "org.springframework.http.HttpStatus", "org.springframework.test.web.servlet.ResultMatcher" ]
import org.springframework.http.HttpStatus; import org.springframework.test.web.servlet.ResultMatcher;
import org.springframework.http.*; import org.springframework.test.web.servlet.*;
[ "org.springframework.http", "org.springframework.test" ]
org.springframework.http; org.springframework.test;
1,833,037
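A short MockMvc sketch for the matcher above; the request path is an assumption, status() refers to MockMvcResultMatchers.status(), and get() is statically imported from MockMvcRequestBuilders.
mockMvc.perform(get("/gateway/resource"))
       .andExpect(status().isNetworkAuthenticationRequired()); // expects HTTP 511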
public void setMaturityTenor(Tenor maturityTenor) { JodaBeanUtils.notNull(maturityTenor, "maturityTenor"); this._maturityTenor = maturityTenor; }
void function(Tenor maturityTenor) { JodaBeanUtils.notNull(maturityTenor, STR); this._maturityTenor = maturityTenor; }
/** * Sets the tenor. * @param maturityTenor the new value of the property, not null */
Sets the tenor
setMaturityTenor
{ "repo_name": "McLeodMoores/starling", "path": "projects/financial/src/main/java/com/opengamma/financial/analytics/ircurve/strips/BondNode.java", "license": "apache-2.0", "size": 7022 }
[ "com.opengamma.util.time.Tenor", "org.joda.beans.JodaBeanUtils" ]
import com.opengamma.util.time.Tenor; import org.joda.beans.JodaBeanUtils;
import com.opengamma.util.time.*; import org.joda.beans.*;
[ "com.opengamma.util", "org.joda.beans" ]
com.opengamma.util; org.joda.beans;
1,775,705
public static void drawBezel(Graphics g, int x, int y, int width, int height, boolean isPressed, boolean isDefault, Color shadow, Color darkShadow, Color highlight, Color lightHighlight) { Color oldColor = g.getColor(); try { if ((isPressed == false) && (isDefault == false)) { drawEtchedRect(g, x, y, width, height, lightHighlight, highlight, shadow, darkShadow); } if ((isPressed == true) && (isDefault == false)) { g.setColor(shadow); g.drawRect(x + 1, y + 1, width - 2, height - 2); } if ((isPressed == false) && (isDefault == true)) { g.setColor(darkShadow); g.drawRect(x, y, width - 1, height - 1); drawEtchedRect(g, x + 1, y + 1, width - 2, height - 2, lightHighlight, highlight, shadow, darkShadow); } if ((isPressed == true) && (isDefault == true)) { g.setColor(darkShadow); g.drawRect(x, y, width - 1, height - 1); g.setColor(shadow); g.drawRect(x + 1, y + 1, width - 3, height - 3); } } finally { g.setColor(oldColor); } }
static void function(Graphics g, int x, int y, int width, int height, boolean isPressed, boolean isDefault, Color shadow, Color darkShadow, Color highlight, Color lightHighlight) { Color oldColor = g.getColor(); try { if ((isPressed == false) && (isDefault == false)) { drawEtchedRect(g, x, y, width, height, lightHighlight, highlight, shadow, darkShadow); } if ((isPressed == true) && (isDefault == false)) { g.setColor(shadow); g.drawRect(x + 1, y + 1, width - 2, height - 2); } if ((isPressed == false) && (isDefault == true)) { g.setColor(darkShadow); g.drawRect(x, y, width - 1, height - 1); drawEtchedRect(g, x + 1, y + 1, width - 2, height - 2, lightHighlight, highlight, shadow, darkShadow); } if ((isPressed == true) && (isDefault == true)) { g.setColor(darkShadow); g.drawRect(x, y, width - 1, height - 1); g.setColor(shadow); g.drawRect(x + 1, y + 1, width - 3, height - 3); } } finally { g.setColor(oldColor); } }
/** * Draws a border that is suitable for buttons of the Basic look and * feel. * * <p><img src="doc-files/BasicGraphicsUtils-3.png" width="500" * height="300" alt="[An illustration that shows which pixels * get painted in what color]" /> * * @param g the graphics into which the rectangle is drawn. * @param x the x coordinate of the rectangle. * @param y the y coordinate of the rectangle. * @param width the width of the rectangle in pixels. * @param height the height of the rectangle in pixels. * * @param isPressed <code>true</code> to draw the button border * with a pressed-in appearance; <code>false</code> for * normal (unpressed) appearance. * * @param isDefault <code>true</code> to draw the border with * the appearance it has when hitting the enter key in a * dialog will simulate a click to this button; * <code>false</code> for normal appearance. * * @param shadow the shadow color. * @param darkShadow a darker variant of the shadow color. * @param highlight the highlight color. * @param lightHighlight a brighter variant of the highlight color. */
Draws a border that is suitable for buttons of the Basic look and feel.
drawBezel
{ "repo_name": "wesen/nmedit", "path": "libs/nmutils/src/gnu/classpath/javax/swing/plaf/basic/BasicGraphicsUtils.java", "license": "gpl-2.0", "size": 23352 }
[ "java.awt.Color", "java.awt.Graphics" ]
import java.awt.Color; import java.awt.Graphics;
import java.awt.*;
[ "java.awt" ]
java.awt;
1,807,307
@Test public void testWriteSetTracking8() throws Exception { dropTable(new String[] {"tab1", "TAB1"}); CommandProcessorResponse cpr = driver.run("create table if not exists tab1 (a int, b int) partitioned by (p string) " + "clustered by (a) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')"); checkCmdOnDriver(cpr); checkCmdOnDriver(driver.run("insert into tab1 partition(p)(a,b,p) values(1,1,'one'),(2,2,'two')"));//txnid:1 HiveTxnManager txnMgr2 = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf); txnMgr2.openTxn(ctx, "T2"); checkCmdOnDriver(driver.compileAndRespond("update tab1 set b = 7 where b=1")); txnMgr2.acquireLocks(driver.getPlan(), ctx, "T2"); List<ShowLocksResponseElement> locks = getLocks(txnMgr2); Assert.assertEquals("Unexpected lock count", 2, locks.size()); checkLock(LockType.SHARED_WRITE, LockState.ACQUIRED, "default", "TAB1", "p=two", locks); checkLock(LockType.SHARED_WRITE, LockState.ACQUIRED, "default", "TAB1", "p=one", locks); //now start concurrent txn txnMgr.openTxn(ctx, "T3"); checkCmdOnDriver(driver.compileAndRespond("update tab1 set b = 7 where p='two'")); ((DbTxnManager)txnMgr).acquireLocks(driver.getPlan(), ctx, "T3", false); locks = getLocks(txnMgr); Assert.assertEquals("Unexpected lock count", 3, locks.size()); checkLock(LockType.SHARED_WRITE, LockState.ACQUIRED, "default", "TAB1", "p=two", locks); checkLock(LockType.SHARED_WRITE, LockState.ACQUIRED, "default", "TAB1", "p=one", locks); checkLock(LockType.SHARED_WRITE, LockState.WAITING, "default", "TAB1", "p=two", locks); //this simulates the completion of txnid:2 AddDynamicPartitions adp = new AddDynamicPartitions(txnMgr2.getCurrentTxnId(), "default", "tab1", Collections.singletonList("p=one")); adp.setOperationType(DataOperationType.UPDATE); txnHandler.addDynamicPartitions(adp); txnMgr2.commitTxn();//txnid:2 ((DbLockManager)txnMgr.getLockManager()).checkLock(locks.get(2).getLockid());//retest WAITING locks (both have same ext id) locks = getLocks(txnMgr); Assert.assertEquals("Unexpected lock count", 1, locks.size()); checkLock(LockType.SHARED_WRITE, LockState.ACQUIRED, "default", "TAB1", "p=two", locks); //completion of txnid:3 adp = new AddDynamicPartitions(txnMgr.getCurrentTxnId(), "default", "tab1", Collections.singletonList("p=two")); adp.setOperationType(DataOperationType.UPDATE); txnHandler.addDynamicPartitions(adp); txnMgr.commitTxn();//txnid:3 Assert.assertEquals("WRITE_SET mismatch: " + TxnDbUtil.queryToString("select * from WRITE_SET"), 1, TxnDbUtil.countQueryAgent("select count(*) from WRITE_SET where ws_partition='p=one' and ws_operation_type='u' and ws_table='tab1'")); Assert.assertEquals("WRITE_SET mismatch: " + TxnDbUtil.queryToString("select * from WRITE_SET"), 1, TxnDbUtil.countQueryAgent("select count(*) from WRITE_SET where ws_partition='p=two' and ws_operation_type='u' and ws_table='tab1'")); Assert.assertEquals("COMPLETED_TXN_COMPONENTS mismatch: " + TxnDbUtil.queryToString("select * from COMPLETED_TXN_COMPONENTS"), 4, TxnDbUtil.countQueryAgent("select count(*) from COMPLETED_TXN_COMPONENTS where ctc_table='tab1' and ctc_partition is not null")); }
void function() throws Exception { dropTable(new String[] {"tab1", "TAB1"}); CommandProcessorResponse cpr = driver.run(STR + STR); checkCmdOnDriver(cpr); checkCmdOnDriver(driver.run(STR)); HiveTxnManager txnMgr2 = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf); txnMgr2.openTxn(ctx, "T2"); checkCmdOnDriver(driver.compileAndRespond(STR)); txnMgr2.acquireLocks(driver.getPlan(), ctx, "T2"); List<ShowLocksResponseElement> locks = getLocks(txnMgr2); Assert.assertEquals(STR, 2, locks.size()); checkLock(LockType.SHARED_WRITE, LockState.ACQUIRED, STR, "TAB1", "p=two", locks); checkLock(LockType.SHARED_WRITE, LockState.ACQUIRED, STR, "TAB1", "p=one", locks); txnMgr.openTxn(ctx, "T3"); checkCmdOnDriver(driver.compileAndRespond(STR)); ((DbTxnManager)txnMgr).acquireLocks(driver.getPlan(), ctx, "T3", false); locks = getLocks(txnMgr); Assert.assertEquals(STR, 3, locks.size()); checkLock(LockType.SHARED_WRITE, LockState.ACQUIRED, STR, "TAB1", "p=two", locks); checkLock(LockType.SHARED_WRITE, LockState.ACQUIRED, STR, "TAB1", "p=one", locks); checkLock(LockType.SHARED_WRITE, LockState.WAITING, STR, "TAB1", "p=two", locks); AddDynamicPartitions adp = new AddDynamicPartitions(txnMgr2.getCurrentTxnId(), STR, "tab1", Collections.singletonList("p=one")); adp.setOperationType(DataOperationType.UPDATE); txnHandler.addDynamicPartitions(adp); txnMgr2.commitTxn(); ((DbLockManager)txnMgr.getLockManager()).checkLock(locks.get(2).getLockid()); locks = getLocks(txnMgr); Assert.assertEquals(STR, 1, locks.size()); checkLock(LockType.SHARED_WRITE, LockState.ACQUIRED, STR, "TAB1", "p=two", locks); adp = new AddDynamicPartitions(txnMgr.getCurrentTxnId(), STR, "tab1", Collections.singletonList("p=two")); adp.setOperationType(DataOperationType.UPDATE); txnHandler.addDynamicPartitions(adp); txnMgr.commitTxn(); Assert.assertEquals(STR + TxnDbUtil.queryToString(STR), 1, TxnDbUtil.countQueryAgent(STR)); Assert.assertEquals(STR + TxnDbUtil.queryToString(STR), 1, TxnDbUtil.countQueryAgent(STR)); Assert.assertEquals(STR + TxnDbUtil.queryToString(STR), 4, TxnDbUtil.countQueryAgent(STR)); }
/** * Concurrent updates with partition pruning predicate and w/o one */
Concurrent updates with partition pruning predicate and w/o one
testWriteSetTracking8
{ "repo_name": "vergilchiu/hive", "path": "ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java", "license": "apache-2.0", "size": 121935 }
[ "java.util.Collections", "java.util.List", "org.apache.hadoop.hive.metastore.api.AddDynamicPartitions", "org.apache.hadoop.hive.metastore.api.DataOperationType", "org.apache.hadoop.hive.metastore.api.LockState", "org.apache.hadoop.hive.metastore.api.LockType", "org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement", "org.apache.hadoop.hive.metastore.txn.TxnDbUtil", "org.apache.hadoop.hive.ql.processors.CommandProcessorResponse", "org.junit.Assert" ]
import java.util.Collections; import java.util.List; import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions; import org.apache.hadoop.hive.metastore.api.DataOperationType; import org.apache.hadoop.hive.metastore.api.LockState; import org.apache.hadoop.hive.metastore.api.LockType; import org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement; import org.apache.hadoop.hive.metastore.txn.TxnDbUtil; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.junit.Assert;
import java.util.*; import org.apache.hadoop.hive.metastore.api.*; import org.apache.hadoop.hive.metastore.txn.*; import org.apache.hadoop.hive.ql.processors.*; import org.junit.*;
[ "java.util", "org.apache.hadoop", "org.junit" ]
java.util; org.apache.hadoop; org.junit;
2,664,430
public void switchCamera() throws CameraOpenException { if (isStreaming() || onPreview) { cameraManager.switchCamera(); } else { cameraManager.setCameraFacing(getCameraFacing() == CameraHelper.Facing.FRONT ? CameraHelper.Facing.BACK : CameraHelper.Facing.FRONT); } }
void function() throws CameraOpenException { if (isStreaming() || onPreview) { cameraManager.switchCamera(); } else { cameraManager.setCameraFacing(getCameraFacing() == CameraHelper.Facing.FRONT ? CameraHelper.Facing.BACK : CameraHelper.Facing.FRONT); } }
/** * Switch camera used. Can be called anytime * * @throws CameraOpenException If the other camera doesn't support same resolution. */
Switch camera used. Can be called anytime
switchCamera
{ "repo_name": "pedroSG94/rtmp-streamer-java", "path": "rtplibrary/src/main/java/com/pedro/rtplibrary/base/Camera2Base.java", "license": "apache-2.0", "size": 30857 }
[ "com.pedro.encoder.input.video.CameraHelper", "com.pedro.encoder.input.video.CameraOpenException" ]
import com.pedro.encoder.input.video.CameraHelper; import com.pedro.encoder.input.video.CameraOpenException;
import com.pedro.encoder.input.video.*;
[ "com.pedro.encoder" ]
com.pedro.encoder;
2,122,981
@Test public void testBuildChunkNullContent() { instance = new SimpleTextExplanationChunkBuilder(); try { instance.buildChunk(context, group, rule, tags, null); fail("Exception should have been thrown, but it wasn't"); } catch (Exception e) { String result = e.getMessage(); String expResult = "The content must be a non-null, non-empty string value"; assertTrue(e instanceof org.goodoldai.jeff.explanation.ExplanationException); assertEquals(expResult, result); } }
void function() { instance = new SimpleTextExplanationChunkBuilder(); try { instance.buildChunk(context, group, rule, tags, null); fail(STR); } catch (Exception e) { String result = e.getMessage(); String expResult = STR; assertTrue(e instanceof org.goodoldai.jeff.explanation.ExplanationException); assertEquals(expResult, result); } }
/** * Test of buildChunk method, of class SimpleTextExplanationChunkBuilder. * Test case: unsuccessful execution - null content */
Test of buildChunk method, of class SimpleTextExplanationChunkBuilder. Test case: unsuccessful execution - null content
testBuildChunkNullContent
{ "repo_name": "bojantomic/jeff", "path": "src/test/java/org/goodoldai/jeff/explanation/builder/SimpleTextExplanationChunkBuilderTest.java", "license": "lgpl-3.0", "size": 4744 }
[ "org.junit.Assert" ]
import org.junit.Assert;
import org.junit.*;
[ "org.junit" ]
org.junit;
997,364
public void onViewCreated(View view, Bundle savedInstanceState);
void function(View view, Bundle savedInstanceState);
/** * On view created. * * @param view * the view * @param savedInstanceState * the saved instance state */
On view created
onViewCreated
{ "repo_name": "yossigruner/AndroidMVC", "path": "src/com/mika/cheggmeout/ui/controllers/interfaces/IBaseController.java", "license": "mit", "size": 2040 }
[ "android.os.Bundle", "android.view.View" ]
import android.os.Bundle; import android.view.View;
import android.os.*; import android.view.*;
[ "android.os", "android.view" ]
android.os; android.view;
22,970
protected void doColumnProfile(ColumnProfileResult cpr, MonitorableImpl pm) throws SQLException, SQLObjectException { logger.debug("Doing profile for column " + cpr.getProfiledObject().getName()); if (pm.isCancelled()) { return; } Connection con = null; Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { SQLColumn col = cpr.getProfiledObject(); SQLDatabase db = col.getParent().getParentDatabase(); con = db.getConnection(); stmt = con.createStatement(); stmt.setEscapeProcessing(false); ProfileFunctionDescriptor pfd = profileFunctionMap.get(col.getSourceDataTypeName()); long profileStartTime = System.currentTimeMillis(); if (pfd == null) { logger.debug(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + ")."); logger.debug("Known data types are: " + profileFunctionMap.keySet()); pfd = discoverProfileFunctionDescriptor(col, con, pm); profileFunctionMap.put(col.getSourceDataTypeName(), pfd); } try { execProfileFunctions(cpr, pfd, col, con, pm); } catch (Exception ex) { cpr.setCreateStartTime(profileStartTime); cpr.setException(ex); cpr.setCreateEndTime(System.currentTimeMillis()); logger.error("Error in Column Profiling: "+lastSQL, ex); } } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } if (con != null) { con.close(); } } }
void function(ColumnProfileResult cpr, MonitorableImpl pm) throws SQLException, SQLObjectException { logger.debug(STR + cpr.getProfiledObject().getName()); if (pm.isCancelled()) { return; } Connection con = null; Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { SQLColumn col = cpr.getProfiledObject(); SQLDatabase db = col.getParent().getParentDatabase(); con = db.getConnection(); stmt = con.createStatement(); stmt.setEscapeProcessing(false); ProfileFunctionDescriptor pfd = profileFunctionMap.get(col.getSourceDataTypeName()); long profileStartTime = System.currentTimeMillis(); if (pfd == null) { logger.debug(col.getName()+ STR + col.getSourceDataTypeName() + ")."); logger.debug(STR + profileFunctionMap.keySet()); pfd = discoverProfileFunctionDescriptor(col, con, pm); profileFunctionMap.put(col.getSourceDataTypeName(), pfd); } try { execProfileFunctions(cpr, pfd, col, con, pm); } catch (Exception ex) { cpr.setCreateStartTime(profileStartTime); cpr.setException(ex); cpr.setCreateEndTime(System.currentTimeMillis()); logger.error(STR+lastSQL, ex); } } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error(STR, ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error(STR, ex); } if (con != null) { con.close(); } } }
/** * Performs profiling at the column level by issuing a SELECT statement against * the column referenced by <tt>cpr</tt>. * * @param cpr The profile result to populate * @param pm The progress monitor. This progress monitor is only used for checking * if the operation is canceled; it is not updated with progress information. */
Performs profiling at the column level by issuing a SELECT statement against the column referenced by cpr
doColumnProfile
{ "repo_name": "amitkr/power-architect", "path": "src/main/java/ca/sqlpower/architect/profile/RemoteDatabaseProfileCreator.java", "license": "gpl-3.0", "size": 30862 }
[ "ca.sqlpower.sqlobject.SQLColumn", "ca.sqlpower.sqlobject.SQLDatabase", "ca.sqlpower.sqlobject.SQLObjectException", "ca.sqlpower.util.MonitorableImpl", "java.sql.Connection", "java.sql.ResultSet", "java.sql.SQLException", "java.sql.Statement" ]
import ca.sqlpower.sqlobject.SQLColumn; import ca.sqlpower.sqlobject.SQLDatabase; import ca.sqlpower.sqlobject.SQLObjectException; import ca.sqlpower.util.MonitorableImpl; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement;
import ca.sqlpower.sqlobject.*; import ca.sqlpower.util.*; import java.sql.*;
[ "ca.sqlpower.sqlobject", "ca.sqlpower.util", "java.sql" ]
ca.sqlpower.sqlobject; ca.sqlpower.util; java.sql;
878,364
private Job createJob() throws IOException { String jobName = "distcp"; Job job = Job.getInstance(getConf()); String userChosenName = job.getJobName(); if (userChosenName != null) jobName += ": " + userChosenName; job.setJobName(jobName); job.setInputFormatClass(DistCpUtils.getStrategy(getConf(), inputOptions)); job.setJarByClass(CopyMapper.class); configureOutputFormat(job); job.setMapperClass(CopyMapper.class); job.setNumReduceTasks(0); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(Text.class); job.setOutputFormatClass(CopyOutputFormat.class); job.setSpeculativeExecution(false); ((JobConf)job.getConfiguration()).setNumMapTasks(inputOptions.getMaxMaps()); if (inputOptions.getSslConfigurationFile() != null) { setupSSLConfig(job); } inputOptions.appendToConf(job.getConfiguration()); return job; }
Job function() throws IOException { String jobName = STR; Job job = Job.getInstance(getConf()); String userChosenName = job.getJobName(); if (userChosenName != null) jobName += STR + userChosenName; job.setJobName(jobName); job.setInputFormatClass(DistCpUtils.getStrategy(getConf(), inputOptions)); job.setJarByClass(CopyMapper.class); configureOutputFormat(job); job.setMapperClass(CopyMapper.class); job.setNumReduceTasks(0); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(Text.class); job.setOutputFormatClass(CopyOutputFormat.class); job.setSpeculativeExecution(false); ((JobConf)job.getConfiguration()).setNumMapTasks(inputOptions.getMaxMaps()); if (inputOptions.getSslConfigurationFile() != null) { setupSSLConfig(job); } inputOptions.appendToConf(job.getConfiguration()); return job; }
/** * Create Job object for submitting it, with all the configuration * * @return Reference to job object. * @throws IOException - Exception if any */
Create Job object for submitting it, with all the configuration
createJob
{ "repo_name": "gndpig/hadoop", "path": "src/tools/org/apache/hadoop/tools/distcp2/DistCp.java", "license": "apache-2.0", "size": 14742 }
[ "java.io.IOException", "org.apache.hadoop.io.Text", "org.apache.hadoop.mapred.JobConf", "org.apache.hadoop.mapreduce.Job", "org.apache.hadoop.tools.distcp2.mapred.CopyMapper", "org.apache.hadoop.tools.distcp2.mapred.CopyOutputFormat", "org.apache.hadoop.tools.distcp2.util.DistCpUtils" ]
import java.io.IOException; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.tools.distcp2.mapred.CopyMapper; import org.apache.hadoop.tools.distcp2.mapred.CopyOutputFormat; import org.apache.hadoop.tools.distcp2.util.DistCpUtils;
import java.io.*; import org.apache.hadoop.io.*; import org.apache.hadoop.mapred.*; import org.apache.hadoop.mapreduce.*; import org.apache.hadoop.tools.distcp2.mapred.*; import org.apache.hadoop.tools.distcp2.util.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
1,277,253
@ApiModelProperty(example = "null", value = "When the POS started unanchoring, for starbases (POSes) in unanchoring state") public OffsetDateTime getUnanchorAt() { return unanchorAt; }
@ApiModelProperty(example = "null", value = STR) OffsetDateTime function() { return unanchorAt; }
/** * When the POS started unanchoring, for starbases (POSes) in unanchoring * state * * @return unanchorAt **/
When the POS started unanchoring, for starbases (POSes) in unanchoring state
getUnanchorAt
{ "repo_name": "GoldenGnu/eve-esi", "path": "src/main/java/net/troja/eve/esi/model/CorporationStarbasesResponse.java", "license": "apache-2.0", "size": 8793 }
[ "io.swagger.annotations.ApiModelProperty", "java.time.OffsetDateTime" ]
import io.swagger.annotations.ApiModelProperty; import java.time.OffsetDateTime;
import io.swagger.annotations.*; import java.time.*;
[ "io.swagger.annotations", "java.time" ]
io.swagger.annotations; java.time;
2,816,258
public Map<String, Object> readKeyValues(Deserializer deserializer) { Map<String, Object> result = new HashMap<String, Object>(); readKeyValues(result, deserializer); return result; }
Map<String, Object> function(Deserializer deserializer) { Map<String, Object> result = new HashMap<String, Object>(); readKeyValues(result, deserializer); return result; }
/** * Read key - value pairs. This is required for the RecordSet * deserializer. */
Read key - value pairs. This is required for the RecordSet deserializer
readKeyValues
{ "repo_name": "cwpenhale/red5-mobileconsole", "path": "red5_server/src/main/java/org/red5/io/amf/Input.java", "license": "apache-2.0", "size": 19582 }
[ "java.util.HashMap", "java.util.Map", "org.red5.io.object.Deserializer" ]
import java.util.HashMap; import java.util.Map; import org.red5.io.object.Deserializer;
import java.util.*; import org.red5.io.object.*;
[ "java.util", "org.red5.io" ]
java.util; org.red5.io;
1,920,409
public void initialize() { for (TabModel model : mTabModelSelector.getModels()) model.addObserver(mTabModelObserver); mTabModelSelector.addObserver(mTabModelSelectorObserver); }
void function() { for (TabModel model : mTabModelSelector.getModels()) model.addObserver(mTabModelObserver); mTabModelSelector.addObserver(mTabModelSelectorObserver); }
/** * Initialize the wrapper to listen for the proper notifications. */
Initialize the wrapper to listen for the proper notifications
initialize
{ "repo_name": "axinging/chromium-crosswalk", "path": "chrome/android/java/src/org/chromium/chrome/browser/widget/emptybackground/EmptyBackgroundViewWrapper.java", "license": "bsd-3-clause", "size": 6346 }
[ "org.chromium.chrome.browser.tabmodel.TabModel" ]
import org.chromium.chrome.browser.tabmodel.TabModel;
import org.chromium.chrome.browser.tabmodel.*;
[ "org.chromium.chrome" ]
org.chromium.chrome;
256,252
public List<STData> readGrADSData_Station(int timeIdx) throws FileNotFoundException, UnsupportedEncodingException, IOException { List<STData> stDataList = new ArrayList<>(); String filePath = DSET; int tIdx = timeIdx; if (OPTIONS.template) { Object[] result = getFilePath_Template(timeIdx); filePath = (String) result[0]; tIdx = (Integer) result[1]; } RandomAccessFile br = new RandomAccessFile(filePath, "r"); int i, j, tNum; STDataHead aSTDH; STLevData aSTLevData; STData aSTData; int varNum = VARDEF.getVNum(); int uVarNum = this.getUpperVariables().size(); if (uVarNum > 0) { varNum = varNum - uVarNum; } byte[] aBytes; tNum = 0; if (OPTIONS.template) { timeIdx = 0; } do { aSTDH = new STDataHead(); aBytes = getByteArray(br, 8); aSTDH.STID = new String(aBytes); aBytes = getByteArray(br, 4); aSTDH.Lat = DataConvert.bytes2Float(aBytes, _byteOrder); aBytes = getByteArray(br, 4); aSTDH.Lon = DataConvert.bytes2Float(aBytes, _byteOrder); aBytes = getByteArray(br, 4); aSTDH.T = DataConvert.bytes2Float(aBytes, _byteOrder); aBytes = getByteArray(br, 4); aSTDH.NLev = DataConvert.bytes2Int(aBytes, _byteOrder); aBytes = getByteArray(br, 4); aSTDH.Flag = DataConvert.bytes2Int(aBytes, _byteOrder); if (aSTDH.NLev > 0) { aSTData = new STData(); aSTData.STHead = aSTDH; aSTData.dataList = new ArrayList<>(); if (aSTDH.Flag == 1) //Has ground level { aSTLevData = new STLevData(); aSTLevData.data = new float[varNum]; for (i = 0; i < varNum; i++) { aBytes = getByteArray(br, 4); aSTLevData.data[i] = DataConvert.bytes2Float(aBytes, _byteOrder); } aSTLevData.lev = 0; aSTData.dataList.add(aSTLevData); } if (aSTDH.NLev - aSTDH.Flag > 0) //Has upper level { for (i = 0; i < aSTDH.NLev - aSTDH.Flag; i++) { aBytes = getByteArray(br, 4); aSTLevData = new STLevData(); aSTLevData.lev = DataConvert.bytes2Float(aBytes, _byteOrder); aSTLevData.data = new float[uVarNum]; for (j = 0; j < uVarNum; j++) { aBytes = getByteArray(br, 4); aSTLevData.data[j] = DataConvert.bytes2Float(aBytes, _byteOrder); } aSTData.dataList.add(aSTLevData); } } if (tNum == tIdx) { stDataList.add(aSTData); } } else //End of time seriel { if (tNum == tIdx) { break; } tNum += 1; if (br.getFilePointer() + 28 >= br.length()) { break; } } } while (true); br.close(); return stDataList; }
List<STData> function(int timeIdx) throws FileNotFoundException, UnsupportedEncodingException, IOException { List<STData> stDataList = new ArrayList<>(); String filePath = DSET; int tIdx = timeIdx; if (OPTIONS.template) { Object[] result = getFilePath_Template(timeIdx); filePath = (String) result[0]; tIdx = (Integer) result[1]; } RandomAccessFile br = new RandomAccessFile(filePath, "r"); int i, j, tNum; STDataHead aSTDH; STLevData aSTLevData; STData aSTData; int varNum = VARDEF.getVNum(); int uVarNum = this.getUpperVariables().size(); if (uVarNum > 0) { varNum = varNum - uVarNum; } byte[] aBytes; tNum = 0; if (OPTIONS.template) { timeIdx = 0; } do { aSTDH = new STDataHead(); aBytes = getByteArray(br, 8); aSTDH.STID = new String(aBytes); aBytes = getByteArray(br, 4); aSTDH.Lat = DataConvert.bytes2Float(aBytes, _byteOrder); aBytes = getByteArray(br, 4); aSTDH.Lon = DataConvert.bytes2Float(aBytes, _byteOrder); aBytes = getByteArray(br, 4); aSTDH.T = DataConvert.bytes2Float(aBytes, _byteOrder); aBytes = getByteArray(br, 4); aSTDH.NLev = DataConvert.bytes2Int(aBytes, _byteOrder); aBytes = getByteArray(br, 4); aSTDH.Flag = DataConvert.bytes2Int(aBytes, _byteOrder); if (aSTDH.NLev > 0) { aSTData = new STData(); aSTData.STHead = aSTDH; aSTData.dataList = new ArrayList<>(); if (aSTDH.Flag == 1) { aSTLevData = new STLevData(); aSTLevData.data = new float[varNum]; for (i = 0; i < varNum; i++) { aBytes = getByteArray(br, 4); aSTLevData.data[i] = DataConvert.bytes2Float(aBytes, _byteOrder); } aSTLevData.lev = 0; aSTData.dataList.add(aSTLevData); } if (aSTDH.NLev - aSTDH.Flag > 0) { for (i = 0; i < aSTDH.NLev - aSTDH.Flag; i++) { aBytes = getByteArray(br, 4); aSTLevData = new STLevData(); aSTLevData.lev = DataConvert.bytes2Float(aBytes, _byteOrder); aSTLevData.data = new float[uVarNum]; for (j = 0; j < uVarNum; j++) { aBytes = getByteArray(br, 4); aSTLevData.data[j] = DataConvert.bytes2Float(aBytes, _byteOrder); } aSTData.dataList.add(aSTLevData); } } if (tNum == tIdx) { stDataList.add(aSTData); } } else { if (tNum == tIdx) { break; } tNum += 1; if (br.getFilePointer() + 28 >= br.length()) { break; } } } while (true); br.close(); return stDataList; }
/** * Read GrADS station data * * @param timeIdx Time index * @return Station data list * @throws java.io.FileNotFoundException * @throws java.io.UnsupportedEncodingException */
Read GrADS station data
readGrADSData_Station
{ "repo_name": "meteoinfo/meteoinfolib", "path": "src/org/meteoinfo/data/meteodata/grads/GrADSDataInfo.java", "license": "lgpl-3.0", "size": 115651 }
[ "java.io.FileNotFoundException", "java.io.IOException", "java.io.RandomAccessFile", "java.io.UnsupportedEncodingException", "java.util.ArrayList", "java.util.List", "org.meteoinfo.global.DataConvert" ]
import java.io.FileNotFoundException; import java.io.IOException; import java.io.RandomAccessFile; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.List; import org.meteoinfo.global.DataConvert;
import java.io.*; import java.util.*; import org.meteoinfo.global.*;
[ "java.io", "java.util", "org.meteoinfo.global" ]
java.io; java.util; org.meteoinfo.global;
561,808
public ExpressionClause<ThrottleDefinition> throttle() { ThrottleDefinition answer = new ThrottleDefinition(); addOutput(answer); return ExpressionClause.createAndSetExpression(answer); }
ExpressionClause<ThrottleDefinition> function() { ThrottleDefinition answer = new ThrottleDefinition(); addOutput(answer); return ExpressionClause.createAndSetExpression(answer); }
/** * <a href="http://camel.apache.org/throttler.html">Throttler EIP:</a> * Creates a throttler using a fluent builder. * * @return the builder */
Creates a throttler using a fluent builder
throttle
{ "repo_name": "dmvolod/camel", "path": "camel-core/src/main/java/org/apache/camel/model/ProcessorDefinition.java", "license": "apache-2.0", "size": 177777 }
[ "org.apache.camel.builder.ExpressionClause" ]
import org.apache.camel.builder.ExpressionClause;
import org.apache.camel.builder.*;
[ "org.apache.camel" ]
org.apache.camel;
1,920,235
@Override protected DataPoint newDataPoint(Point2D smoothed) { return new TimeseriesPoint(new Date((long) smoothed.getX()), smoothed.getY()); }
DataPoint function(Point2D smoothed) { return new TimeseriesPoint(new Date((long) smoothed.getX()), smoothed.getY()); }
/** * Creates a new DataPoint from the smoothed one. * * @param smoothed the smoothed data point * @return the new DataPoint */
Creates a new DataPoint from the smoothed one
newDataPoint
{ "repo_name": "waikato-datamining/adams-base", "path": "adams-timeseries/src/main/java/adams/data/filter/TimeseriesLOWESS.java", "license": "gpl-3.0", "size": 3150 }
[ "java.awt.geom.Point2D", "java.util.Date" ]
import java.awt.geom.Point2D; import java.util.Date;
import java.awt.geom.*; import java.util.*;
[ "java.awt", "java.util" ]
java.awt; java.util;
819,720
protected void addEndPropertyDescriptor(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString("_UI_SetType_end_feature"), getString("_UI_PropertyDescriptor_description", "_UI_SetType_end_feature", "_UI_SetType_type"), LanguagePackage.Literals.SET_TYPE__END, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); }
void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), LanguagePackage.Literals.SET_TYPE__END, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); }
/** * This adds a property descriptor for the End feature. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This adds a property descriptor for the End feature.
addEndPropertyDescriptor
{ "repo_name": "markus1978/citygml4emf", "path": "de.hub.citygml.emf.ecore.edit/src/org/w3/_2001/smil20/language/provider/SetTypeItemProvider.java", "license": "apache-2.0", "size": 255549 }
[ "org.eclipse.emf.edit.provider.ComposeableAdapterFactory", "org.eclipse.emf.edit.provider.ItemPropertyDescriptor", "org.w3._2001.smil20.language.LanguagePackage" ]
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor; import org.w3._2001.smil20.language.LanguagePackage;
import org.eclipse.emf.edit.provider.*; import org.w3.*;
[ "org.eclipse.emf", "org.w3" ]
org.eclipse.emf; org.w3;
2,134,430
private Map mapFileToName(Set fileIds) { Map m = new HashMap(); for (Iterator itr = fileIds.iterator(); itr.hasNext();) { Long id = (Long)itr.next(); ConfigFile cf = ConfigurationFactory.lookupConfigFileById(id); if (cf != null) { m.put(id, cf.getConfigFileName().getId()); } } return m; }
Map function(Set fileIds) { Map m = new HashMap(); for (Iterator itr = fileIds.iterator(); itr.hasNext();) { Long id = (Long)itr.next(); ConfigFile cf = ConfigurationFactory.lookupConfigFileById(id); if (cf != null) { m.put(id, cf.getConfigFileName().getId()); } } return m; }
/** * From file id, get file.fileName and map to file-id * @param fileIds set of file-ids of interest * @return Map<Long,Long> of file-id to cfn-id */
From file id, get file.fileName and map to file-id
mapFileToName
{ "repo_name": "ogajduse/spacewalk", "path": "java/code/src/com/redhat/rhn/manager/configuration/ConfigurationManager.java", "license": "gpl-2.0", "size": 102493 }
[ "com.redhat.rhn.domain.config.ConfigFile", "com.redhat.rhn.domain.config.ConfigurationFactory", "java.util.HashMap", "java.util.Iterator", "java.util.Map", "java.util.Set" ]
import com.redhat.rhn.domain.config.ConfigFile; import com.redhat.rhn.domain.config.ConfigurationFactory; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set;
import com.redhat.rhn.domain.config.*; import java.util.*;
[ "com.redhat.rhn", "java.util" ]
com.redhat.rhn; java.util;
2,361,184
public void testSSLSocketImpl3() throws Exception { Server server = null; SSLSocket socket = null; try { server = new Server(); socket = new SSLSocketImpl( "localhost", server.getPort(), InetAddress.getByName("localhost"), 0, JSSETestData.getSSLParameters()); socket.setUseClientMode(true);
void function() throws Exception { Server server = null; SSLSocket socket = null; try { server = new Server(); socket = new SSLSocketImpl( STR, server.getPort(), InetAddress.getByName(STR), 0, JSSETestData.getSSLParameters()); socket.setUseClientMode(true);
/** * SSLSocketImpl(String host, int port, InetAddress localHost, int * localPort, SSLParameters sslParameters) method testing. */
SSLSocketImpl(String host, int port, InetAddress localHost, int localPort, SSLParameters sslParameters) method testing
testSSLSocketImpl3
{ "repo_name": "s20121035/rk3288_android5.1_repo", "path": "external/apache-harmony/x-net/src/test/impl/java.injected/org/apache/harmony/xnet/provider/jsse/SSLSocketImplTest.java", "license": "gpl-3.0", "size": 39896 }
[ "java.net.InetAddress", "javax.net.ssl.SSLSocket" ]
import java.net.InetAddress; import javax.net.ssl.SSLSocket;
import java.net.*; import javax.net.ssl.*;
[ "java.net", "javax.net" ]
java.net; javax.net;
960,052
private boolean createCandidateString(String input, HashMap<String,String> map, StringBuffer outBuf) { if (outBuf.length() > 0) { outBuf.delete(0, outBuf.length()); } for (int index = 0; index < input.length(); index++) { String convChar = map.get(input.substring(index, index + 1)); if (convChar == null) { return false; } outBuf.append(convChar); } return true; }
boolean function(String input, HashMap<String,String> map, StringBuffer outBuf) { if (outBuf.length() > 0) { outBuf.delete(0, outBuf.length()); } for (int index = 0; index < input.length(); index++) { String convChar = map.get(input.substring(index, index + 1)); if (convChar == null) { return false; } outBuf.append(convChar); } return true; }
/** * Create the candidate string * <br> * @param input The input string * @param map The hash map * @param outBuf The output string * @return {@code true} if success */
Create the candidate string
createCandidateString
{ "repo_name": "s20121035/rk3288_android5.1_repo", "path": "packages/inputmethods/OpenWnn/src/jp/co/omronsoft/openwnn/JAJP/KanaConverter.java", "license": "gpl-3.0", "size": 24128 }
[ "java.util.HashMap" ]
import java.util.HashMap;
import java.util.*;
[ "java.util" ]
java.util;
519,506
public BaseNetworkContent getNetworkContent();
BaseNetworkContent function();
/** * Get data which gets stored in the network * * @return data to store */
Get data which gets stored in the network
getNetworkContent
{ "repo_name": "Hive2Hive/Hive2Hive", "path": "org.hive2hive.core/src/main/java/org/hive2hive/core/network/data/parameters/IParameters.java", "license": "mit", "size": 3975 }
[ "org.hive2hive.core.model.BaseNetworkContent" ]
import org.hive2hive.core.model.BaseNetworkContent;
import org.hive2hive.core.model.*;
[ "org.hive2hive.core" ]
org.hive2hive.core;
614,948
public static HashDescriptionWriter hashWriter(HashFunction func) { return new HashDescriptionWriter(func.newHasher()); }
static HashDescriptionWriter function(HashFunction func) { return new HashDescriptionWriter(func.newHasher()); }
/** * Create a description writer for a particular hash function. * @param func The hash function. * @return A description writer that computes a hash using {@code func}. */
Create a description writer for a particular hash function
hashWriter
{ "repo_name": "vijayvani/Lenskit", "path": "lenskit-core/src/main/java/org/grouplens/lenskit/util/io/Descriptions.java", "license": "lgpl-2.1", "size": 5284 }
[ "com.google.common.hash.HashFunction" ]
import com.google.common.hash.HashFunction;
import com.google.common.hash.*;
[ "com.google.common" ]
com.google.common;
1,467,163
public String getLocalName() { return name; } private static final Map<String, Element> MAP; static { final Map<String, Element> map = new HashMap<String, Element>(8); for (Element element : values()) { final String name = element.getLocalName(); if (name != null) { map.put(name, element); } } MAP = map; }
String function() { return name; } private static final Map<String, Element> MAP; static { final Map<String, Element> map = new HashMap<String, Element>(8); for (Element element : values()) { final String name = element.getLocalName(); if (name != null) { map.put(name, element); } } MAP = map; }
/** * Get the local name of this element. * * @return the local name */
Get the local name of this element
getLocalName
{ "repo_name": "nmldiegues/stibt", "path": "infinispan/cachestore/remote/src/main/java/org/infinispan/loaders/remote/configuration/as/Element.java", "license": "apache-2.0", "size": 2268 }
[ "java.util.HashMap", "java.util.Map" ]
import java.util.HashMap; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,850,861
@Override public T visitTypeArgs(@NotNull ScalaParser.TypeArgsContext ctx) { return visitChildren(ctx); }
@Override public T visitTypeArgs(@NotNull ScalaParser.TypeArgsContext ctx) { return visitChildren(ctx); }
/** * {@inheritDoc} * * <p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p> */
The default implementation returns the result of calling <code>#visitChildren</code> on ctx
visitClassParamClauses
{ "repo_name": "IsThisThePayneResidence/intellidots", "path": "src/main/java/ua/edu/hneu/ast/parsers/ScalaBaseVisitor.java", "license": "gpl-3.0", "size": 26845 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
1,020,814
public boolean includes(DocumentEvent event) { return includes(event.getDocument(), event.getOffset(), event.getLength()); }
boolean function(DocumentEvent event) { return includes(event.getDocument(), event.getOffset(), event.getLength()); }
/** * Returns whether this position includes <code>event</code>. * * @param event the event to check. * @return <code>true</code> if this position includes <code>event</code>, * <code>false</code> otherwise */
Returns whether this position includes <code>event</code>
includes
{ "repo_name": "neelance/jface4ruby", "path": "jface4ruby/src/org/eclipse/jface/text/link/LinkedPosition.java", "license": "epl-1.0", "size": 5689 }
[ "org.eclipse.jface.text.DocumentEvent" ]
import org.eclipse.jface.text.DocumentEvent;
import org.eclipse.jface.text.*;
[ "org.eclipse.jface" ]
org.eclipse.jface;
2,068,621
public String getDeviceModel() { return Build.MODEL; }
String function() { return Build.MODEL; }
/** * get device model * * Full BMOS on Passion */
get device model Full BMOS on Passion
getDeviceModel
{ "repo_name": "BaiduQA/Cafe", "path": "testservice/src/com/baidu/cafe/remote/SystemLib.java", "license": "apache-2.0", "size": 87756 }
[ "android.os.Build" ]
import android.os.Build;
import android.os.*;
[ "android.os" ]
android.os;
2,232,025
@Test public void starGlob() { EventFilter rootFilter = new GlobbingPathFilter("a/*/c"); NodeState a = tree.getChild("a").getNodeState(); assertFalse(rootFilter.includeAdd("a", a)); EventFilter aFilter = rootFilter.create("a", a, a); assertNotNull(aFilter); NodeState b = a.getChildNode("b"); assertFalse(aFilter.includeAdd("b", b)); EventFilter bFilter = aFilter.create("b", b, b); assertNotNull(bFilter); NodeState c = b.getChildNode("c"); assertTrue(bFilter.includeAdd("c", b)); assertFalse(bFilter.includeAdd("x", b)); assertNull(bFilter.create("c", c, c)); }
void function() { EventFilter rootFilter = new GlobbingPathFilter("a/*/c"); NodeState a = tree.getChild("a").getNodeState(); assertFalse(rootFilter.includeAdd("a", a)); EventFilter aFilter = rootFilter.create("a", a, a); assertNotNull(aFilter); NodeState b = a.getChildNode("b"); assertFalse(aFilter.includeAdd("b", b)); EventFilter bFilter = aFilter.create("b", b, b); assertNotNull(bFilter); NodeState c = b.getChildNode("c"); assertTrue(bFilter.includeAdd("c", b)); assertFalse(bFilter.includeAdd("x", b)); assertNull(bFilter.create("c", c, c)); }
/** * a/*&#47c should match a/b/c */
a/*/c should match a/b/c
starGlob
{ "repo_name": "mduerig/jackrabbit-oak", "path": "oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/observation/filter/GlobbingPathFilterTest.java", "license": "apache-2.0", "size": 11605 }
[ "org.apache.jackrabbit.oak.spi.state.NodeState", "org.junit.Assert" ]
import org.apache.jackrabbit.oak.spi.state.NodeState; import org.junit.Assert;
import org.apache.jackrabbit.oak.spi.state.*; import org.junit.*;
[ "org.apache.jackrabbit", "org.junit" ]
org.apache.jackrabbit; org.junit;
1,584,863
public void minimizeFrame(JInternalFrame frame) { Rectangle normalBounds = frame.getNormalBounds(); JDesktopPane p = frame.getDesktopPane(); if (p != null) p.setSelectedFrame(frame); else { try { frame.setSelected(true); } catch (PropertyVetoException e) { // Do nothing. } } setBoundsForFrame(frame, normalBounds.x, normalBounds.y, normalBounds.width, normalBounds.height); }
void function(JInternalFrame frame) { Rectangle normalBounds = frame.getNormalBounds(); JDesktopPane p = frame.getDesktopPane(); if (p != null) p.setSelectedFrame(frame); else { try { frame.setSelected(true); } catch (PropertyVetoException e) { } } setBoundsForFrame(frame, normalBounds.x, normalBounds.y, normalBounds.width, normalBounds.height); }
/** * This method restores the JInternalFrame's bounds to what they were * previous to the setMaximize call. * * @param frame The JInternalFrame to minimize. */
This method restores the JInternalFrame's bounds to what they were previous to the setMaximize call
minimizeFrame
{ "repo_name": "shaotuanchen/sunflower_exp", "path": "tools/source/gcc-4.2.4/libjava/classpath/javax/swing/DefaultDesktopManager.java", "license": "bsd-3-clause", "size": 18383 }
[ "java.awt.Rectangle", "java.beans.PropertyVetoException" ]
import java.awt.Rectangle; import java.beans.PropertyVetoException;
import java.awt.*; import java.beans.*;
[ "java.awt", "java.beans" ]
java.awt; java.beans;
1,726,029
@Override public Map<LocalDate, MultipleCurrencyAmount> visitCouponIborDefinition(final CouponIborDefinition coupon) { ArgumentChecker.notNull(coupon, "coupon"); final LocalDate endDate = coupon.getPaymentDate().toLocalDate(); if (coupon.getNotional() < 0) { return Collections.emptyMap(); } final double amount = coupon.getNotional() * coupon.getFixingPeriodAccrualFactor(); return Collections.singletonMap(endDate, MultipleCurrencyAmount.of(CurrencyAmount.of(coupon.getCurrency(), amount))); }
Map<LocalDate, MultipleCurrencyAmount> function(final CouponIborDefinition coupon) { ArgumentChecker.notNull(coupon, STR); final LocalDate endDate = coupon.getPaymentDate().toLocalDate(); if (coupon.getNotional() < 0) { return Collections.emptyMap(); } final double amount = coupon.getNotional() * coupon.getFixingPeriodAccrualFactor(); return Collections.singletonMap(endDate, MultipleCurrencyAmount.of(CurrencyAmount.of(coupon.getCurrency(), amount))); }
/** * If the notional is negative (i.e. the amount is to be paid), returns * an empty map. Otherwise, returns a map containing a single payment date and the notional amount * multiplied by the accrual period. * @param coupon The coupon instrument, not null * @return A map containing the (single) payment date and amount, or an empty map, as appropriate */
If the notional is negative (i.e. the amount is to be paid), returns an empty map. Otherwise, returns a map containing a single payment date and the notional amount multiplied by the accrual period
visitCouponIborDefinition
{ "repo_name": "jeorme/OG-Platform", "path": "projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/instrument/FloatingReceiveCashFlowVisitor.java", "license": "apache-2.0", "size": 14588 }
[ "com.opengamma.analytics.financial.instrument.payment.CouponIborDefinition", "com.opengamma.util.ArgumentChecker", "com.opengamma.util.money.CurrencyAmount", "com.opengamma.util.money.MultipleCurrencyAmount", "java.util.Collections", "java.util.Map", "org.threeten.bp.LocalDate" ]
import com.opengamma.analytics.financial.instrument.payment.CouponIborDefinition; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.money.CurrencyAmount; import com.opengamma.util.money.MultipleCurrencyAmount; import java.util.Collections; import java.util.Map; import org.threeten.bp.LocalDate;
import com.opengamma.analytics.financial.instrument.payment.*; import com.opengamma.util.*; import com.opengamma.util.money.*; import java.util.*; import org.threeten.bp.*;
[ "com.opengamma.analytics", "com.opengamma.util", "java.util", "org.threeten.bp" ]
com.opengamma.analytics; com.opengamma.util; java.util; org.threeten.bp;
799,747
public void mergeFromOsmPois(List<Poi> remotePois, Box box) { List<Poi> toMergePois = new ArrayList<>(); Map<String, Poi> remotePoisMap = new HashMap<>(); // Map remote Poi backend Ids for (Poi poi : remotePois) { remotePoisMap.put(poi.getBackendId(), poi); } // List matching Pois List<Poi> localPois = poiDao.queryForAllInRect(box); Map<String, Poi> localPoisMap = new ConcurrentHashMap<>(); // Map matching local Pois for (Poi localPoi : localPois) { localPoisMap.put(localPoi.getBackendId(), localPoi); } // Browse remote pois for (Poi remotePoi : remotePois) { Poi localPoi = localPoisMap.remove(remotePoi.getBackendId()); Long localVersion = -1L; // If localPoi is versioned if (localPoi != null && localPoi.getVersion() != null) { localVersion = Long.valueOf(localPoi.getVersion()); } // Compute version delta if (Long.valueOf(remotePoi.getVersion()) > localVersion) { // Remote version is newer, override existing one if (localPoi != null) { remotePoi.setId(localPoi.getId()); } // This Poi should be updated toMergePois.add(remotePoi); } } poiDao.delete(localPoisMap.values()); // savePois of either new or existing Pois savePois(toMergePois); }
void function(List<Poi> remotePois, Box box) { List<Poi> toMergePois = new ArrayList<>(); Map<String, Poi> remotePoisMap = new HashMap<>(); for (Poi poi : remotePois) { remotePoisMap.put(poi.getBackendId(), poi); } List<Poi> localPois = poiDao.queryForAllInRect(box); Map<String, Poi> localPoisMap = new ConcurrentHashMap<>(); for (Poi localPoi : localPois) { localPoisMap.put(localPoi.getBackendId(), localPoi); } for (Poi remotePoi : remotePois) { Poi localPoi = localPoisMap.remove(remotePoi.getBackendId()); Long localVersion = -1L; if (localPoi != null && localPoi.getVersion() != null) { localVersion = Long.valueOf(localPoi.getVersion()); } if (Long.valueOf(remotePoi.getVersion()) > localVersion) { if (localPoi != null) { remotePoi.setId(localPoi.getId()); } toMergePois.add(remotePoi); } } poiDao.delete(localPoisMap.values()); savePois(toMergePois); }
/** * Merge POIs in parameters to those already in the database. * * @param remotePois The POIs to merge. */
Merge POIs in parameters to those already in the database
mergeFromOsmPois
{ "repo_name": "mapsquare/osm-contributor", "path": "src/main/java/io/jawg/osmcontributor/ui/managers/PoiManager.java", "license": "gpl-3.0", "size": 34966 }
[ "io.jawg.osmcontributor.model.entities.Poi", "io.jawg.osmcontributor.utils.Box", "java.util.ArrayList", "java.util.HashMap", "java.util.List", "java.util.Map", "java.util.concurrent.ConcurrentHashMap" ]
import io.jawg.osmcontributor.model.entities.Poi; import io.jawg.osmcontributor.utils.Box; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap;
import io.jawg.osmcontributor.model.entities.*; import io.jawg.osmcontributor.utils.*; import java.util.*; import java.util.concurrent.*;
[ "io.jawg.osmcontributor", "java.util" ]
io.jawg.osmcontributor; java.util;
1,835,562
Optional<DiscreteResource> lookup(DiscreteResourceId id);
Optional<DiscreteResource> lookup(DiscreteResourceId id);
/** * Look up a discrete resource instance by ID. * * @param id id * @return found instance enclosed by Optional */
Look up a discrete resource instance by ID
lookup
{ "repo_name": "Shashikanth-Huawei/bmp", "path": "core/store/dist/src/main/java/org/onosproject/store/resource/impl/DiscreteResources.java", "license": "apache-2.0", "size": 3125 }
[ "java.util.Optional", "org.onosproject.net.resource.DiscreteResource", "org.onosproject.net.resource.DiscreteResourceId" ]
import java.util.Optional; import org.onosproject.net.resource.DiscreteResource; import org.onosproject.net.resource.DiscreteResourceId;
import java.util.*; import org.onosproject.net.resource.*;
[ "java.util", "org.onosproject.net" ]
java.util; org.onosproject.net;
651,065
public void edgeOverUtilized(Edge edge);
void function(Edge edge);
/** * Called when an Edge utilization is above the safety threshold * configured on the controller * * @param edge The edge which bandwidth usage is above the safety level */
Called when an Edge utilization is above the safety threshold configured on the controller
edgeOverUtilized
{ "repo_name": "Milstein/controllerODP", "path": "opendaylight/sal/api/src/main/java/org/opendaylight/controller/sal/topology/IListenTopoUpdates.java", "license": "epl-1.0", "size": 1784 }
[ "org.opendaylight.controller.sal.core.Edge" ]
import org.opendaylight.controller.sal.core.Edge;
import org.opendaylight.controller.sal.core.*;
[ "org.opendaylight.controller" ]
org.opendaylight.controller;
1,769,188
public void removeListener(INotifyChangedListener notifyChangedListener) { changeNotifier.removeListener(notifyChangedListener); }
void function(INotifyChangedListener notifyChangedListener) { changeNotifier.removeListener(notifyChangedListener); }
/** * This removes a listener. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This removes a listener.
removeListener
{ "repo_name": "ZsoltKovari/incquery-examples-cps", "path": "domains/org.eclipse.incquery.examples.cps.model.edit/src/org/eclipse/incquery/examples/cps/cyberPhysicalSystem/provider/CyberPhysicalSystemItemProviderAdapterFactory.java", "license": "epl-1.0", "size": 13614 }
[ "org.eclipse.emf.edit.provider.INotifyChangedListener" ]
import org.eclipse.emf.edit.provider.INotifyChangedListener;
import org.eclipse.emf.edit.provider.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
2,239,753
private void validatePsmPerPeptideAnnotationNamesUniqueWithinSearchProgramAndType( SearchProgram searchProgram ) throws ProxlImporterDataException { Set<String> annotationNames = new HashSet<>(); SearchProgram.PsmPerPeptideAnnotationTypes psmPerPeptideAnnotationTypes = searchProgram.getPsmPerPeptideAnnotationTypes(); if ( psmPerPeptideAnnotationTypes == null ) { if ( log.isInfoEnabled() ) { String msg = "No PsmPerPeptide Annotation Types for search program name: " + searchProgram.getName(); log.info(msg); } return; } ////// Filterable Psm PerPeptide Annotations FilterablePsmPerPeptideAnnotationTypes filterablePsmPerPeptideAnnotationTypes = psmPerPeptideAnnotationTypes.getFilterablePsmPerPeptideAnnotationTypes(); if ( filterablePsmPerPeptideAnnotationTypes == null ) { if ( log.isInfoEnabled() ) { String msg = "No Filterable PsmPerPeptide Annotation Types for search program name: " + searchProgram.getName(); log.info(msg); } } else { List<FilterablePsmPerPeptideAnnotationType> filterablePsmPerPeptideAnnotationTypeList = filterablePsmPerPeptideAnnotationTypes.getFilterablePsmPerPeptideAnnotationType(); if ( filterablePsmPerPeptideAnnotationTypeList == null || filterablePsmPerPeptideAnnotationTypeList.isEmpty() ) { if ( log.isInfoEnabled() ) { String msg = "No Filterable PsmPerPeptide Annotation Types for search program name: " + searchProgram.getName(); log.info(msg); } } else { for ( FilterablePsmPerPeptideAnnotationType filterablePsmPerPeptideAnnotationType : filterablePsmPerPeptideAnnotationTypeList ) { String annotationName = filterablePsmPerPeptideAnnotationType.getName(); if ( ! annotationNames.add( annotationName ) ) { String msg = "Annotation name '" + annotationName + "'" + " occurs more than once for PsmPerPeptide annotation types for search program " + "'" + searchProgram.getName() + "'."; log.error( msg ); throw new ProxlImporterDataException( msg ); } } } } //////// Descriptive Psm PerPeptide Annotations DescriptivePsmPerPeptideAnnotationTypes descriptivePsmPerPeptideAnnotationTypes = psmPerPeptideAnnotationTypes.getDescriptivePsmPerPeptideAnnotationTypes(); if ( descriptivePsmPerPeptideAnnotationTypes == null ) { if ( log.isInfoEnabled() ) { String msg = "No Descriptive PsmPerPeptide Annotation Types for search program name: " + searchProgram.getName(); log.info(msg); } } else { List<DescriptivePsmPerPeptideAnnotationType> descriptivePsmPerPeptideAnnotationTypeList = descriptivePsmPerPeptideAnnotationTypes.getDescriptivePsmPerPeptideAnnotationType(); if ( descriptivePsmPerPeptideAnnotationTypeList == null || descriptivePsmPerPeptideAnnotationTypeList.isEmpty() ) { if ( log.isInfoEnabled() ) { String msg = "No Descriptive PsmPerPeptide Annotation Types for search program name: " + searchProgram.getName(); log.info(msg); } } else { for ( DescriptivePsmPerPeptideAnnotationType descriptivePsmPerPeptideAnnotationType : descriptivePsmPerPeptideAnnotationTypeList ) { String annotationName = descriptivePsmPerPeptideAnnotationType.getName(); if ( ! annotationNames.add( annotationName ) ) { String msg = "Annotation name '" + annotationName + "'" + " occurs more than once for PsmPerPeptide annotation types for search program " + "'" + searchProgram.getName() + "'."; log.error( msg ); throw new ProxlImporterDataException( msg ); } } } } }
void function( SearchProgram searchProgram ) throws ProxlImporterDataException { Set<String> annotationNames = new HashSet<>(); SearchProgram.PsmPerPeptideAnnotationTypes psmPerPeptideAnnotationTypes = searchProgram.getPsmPerPeptideAnnotationTypes(); if ( psmPerPeptideAnnotationTypes == null ) { if ( log.isInfoEnabled() ) { String msg = STR + searchProgram.getName(); log.info(msg); } return; } FilterablePsmPerPeptideAnnotationTypes filterablePsmPerPeptideAnnotationTypes = psmPerPeptideAnnotationTypes.getFilterablePsmPerPeptideAnnotationTypes(); if ( filterablePsmPerPeptideAnnotationTypes == null ) { if ( log.isInfoEnabled() ) { String msg = STR + searchProgram.getName(); log.info(msg); } } else { List<FilterablePsmPerPeptideAnnotationType> filterablePsmPerPeptideAnnotationTypeList = filterablePsmPerPeptideAnnotationTypes.getFilterablePsmPerPeptideAnnotationType(); if ( filterablePsmPerPeptideAnnotationTypeList == null || filterablePsmPerPeptideAnnotationTypeList.isEmpty() ) { if ( log.isInfoEnabled() ) { String msg = STR + searchProgram.getName(); log.info(msg); } } else { for ( FilterablePsmPerPeptideAnnotationType filterablePsmPerPeptideAnnotationType : filterablePsmPerPeptideAnnotationTypeList ) { String annotationName = filterablePsmPerPeptideAnnotationType.getName(); if ( ! annotationNames.add( annotationName ) ) { String msg = STR + annotationName + "'" + STR + "'" + searchProgram.getName() + "'."; log.error( msg ); throw new ProxlImporterDataException( msg ); } } } } DescriptivePsmPerPeptideAnnotationTypes descriptivePsmPerPeptideAnnotationTypes = psmPerPeptideAnnotationTypes.getDescriptivePsmPerPeptideAnnotationTypes(); if ( descriptivePsmPerPeptideAnnotationTypes == null ) { if ( log.isInfoEnabled() ) { String msg = STR + searchProgram.getName(); log.info(msg); } } else { List<DescriptivePsmPerPeptideAnnotationType> descriptivePsmPerPeptideAnnotationTypeList = descriptivePsmPerPeptideAnnotationTypes.getDescriptivePsmPerPeptideAnnotationType(); if ( descriptivePsmPerPeptideAnnotationTypeList == null || descriptivePsmPerPeptideAnnotationTypeList.isEmpty() ) { if ( log.isInfoEnabled() ) { String msg = STR + searchProgram.getName(); log.info(msg); } } else { for ( DescriptivePsmPerPeptideAnnotationType descriptivePsmPerPeptideAnnotationType : descriptivePsmPerPeptideAnnotationTypeList ) { String annotationName = descriptivePsmPerPeptideAnnotationType.getName(); if ( ! annotationNames.add( annotationName ) ) { String msg = STR + annotationName + "'" + STR + "'" + searchProgram.getName() + "'."; log.error( msg ); throw new ProxlImporterDataException( msg ); } } } } }
/** * validate Psm Per Peptide Annotation Types * * @param searchProgram * @throws ProxlImporterDataException */
validate Psm Per Peptide Annotation Types
validatePsmPerPeptideAnnotationNamesUniqueWithinSearchProgramAndType
{ "repo_name": "yeastrc/proxl-web-app", "path": "proxl_importer/src/main/java/org/yeastrc/proxl/import_xml_to_db/pre_validate_xml/ValidateAnnotationTypeRecords.java", "license": "apache-2.0", "size": 17658 }
[ "java.util.HashSet", "java.util.List", "java.util.Set", "org.yeastrc.proxl.import_xml_to_db.exceptions.ProxlImporterDataException", "org.yeastrc.proxl_import.api.xml_dto.DescriptivePsmPerPeptideAnnotationType", "org.yeastrc.proxl_import.api.xml_dto.DescriptivePsmPerPeptideAnnotationTypes", "org.yeastrc.proxl_import.api.xml_dto.FilterablePsmPerPeptideAnnotationType", "org.yeastrc.proxl_import.api.xml_dto.FilterablePsmPerPeptideAnnotationTypes", "org.yeastrc.proxl_import.api.xml_dto.SearchProgram" ]
import java.util.HashSet; import java.util.List; import java.util.Set; import org.yeastrc.proxl.import_xml_to_db.exceptions.ProxlImporterDataException; import org.yeastrc.proxl_import.api.xml_dto.DescriptivePsmPerPeptideAnnotationType; import org.yeastrc.proxl_import.api.xml_dto.DescriptivePsmPerPeptideAnnotationTypes; import org.yeastrc.proxl_import.api.xml_dto.FilterablePsmPerPeptideAnnotationType; import org.yeastrc.proxl_import.api.xml_dto.FilterablePsmPerPeptideAnnotationTypes; import org.yeastrc.proxl_import.api.xml_dto.SearchProgram;
import java.util.*; import org.yeastrc.proxl.import_xml_to_db.exceptions.*; import org.yeastrc.proxl_import.api.xml_dto.*;
[ "java.util", "org.yeastrc.proxl", "org.yeastrc.proxl_import" ]
java.util; org.yeastrc.proxl; org.yeastrc.proxl_import;
454,902
private String extractID(String label) { Matcher matcher = OLSON_ID_PATTERN.matcher(label); if (matcher.find()) { String id = matcher.group(); try { DateTimeZone.forID(id); // just to check whether it exists } catch (IllegalArgumentException e) { // not found return null; } return id; } return null; }
String function(String label) { Matcher matcher = OLSON_ID_PATTERN.matcher(label); if (matcher.find()) { String id = matcher.group(); try { DateTimeZone.forID(id); } catch (IllegalArgumentException e) { return null; } return id; } return null; }
/** * Looks for a substring that corresponds to an Olson ID. * * @param label the string to search through * @return the substring that represents an Olson ID */
Looks for a substring that corresponds to an Olson ID
extractID
{ "repo_name": "accesstest3/cfunambol", "path": "common/pim-framework/src/main/java/com/funambol/common/pim/converter/TimeZoneHelper.java", "license": "agpl-3.0", "size": 50332 }
[ "java.util.regex.Matcher", "org.joda.time.DateTimeZone" ]
import java.util.regex.Matcher; import org.joda.time.DateTimeZone;
import java.util.regex.*; import org.joda.time.*;
[ "java.util", "org.joda.time" ]
java.util; org.joda.time;
590,886
public Set<String> getFinalParameters() { Set<String> setFinalParams = Collections.newSetFromMap( new ConcurrentHashMap<String, Boolean>()); setFinalParams.addAll(finalParameters); return setFinalParams; }
Set<String> function() { Set<String> setFinalParams = Collections.newSetFromMap( new ConcurrentHashMap<String, Boolean>()); setFinalParams.addAll(finalParameters); return setFinalParams; }
/** * Get the set of parameters marked final. * * @return final parameter set. */
Get the set of parameters marked final
getFinalParameters
{ "repo_name": "NJUJYB/disYarn", "path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java", "license": "apache-2.0", "size": 104133 }
[ "java.util.Collections", "java.util.Set", "java.util.concurrent.ConcurrentHashMap" ]
import java.util.Collections; import java.util.Set; import java.util.concurrent.ConcurrentHashMap;
import java.util.*; import java.util.concurrent.*;
[ "java.util" ]
java.util;
1,892,434
return this.token; } public static final Operator INCREMENT = new Operator("++");//$NON-NLS-1$ public static final Operator DECREMENT = new Operator("--");//$NON-NLS-1$ public static final Operator PLUS = new Operator("+");//$NON-NLS-1$ public static final Operator MINUS = new Operator("-");//$NON-NLS-1$ public static final Operator COMPLEMENT = new Operator("~");//$NON-NLS-1$ public static final Operator NOT = new Operator("!");//$NON-NLS-1$ private static final Map CODES; static { CODES = new HashMap(20); Operator[] ops = {INCREMENT, DECREMENT, PLUS, MINUS, COMPLEMENT, NOT,}; for (int i = 0; i < ops.length; i++) { CODES.put(ops[i].toString(), ops[i]); } }
return this.token; } public static final Operator INCREMENT = new Operator("++"); public static final Operator DECREMENT = new Operator("--"); public static final Operator PLUS = new Operator("+"); public static final Operator MINUS = new Operator("-"); public static final Operator COMPLEMENT = new Operator("~"); public static final Operator NOT = new Operator("!"); private static final Map CODES; static { CODES = new HashMap(20); Operator[] ops = {INCREMENT, DECREMENT, PLUS, MINUS, COMPLEMENT, NOT,}; for (int i = 0; i < ops.length; i++) { CODES.put(ops[i].toString(), ops[i]); } }
/** * Returns the character sequence for the operator. * * @return the character sequence for the operator */
Returns the character sequence for the operator
toString
{ "repo_name": "riuvshin/che-plugins", "path": "plugin-java/che-plugin-java-ext-java/src/main/java/org/eclipse/che/ide/ext/java/jdt/core/dom/PrefixExpression.java", "license": "epl-1.0", "size": 11109 }
[ "java.util.HashMap", "java.util.Map" ]
import java.util.HashMap; import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,248,833
@Test(expected=SocketTimeoutException.class) public void testInterDNProtocolTimeout() throws Throwable { final Server server = new TestServer(1, true); server.start(); final InetSocketAddress addr = NetUtils.getConnectAddress(server); DatanodeID fakeDnId = DFSTestUtil.getLocalDatanodeID(addr.getPort()); DatanodeInfo dInfo = new DatanodeInfo(fakeDnId); InterDatanodeProtocol proxy = null; try { proxy = DataNode.createInterDataNodeProtocolProxy( dInfo, conf, 500, false); proxy.initReplicaRecovery(new RecoveringBlock( new ExtendedBlock("bpid", 1), null, 100)); fail ("Expected SocketTimeoutException exception, but did not get."); } finally { if (proxy != null) { RPC.stopProxy(proxy); } server.stop(); } }
@Test(expected=SocketTimeoutException.class) void function() throws Throwable { final Server server = new TestServer(1, true); server.start(); final InetSocketAddress addr = NetUtils.getConnectAddress(server); DatanodeID fakeDnId = DFSTestUtil.getLocalDatanodeID(addr.getPort()); DatanodeInfo dInfo = new DatanodeInfo(fakeDnId); InterDatanodeProtocol proxy = null; try { proxy = DataNode.createInterDataNodeProtocolProxy( dInfo, conf, 500, false); proxy.initReplicaRecovery(new RecoveringBlock( new ExtendedBlock("bpid", 1), null, 100)); fail (STR); } finally { if (proxy != null) { RPC.stopProxy(proxy); } server.stop(); } }
/** Test to verify that InterDatanode RPC timesout as expected when * the server DN does not respond. */
Test to verify that InterDatanode RPC times out as expected when the server DN does not respond
testInterDNProtocolTimeout
{ "repo_name": "NJUJYB/disYarn", "path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestInterDatanodeProtocol.java", "license": "apache-2.0", "size": 16299 }
[ "java.net.InetSocketAddress", "java.net.SocketTimeoutException", "org.apache.hadoop.hdfs.DFSTestUtil", "org.apache.hadoop.hdfs.protocol.DatanodeID", "org.apache.hadoop.hdfs.protocol.DatanodeInfo", "org.apache.hadoop.hdfs.protocol.ExtendedBlock", "org.apache.hadoop.hdfs.server.datanode.DataNode", "org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand", "org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol", "org.apache.hadoop.ipc.RPC", "org.apache.hadoop.ipc.Server", "org.apache.hadoop.net.NetUtils", "org.junit.Test" ]
import java.net.InetSocketAddress; import java.net.SocketTimeoutException; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand; import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; import org.junit.Test;
import java.net.*; import org.apache.hadoop.hdfs.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.server.datanode.*; import org.apache.hadoop.hdfs.server.protocol.*; import org.apache.hadoop.ipc.*; import org.apache.hadoop.net.*; import org.junit.*;
[ "java.net", "org.apache.hadoop", "org.junit" ]
java.net; org.apache.hadoop; org.junit;
669,090
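The record above is itself a test; for contrast, here is a self-contained JUnit 4 sketch of the same expect-a-timeout pattern using only JDK sockets, so it does not depend on the Hadoop test harness. Port and timeout values are arbitrary.

import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;
import org.junit.Test;

public class ReadTimeoutTest {
    @Test(expected = SocketTimeoutException.class)
    public void readTimesOut() throws Exception {
        try (ServerSocket server = new ServerSocket(0);             // never writes back
             Socket client = new Socket("localhost", server.getLocalPort())) {
            client.setSoTimeout(200);                               // 200 ms read timeout
            client.getInputStream().read();                         // blocks, then throws
        }
    }
}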
@Override protected FlexiBean createRootData() { FlexiBean out = super.createRootData(); LegalEntityDocument latestDoc = data().getLegalEntity(); LegalEntityDocument versionedLegalEntity = data().getVersioned(); out.put("latestLegalEntityDoc", latestDoc); out.put("latestLegalEntity", latestDoc.getLegalEntity()); out.put("legalEntityDoc", versionedLegalEntity); out.put("legalEntity", versionedLegalEntity.getLegalEntity()); out.put("legalEntityXML", StringEscapeUtils.escapeJavaScript(createXML(versionedLegalEntity.getLegalEntity()))); out.put("deleted", !latestDoc.isLatest()); return out; } //-------------------------------------------------------------------------
FlexiBean function() { FlexiBean out = super.createRootData(); LegalEntityDocument latestDoc = data().getLegalEntity(); LegalEntityDocument versionedLegalEntity = data().getVersioned(); out.put(STR, latestDoc); out.put(STR, latestDoc.getLegalEntity()); out.put(STR, versionedLegalEntity); out.put(STR, versionedLegalEntity.getLegalEntity()); out.put(STR, StringEscapeUtils.escapeJavaScript(createXML(versionedLegalEntity.getLegalEntity()))); out.put(STR, !latestDoc.isLatest()); return out; }
/** * Creates the output root data. * * @return the output root data, not null */
Creates the output root data
createRootData
{ "repo_name": "jeorme/OG-Platform", "path": "projects/OG-Web/src/main/java/com/opengamma/web/legalentity/WebLegalEntityVersionResource.java", "license": "apache-2.0", "size": 3762 }
[ "com.opengamma.master.legalentity.LegalEntityDocument", "org.apache.commons.lang.StringEscapeUtils", "org.joda.beans.impl.flexi.FlexiBean" ]
import com.opengamma.master.legalentity.LegalEntityDocument; import org.apache.commons.lang.StringEscapeUtils; import org.joda.beans.impl.flexi.FlexiBean;
import com.opengamma.master.legalentity.*; import org.apache.commons.lang.*; import org.joda.beans.impl.flexi.*;
[ "com.opengamma.master", "org.apache.commons", "org.joda.beans" ]
com.opengamma.master; org.apache.commons; org.joda.beans;
2,027,644
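A hedged sketch of the FlexiBean idiom the createRootData record relies on: named values are dropped into a dynamic bean for the view layer to read by key. The key names and values below are illustrative, not the ones the resource actually uses.

import org.joda.beans.impl.flexi.FlexiBean;

FlexiBean out = new FlexiBean();
out.put("legalEntityName", "Example Corp");   // hypothetical template key/value
out.put("deleted", false);
System.out.println(out.get("legalEntityName"));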
List queryForList(String id, Object parameterObject) throws SQLException;
List queryForList(String id, Object parameterObject) throws SQLException;
/** * Executes a mapped SQL SELECT statement that returns data to populate * a number of result objects. * <p/> * The parameter object is generally used to supply the input * data for the WHERE clause parameter(s) of the SELECT statement. * * @param id The name of the statement to execute. * @param parameterObject The parameter object (e.g. JavaBean, Map, XML etc.). * @return A List of result objects. * @throws java.sql.SQLException If an error occurs. */
Executes a mapped SQL SELECT statement that returns data to populate a number of result objects. The parameter object is generally used to supply the input data for the WHERE clause parameter(s) of the SELECT statement
queryForList
{ "repo_name": "cavajtennis/ibatis", "path": "java/mapper/mapper2/src/com/ibatis/sqlmap/client/SqlMapExecutor.java", "license": "apache-2.0", "size": 9902 }
[ "java.sql.SQLException", "java.util.List" ]
import java.sql.SQLException; import java.util.List;
import java.sql.*; import java.util.*;
[ "java.sql", "java.util" ]
java.sql; java.util;
1,260,648
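A hedged usage sketch for queryForList: the mapped statement id and config path are hypothetical, and the String parameter stands in for the WHERE-clause input described in the Javadoc.

import java.io.Reader;
import java.util.List;
import com.ibatis.common.resources.Resources;
import com.ibatis.sqlmap.client.SqlMapClient;
import com.ibatis.sqlmap.client.SqlMapClientBuilder;

Reader reader = Resources.getResourceAsReader("SqlMapConfig.xml");     // config path assumed
SqlMapClient sqlMap = SqlMapClientBuilder.buildSqlMapClient(reader);   // SqlMapClient extends SqlMapExecutor
List results = sqlMap.queryForList("selectActiveAccounts", "ACTIVE");  // hypothetical statement id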
@SuppressWarnings("unchecked") public static <F,T> Expression getJoinedEntityField(From<?, F> grandparentJoin, Attribute<?, T> parentJoinAttr, SingularAttribute fieldAttr) { // task -> * -> origJoin -> (fieldParentAttr field in) tojoinType -> fieldAttr Class toAttrJoinType; if( parentJoinAttr instanceof SingularAttribute ) { toAttrJoinType = parentJoinAttr.getJavaType(); } else if( parentJoinAttr instanceof PluralAttribute ) { toAttrJoinType = ((PluralAttribute) parentJoinAttr).getElementType().getJavaType(); } else { String joinName = parentJoinAttr.getDeclaringType().getJavaType().getSimpleName() + "." + parentJoinAttr.getName(); throw new IllegalStateException("Unknown attribute type encountered when trying to join " + joinName ); } Join<F, T> fieldParentJoin = null; for( Join<F, ?> join : grandparentJoin.getJoins() ) { if( join.getJavaType().equals(toAttrJoinType) ) { if( join.getAttribute().equals(parentJoinAttr) ) { fieldParentJoin = (Join<F, T>) join; if( ! JoinType.INNER.equals(fieldParentJoin.getJoinType()) ) { // This a criteria set by the user (as opposed to the user-limiting criteria) -- it MUST be followed // This means that the join is not optional (LEFT) but mandatory (INNER) fieldParentJoin = null; } break; } } } if( fieldParentJoin == null ) { if( parentJoinAttr instanceof SingularAttribute) { fieldParentJoin = grandparentJoin.join((SingularAttribute) parentJoinAttr); } else if( parentJoinAttr instanceof CollectionAttribute) { fieldParentJoin = grandparentJoin.join((CollectionAttribute) parentJoinAttr); } else if( parentJoinAttr instanceof ListAttribute) { fieldParentJoin = grandparentJoin.join((ListAttribute) parentJoinAttr); } else if( parentJoinAttr instanceof SetAttribute) { fieldParentJoin = grandparentJoin.join((SetAttribute) parentJoinAttr); } else { throw new IllegalStateException("Unknown attribute type encountered when trying to join" + parentJoinAttr.getName() ); } } return fieldParentJoin.get(fieldAttr); }
@SuppressWarnings(STR) static <F,T> Expression function(From<?, F> grandparentJoin, Attribute<?, T> parentJoinAttr, SingularAttribute fieldAttr) { Class toAttrJoinType; if( parentJoinAttr instanceof SingularAttribute ) { toAttrJoinType = parentJoinAttr.getJavaType(); } else if( parentJoinAttr instanceof PluralAttribute ) { toAttrJoinType = ((PluralAttribute) parentJoinAttr).getElementType().getJavaType(); } else { String joinName = parentJoinAttr.getDeclaringType().getJavaType().getSimpleName() + "." + parentJoinAttr.getName(); throw new IllegalStateException(STR + joinName ); } Join<F, T> fieldParentJoin = null; for( Join<F, ?> join : grandparentJoin.getJoins() ) { if( join.getJavaType().equals(toAttrJoinType) ) { if( join.getAttribute().equals(parentJoinAttr) ) { fieldParentJoin = (Join<F, T>) join; if( ! JoinType.INNER.equals(fieldParentJoin.getJoinType()) ) { fieldParentJoin = null; } break; } } } if( fieldParentJoin == null ) { if( parentJoinAttr instanceof SingularAttribute) { fieldParentJoin = grandparentJoin.join((SingularAttribute) parentJoinAttr); } else if( parentJoinAttr instanceof CollectionAttribute) { fieldParentJoin = grandparentJoin.join((CollectionAttribute) parentJoinAttr); } else if( parentJoinAttr instanceof ListAttribute) { fieldParentJoin = grandparentJoin.join((ListAttribute) parentJoinAttr); } else if( parentJoinAttr instanceof SetAttribute) { fieldParentJoin = grandparentJoin.join((SetAttribute) parentJoinAttr); } else { throw new IllegalStateException(STR + parentJoinAttr.getName() ); } } return fieldParentJoin.get(fieldAttr); }
/** * This retrieves the correct field ({@link Expression}) that should be used when building the {@link Predicate}. * </p> * This field is necessary because of the amount of joins and the complexity in the human-task schema. * * @param grandparentJoin This is the parent join, * for example the join between TaskDataImpl -> PeopleAssignments * @param parentJoinAttr This is the {@link Attribute} with the information over the join (from the parent) that we need to create, * for example the {@link SingularAttribute}<{@link PeopleAssignmentsImpl}, {@link OrganizationalEntityImpl}> {@link Attribute}. * @param fieldAttr This is the {@link Attribute} with the actual attribute that we create an {@link Expression} to build a {@link Predicate} for, * for example the {@link OrganizationalEntityImpl_#id} field. * @return an {@link Expression} that can be used in a predicate with the values/parameters from a {@link QueryCriteria} instance */
This retrieves the correct field (<code>Expression</code>) that should be used when building the <code>Predicate</code>. This field is necessary because of the number of joins and the complexity of the human-task schema
getJoinedEntityField
{ "repo_name": "OnePaaS/jbpm", "path": "jbpm-human-task/jbpm-human-task-jpa/src/main/java/org/jbpm/services/task/persistence/TaskQueryCriteriaUtil.java", "license": "apache-2.0", "size": 31036 }
[ "javax.persistence.criteria.Expression", "javax.persistence.criteria.From", "javax.persistence.criteria.Join", "javax.persistence.criteria.JoinType", "javax.persistence.metamodel.Attribute", "javax.persistence.metamodel.CollectionAttribute", "javax.persistence.metamodel.ListAttribute", "javax.persistence.metamodel.PluralAttribute", "javax.persistence.metamodel.SetAttribute", "javax.persistence.metamodel.SingularAttribute" ]
import javax.persistence.criteria.Expression; import javax.persistence.criteria.From; import javax.persistence.criteria.Join; import javax.persistence.criteria.JoinType; import javax.persistence.metamodel.Attribute; import javax.persistence.metamodel.CollectionAttribute; import javax.persistence.metamodel.ListAttribute; import javax.persistence.metamodel.PluralAttribute; import javax.persistence.metamodel.SetAttribute; import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.criteria.*; import javax.persistence.metamodel.*;
[ "javax.persistence" ]
javax.persistence;
1,816,751
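A generic, hedged sketch of the join-reuse idea behind getJoinedEntityField: before creating a join, scan the existing ones and reuse a compatible LEFT join. It addresses the attribute by name rather than by metamodel attribute purely for brevity; it is not the jBPM utility itself.

import javax.persistence.criteria.From;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType;

static <F> Join<F, ?> reuseOrJoin(From<?, F> from, String attributeName) {
    for (Join<F, ?> join : from.getJoins()) {
        if (join.getAttribute().getName().equals(attributeName)
                && join.getJoinType() == JoinType.LEFT) {
            return join;   // reuse the optional (LEFT) join already present
        }
    }
    return from.join(attributeName, JoinType.LEFT);   // otherwise create it
}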
InfluxMetric metric(NodeId nodeId, String metricName);
InfluxMetric metric(NodeId nodeId, String metricName);
/** * Returns a last metric value from a given node. * * @param nodeId node identification * @param metricName metric name * @return a metric value from a given node */
Returns the last metric value from a given node
metric
{ "repo_name": "donNewtonAlpha/onos", "path": "apps/influxdbmetrics/src/main/java/org/onosproject/influxdbmetrics/InfluxDbMetricsRetriever.java", "license": "apache-2.0", "size": 3650 }
[ "org.onosproject.cluster.NodeId" ]
import org.onosproject.cluster.NodeId;
import org.onosproject.cluster.*;
[ "org.onosproject.cluster" ]
org.onosproject.cluster;
2,389,919
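A hedged sketch of calling the metric method above. In a real ONOS component the retriever would be injected (for example via an OSGi reference); the node address and metric name below are made up, and the InfluxMetric package is assumed to match the retriever's.

import org.onosproject.cluster.NodeId;
import org.onosproject.influxdbmetrics.InfluxDbMetricsRetriever;
import org.onosproject.influxdbmetrics.InfluxMetric;

static void printLastMetric(InfluxDbMetricsRetriever retriever) {
    NodeId node = NodeId.nodeId("10.0.0.1");                     // example node id
    InfluxMetric latest = retriever.metric(node, "memory.used"); // hypothetical metric name
    System.out.println(latest);
}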
//----------------------------------------------------------------------- public ObservableId getRateId() { return rateId; }
ObservableId function() { return rateId; }
/** * Gets the identifier of the market data value that provides the rate. * @return the value of the property, not null */
Gets the identifier of the market data value that provides the rate
getRateId
{ "repo_name": "OpenGamma/Strata", "path": "modules/market/src/main/java/com/opengamma/strata/market/curve/node/FixedInflationSwapCurveNode.java", "license": "apache-2.0", "size": 28897 }
[ "com.opengamma.strata.data.ObservableId" ]
import com.opengamma.strata.data.ObservableId;
import com.opengamma.strata.data.*;
[ "com.opengamma.strata" ]
com.opengamma.strata;
1,486,352
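A hedged sketch of what the rate id is for: it keys the node's quoted fixed rate in market data. The node and MarketData instances are assumed to come from elsewhere (for example a curve group definition and a market data snapshot).

import com.opengamma.strata.data.MarketData;
import com.opengamma.strata.data.ObservableId;
import com.opengamma.strata.market.curve.node.FixedInflationSwapCurveNode;

static double quotedRate(FixedInflationSwapCurveNode node, MarketData marketData) {
    ObservableId id = node.getRateId();
    return marketData.getValue(id);   // the market quote this node calibrates to
}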
public synchronized void clear() { if(tails.length != 0) throw new IllegalStateException("You cannot clear() a " + RingBufferControlWorkerPool.class.getSimpleName() + " with outstanding consumers."); cursor.set(RingBufferConsumerControl.INITIAL_CURSOR_VALUE); pubHeadAndTailCache.nextValue = RingBufferConsumerControl.INITIAL_CURSOR_VALUE; pubHeadAndTailCache.tailCache = RingBufferConsumerControl.INITIAL_CURSOR_VALUE; workSequence.set(RingBufferConsumerControl.INITIAL_CURSOR_VALUE); tails = new Sequence[0]; commonStop.set(Long.MAX_VALUE); firstWorker = newWorker(true); firstWorkerGiven = false; }
synchronized void function() { if(tails.length != 0) throw new IllegalStateException(STR + RingBufferControlWorkerPool.class.getSimpleName() + STR); cursor.set(RingBufferConsumerControl.INITIAL_CURSOR_VALUE); pubHeadAndTailCache.nextValue = RingBufferConsumerControl.INITIAL_CURSOR_VALUE; pubHeadAndTailCache.tailCache = RingBufferConsumerControl.INITIAL_CURSOR_VALUE; workSequence.set(RingBufferConsumerControl.INITIAL_CURSOR_VALUE); tails = new Sequence[0]; commonStop.set(Long.MAX_VALUE); firstWorker = newWorker(true); firstWorkerGiven = false; }
/** * The {@link RingBufferControlWorkerPool} can be cleared or "reset" so that it can be * reused from scratch HOWEVER, the workers must be re-retrieved via newWorker(); none * of the outstanding workers can be reused. Also, if any outstanding workers are still working * when clear() is called they will likely deadlock. */
The <code>RingBufferControlWorkerPool</code> can be cleared or "reset" so that it can be reused from scratch; HOWEVER, the workers must be re-retrieved via newWorker(); none of the outstanding workers can be reused. Also, if any outstanding workers are still working when clear() is called, they will likely deadlock
clear
{ "repo_name": "Dempsy/dempsy-commons", "path": "dempsy-ringbuffer/src/main/java/net/dempsy/ringbuffer/RingBufferControlWorkerPool.java", "license": "apache-2.0", "size": 14932 }
[ "com.lmax.disruptor.Sequence" ]
import com.lmax.disruptor.Sequence;
import com.lmax.disruptor.*;
[ "com.lmax.disruptor" ]
com.lmax.disruptor;
476,311
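A heavily hedged order-of-operations sketch for clear(): it only illustrates the constraint stated in the Javadoc (drain and discard all workers, clear, then re-obtain workers via newWorker()). The return type of newWorker() is not shown in the record, so it is held as a plain Object here.

static void resetForReuse(RingBufferControlWorkerPool pool) {
    // Precondition (per the Javadoc): every outstanding worker has finished
    // and will not be used again, otherwise clear() fails or deadlocks.
    pool.clear();
    Object freshWorker = pool.newWorker();   // workers must be re-obtained after clear()
}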
public static List<String> expandLinkopts( RuleContext ruleContext, String attrName, Iterable<String> values) { List<String> result = new ArrayList<>(); Expander expander = ruleContext.getExpander().withDataExecLocations(); for (String value : values) { if (isLinkoptLabel(value)) { if (!expandLabel(ruleContext, result, value)) { ruleContext.attributeError(attrName, "could not resolve label '" + value + "'"); } } else { expander .tokenizeAndExpandMakeVars( result, attrName, value); } } return result; }
static List<String> function( RuleContext ruleContext, String attrName, Iterable<String> values) { List<String> result = new ArrayList<>(); Expander expander = ruleContext.getExpander().withDataExecLocations(); for (String value : values) { if (isLinkoptLabel(value)) { if (!expandLabel(ruleContext, result, value)) { ruleContext.attributeError(attrName, STR + value + "'"); } } else { expander .tokenizeAndExpandMakeVars( result, attrName, value); } } return result; }
/** * Expands attribute value either using label expansion * (if attemptLabelExpansion == {@code true} and it does not look like make * variable or flag) or tokenizes and expands make variables. */
Expands an attribute value either using label expansion (if attemptLabelExpansion == true and it does not look like a make variable or flag) or tokenizes and expands make variables
expandLinkopts
{ "repo_name": "spxtr/bazel", "path": "src/main/java/com/google/devtools/build/lib/rules/cpp/CppHelper.java", "license": "apache-2.0", "size": 36294 }
[ "com.google.devtools.build.lib.analysis.Expander", "com.google.devtools.build.lib.analysis.RuleContext", "java.util.ArrayList", "java.util.List" ]
import com.google.devtools.build.lib.analysis.Expander; import com.google.devtools.build.lib.analysis.RuleContext; import java.util.ArrayList; import java.util.List;
import com.google.devtools.build.lib.analysis.*; import java.util.*;
[ "com.google.devtools", "java.util" ]
com.google.devtools; java.util;
152,816
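A hedged sketch of a call site for expandLinkopts inside a C++ rule implementation; the raw attribute values are assumed to have been read from the rule's linkopts attribute already, since the attribute-reading API is not part of this record.

import java.util.List;
import com.google.devtools.build.lib.analysis.RuleContext;

static List<String> expandedLinkopts(RuleContext ruleContext, Iterable<String> rawLinkopts) {
    // Entries that look like labels (e.g. "//pkg:lib") are resolved to paths;
    // everything else is tokenized with make-variable expansion applied.
    return CppHelper.expandLinkopts(ruleContext, "linkopts", rawLinkopts);
}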
private synchronized void initialize() throws NoSuchAlgorithmException, InvalidKeySpecException, NoSuchPaddingException, InvalidKeyException, InvalidAlgorithmParameterException { if (!isInitialized()) { this.cipher = CipherFactory.newPBECipher(this.normalizedPassword.toCharArray(), newOperationMode(), CompoundAlgorithm.PBE_WITH_MD5_AND_DES.getAlgorithm()); resetPassword(); initialized = true; } }
synchronized void function() throws NoSuchAlgorithmException, InvalidKeySpecException, NoSuchPaddingException, InvalidKeyException, InvalidAlgorithmParameterException { if (!isInitialized()) { this.cipher = CipherFactory.newPBECipher(this.normalizedPassword.toCharArray(), newOperationMode(), CompoundAlgorithm.PBE_WITH_MD5_AND_DES.getAlgorithm()); resetPassword(); initialized = true; } }
/** * Initializes the {@link PasswordFileEncryptor} object. * * @throws InvalidAlgorithmParameterException * is thrown if initialization of the cipher object fails. * @throws NoSuchPaddingException * is thrown if instantiation of the cipher object fails. * @throws InvalidKeySpecException * is thrown if generation of the SecretKey object fails. * @throws NoSuchAlgorithmException * is thrown if instantiation of the SecretKeyFactory object fails. * @throws InvalidKeyException * is thrown if initialization of the cipher object fails. */
Initializes the <code>PasswordFileEncryptor</code> object
initialize
{ "repo_name": "astrapi69/mystic-crypt", "path": "src/main/java/io/github/astrapi69/crypto/pw/PasswordFileEncryptor.java", "license": "mit", "size": 6043 }
[ "io.github.astrapi69.crypto.compound.CompoundAlgorithm", "io.github.astrapi69.crypto.factory.CipherFactory", "java.security.InvalidAlgorithmParameterException", "java.security.InvalidKeyException", "java.security.NoSuchAlgorithmException", "java.security.spec.InvalidKeySpecException", "javax.crypto.NoSuchPaddingException" ]
import io.github.astrapi69.crypto.compound.CompoundAlgorithm; import io.github.astrapi69.crypto.factory.CipherFactory; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; import java.security.spec.InvalidKeySpecException; import javax.crypto.NoSuchPaddingException;
import io.github.astrapi69.crypto.compound.*; import io.github.astrapi69.crypto.factory.*; import java.security.*; import java.security.spec.*; import javax.crypto.*;
[ "io.github.astrapi69", "java.security", "javax.crypto" ]
io.github.astrapi69; java.security; javax.crypto;
1,357,965
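A hedged sketch of the cipher construction that initialize() performs, mirroring the factory call shown in the record. The operation-mode argument is assumed to be a JCE Cipher mode constant (the record only shows it coming from newOperationMode()), and the password is a placeholder.

import javax.crypto.Cipher;
import io.github.astrapi69.crypto.compound.CompoundAlgorithm;
import io.github.astrapi69.crypto.factory.CipherFactory;

Cipher cipher = CipherFactory.newPBECipher(
    "changeit".toCharArray(),                                  // placeholder password
    Cipher.ENCRYPT_MODE,                                       // operation mode assumed to be a JCE constant
    CompoundAlgorithm.PBE_WITH_MD5_AND_DES.getAlgorithm());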
public void addSeries(MatrixSeries series) { ParamChecks.nullNotPermitted(series, "series"); // FIXME: Check that there isn't already a series with the same key // add the series... this.seriesList.add(series); series.addChangeListener(this); fireDatasetChanged(); }
void function(MatrixSeries series) { ParamChecks.nullNotPermitted(series, STR); this.seriesList.add(series); series.addChangeListener(this); fireDatasetChanged(); }
/** * Adds a series to the collection. * <P> * Notifies all registered listeners that the dataset has changed. * </p> * * @param series the series (<code>null</code> not permitted). */
Adds a series to the collection. Notifies all registered listeners that the dataset has changed.
addSeries
{ "repo_name": "Mr-Steve/LTSpice_Library_Manager", "path": "libs/jfreechart-1.0.16/source/org/jfree/data/xy/MatrixSeriesCollection.java", "license": "gpl-2.0", "size": 10218 }
[ "org.jfree.chart.util.ParamChecks" ]
import org.jfree.chart.util.ParamChecks;
import org.jfree.chart.util.*;
[ "org.jfree.chart" ]
org.jfree.chart;
1,420,798
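A short usage sketch for addSeries: build a small matrix series, register it, and rely on the collection to forward change events to any listeners (for example a chart's plot). Series name and dimensions are arbitrary.

import org.jfree.data.xy.MatrixSeries;
import org.jfree.data.xy.MatrixSeriesCollection;

MatrixSeries series = new MatrixSeries("heatmap", 2, 3);   // name, rows, columns
series.update(0, 0, 5.0);                                  // row, column, value
series.update(1, 2, 7.5);
MatrixSeriesCollection dataset = new MatrixSeriesCollection();
dataset.addSeries(series);                                 // registered listeners are notified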