method<br>stringlengths 13–441k | clean_method<br>stringlengths 7–313k | doc<br>stringlengths 17–17.3k | comment<br>stringlengths 3–1.42k | method_name<br>stringlengths 1–273 | extra<br>dict | imports<br>sequence | imports_info<br>stringlengths 19–34.8k | cluster_imports_info<br>stringlengths 15–3.66k | libraries<br>sequence | libraries_info<br>stringlengths 6–661 | id<br>int64 0–2.92M |
---|---|---|---|---|---|---|---|---|---|---|---|
public boolean match(Element e, String pseudoE) {
return ((ExtendedSelector)getSimpleSelector()).match(e, pseudoE) &&
((ExtendedCondition)getCondition()).match(e, pseudoE);
} | boolean function(Element e, String pseudoE) { return ((ExtendedSelector)getSimpleSelector()).match(e, pseudoE) && ((ExtendedCondition)getCondition()).match(e, pseudoE); } | /**
* Tests whether this selector matches the given element.
*/ | Tests whether this selector matches the given element | match | {
"repo_name": "Squeegee/batik",
"path": "sources/org/apache/batik/css/engine/sac/CSSConditionalSelector.java",
"license": "apache-2.0",
"size": 3711
} | [
"org.w3c.dom.Element"
] | import org.w3c.dom.Element; | import org.w3c.dom.*; | [
"org.w3c.dom"
] | org.w3c.dom; | 475,115 |
@Override
protected void expectConnectionManagerCacheBoth() {
List<UDDI_SPEC_VERSION> list = new ArrayList<UDDI_SPEC_VERSION>();
list.add(UDDI_SPEC_VERSION.SPEC_1_0);
list.add(UDDI_SPEC_VERSION.SPEC_2_0);
expectConnectionManagerCache(list);
} | void function() { List<UDDI_SPEC_VERSION> list = new ArrayList<UDDI_SPEC_VERSION>(); list.add(UDDI_SPEC_VERSION.SPEC_1_0); list.add(UDDI_SPEC_VERSION.SPEC_2_0); expectConnectionManagerCache(list); } | /**
* Setup for both PDDResp specs
*/ | Setup for both PDDResp specs | expectConnectionManagerCacheBoth | {
"repo_name": "sailajaa/CONNECT",
"path": "Product/Production/Common/CONNECTCoreLib/src/test/java/gov/hhs/fha/nhinc/connectmgr/nhinendpointmanager/PDDRespNhinEndpointManagerMockTest.java",
"license": "bsd-3-clause",
"size": 1823
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,705,231 |
private static List<Long> getPredefinedCustomTargetingKeyIds(
DfpServices dfpServices, DfpSession session) throws RemoteException {
List<Long> customTargetingKeyIds = Lists.newArrayList();
// Get the CustomTargetingService.
CustomTargetingServiceInterface customTargetingService =
dfpServices.get(session, CustomTargetingServiceInterface.class);
// Create a statement to get predefined custom targeting keys.
StatementBuilder statementBuilder = new StatementBuilder()
.where("type = :type")
.orderBy("id ASC")
.limit(StatementBuilder.SUGGESTED_PAGE_LIMIT)
.withBindVariableValue("type", CustomTargetingKeyType.PREDEFINED.toString());
// Default for total result set size.
int totalResultSetSize = 0;
do {
// Get custom targeting keys by statement.
CustomTargetingKeyPage page =
customTargetingService.getCustomTargetingKeysByStatement(statementBuilder.toStatement());
if (page.getResults() != null) {
totalResultSetSize = page.getTotalResultSetSize();
int i = page.getStartIndex();
for (CustomTargetingKey customTargetingKey : page.getResults()) {
System.out.printf("%d) Custom targeting key with ID \"%d\", name \"%s\", and "
+ "display name \"%s\" was found.\n", i++, customTargetingKey.getId(),
customTargetingKey.getName(), customTargetingKey.getDisplayName());
customTargetingKeyIds.add(customTargetingKey.getId());
}
}
statementBuilder.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT);
} while (statementBuilder.getOffset() < totalResultSetSize);
return customTargetingKeyIds;
} | static List<Long> function( DfpServices dfpServices, DfpSession session) throws RemoteException { List<Long> customTargetingKeyIds = Lists.newArrayList(); CustomTargetingServiceInterface customTargetingService = dfpServices.get(session, CustomTargetingServiceInterface.class); StatementBuilder statementBuilder = new StatementBuilder() .where(STR) .orderBy(STR) .limit(StatementBuilder.SUGGESTED_PAGE_LIMIT) .withBindVariableValue("type", CustomTargetingKeyType.PREDEFINED.toString()); int totalResultSetSize = 0; do { CustomTargetingKeyPage page = customTargetingService.getCustomTargetingKeysByStatement(statementBuilder.toStatement()); if (page.getResults() != null) { totalResultSetSize = page.getTotalResultSetSize(); int i = page.getStartIndex(); for (CustomTargetingKey customTargetingKey : page.getResults()) { System.out.printf(STR%d\STR%s\STR + STR%s\STR, i++, customTargetingKey.getId(), customTargetingKey.getName(), customTargetingKey.getDisplayName()); customTargetingKeyIds.add(customTargetingKey.getId()); } } statementBuilder.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT); } while (statementBuilder.getOffset() < totalResultSetSize); return customTargetingKeyIds; } | /**
* Gets predefined custom targeting key IDs.
*/ | Gets predefined custom targeting key IDs | getPredefinedCustomTargetingKeyIds | {
"repo_name": "nafae/developer",
"path": "examples/dfp_axis/src/main/java/dfp/axis/v201408/customtargetingservice/GetPredefinedCustomTargetingKeysAndValues.java",
"license": "apache-2.0",
"size": 6600
} | [
"com.google.api.ads.dfp.axis.factory.DfpServices",
"com.google.api.ads.dfp.axis.utils.v201408.StatementBuilder",
"com.google.api.ads.dfp.axis.v201408.CustomTargetingKey",
"com.google.api.ads.dfp.axis.v201408.CustomTargetingKeyPage",
"com.google.api.ads.dfp.axis.v201408.CustomTargetingKeyType",
"com.google.api.ads.dfp.axis.v201408.CustomTargetingServiceInterface",
"com.google.api.ads.dfp.lib.client.DfpSession",
"com.google.common.collect.Lists",
"java.rmi.RemoteException",
"java.util.List"
] | import com.google.api.ads.dfp.axis.factory.DfpServices; import com.google.api.ads.dfp.axis.utils.v201408.StatementBuilder; import com.google.api.ads.dfp.axis.v201408.CustomTargetingKey; import com.google.api.ads.dfp.axis.v201408.CustomTargetingKeyPage; import com.google.api.ads.dfp.axis.v201408.CustomTargetingKeyType; import com.google.api.ads.dfp.axis.v201408.CustomTargetingServiceInterface; import com.google.api.ads.dfp.lib.client.DfpSession; import com.google.common.collect.Lists; import java.rmi.RemoteException; import java.util.List; | import com.google.api.ads.dfp.axis.factory.*; import com.google.api.ads.dfp.axis.utils.v201408.*; import com.google.api.ads.dfp.axis.v201408.*; import com.google.api.ads.dfp.lib.client.*; import com.google.common.collect.*; import java.rmi.*; import java.util.*; | [
"com.google.api",
"com.google.common",
"java.rmi",
"java.util"
] | com.google.api; com.google.common; java.rmi; java.util; | 1,799,500 |
private void layoutControl() {
controlPanel.add(previousSource, BorderLayout.WEST);
controlPanel.add(nextSource, BorderLayout.EAST);
controlPanel.add(informationButton, BorderLayout.CENTER);
return;
} | void function() { controlPanel.add(previousSource, BorderLayout.WEST); controlPanel.add(nextSource, BorderLayout.EAST); controlPanel.add(informationButton, BorderLayout.CENTER); return; } | /**
* Lays out the control buttons of this frame.
*/ | Lays out the control buttons of this frame | layoutControl | {
"repo_name": "gwenniger/joshua",
"path": "src/joshua/ui/tree_visualizer/browser/DerivationTreeFrame.java",
"license": "lgpl-2.1",
"size": 7490
} | [
"java.awt.BorderLayout"
] | import java.awt.BorderLayout; | import java.awt.*; | [
"java.awt"
] | java.awt; | 1,584,189 |
@Test
public void testHandleUnsupportedXlsFile() throws Exception {
testRunner.enqueue(new File("src/test/resources/Unsupported.xls").toPath());
testRunner.run();
testRunner.assertTransferCount(ConvertExcelToCSVProcessor.SUCCESS, 0);
testRunner.assertTransferCount(ConvertExcelToCSVProcessor.ORIGINAL, 0);
testRunner.assertTransferCount(ConvertExcelToCSVProcessor.FAILURE, 1);
List<LogMessage> errorMessages = testRunner.getLogger().getErrorMessages();
Assert.assertEquals(1, errorMessages.size());
String messageText = errorMessages.get(0).getMsg();
Assert.assertTrue(messageText.contains("Excel") && messageText.contains("OLE2"));
} | void function() throws Exception { testRunner.enqueue(new File(STR).toPath()); testRunner.run(); testRunner.assertTransferCount(ConvertExcelToCSVProcessor.SUCCESS, 0); testRunner.assertTransferCount(ConvertExcelToCSVProcessor.ORIGINAL, 0); testRunner.assertTransferCount(ConvertExcelToCSVProcessor.FAILURE, 1); List<LogMessage> errorMessages = testRunner.getLogger().getErrorMessages(); Assert.assertEquals(1, errorMessages.size()); String messageText = errorMessages.get(0).getMsg(); Assert.assertTrue(messageText.contains("Excel") && messageText.contains("OLE2")); } | /**
* Tests for graceful handling and error messaging of unsupported .XLS files.
*/ | Tests for graceful handling and error messaging of unsupported .XLS files | testHandleUnsupportedXlsFile | {
"repo_name": "jfrazee/nifi",
"path": "nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/src/test/java/org/apache/nifi/processors/poi/ConvertExcelToCSVProcessorTest.java",
"license": "apache-2.0",
"size": 34421
} | [
"java.io.File",
"java.util.List",
"org.apache.nifi.util.LogMessage",
"org.junit.Assert"
] | import java.io.File; import java.util.List; import org.apache.nifi.util.LogMessage; import org.junit.Assert; | import java.io.*; import java.util.*; import org.apache.nifi.util.*; import org.junit.*; | [
"java.io",
"java.util",
"org.apache.nifi",
"org.junit"
] | java.io; java.util; org.apache.nifi; org.junit; | 943,823 |
public PublicKey getPublicKey()
{
return subjectPublicKey;
} | PublicKey function() { return subjectPublicKey; } | /**
* Returns the public key of the subject (target) of the certification
* path, including any inherited public key parameters if applicable.
*
* @return the public key of the subject (never <code>null</code>)
*/ | Returns the public key of the subject (target) of the certification path, including any inherited public key parameters if applicable | getPublicKey | {
"repo_name": "GaloisInc/hacrypto",
"path": "src/Java/BouncyCastle/BouncyCastle-1.50/core/src/main/jdk1.1/java/security/cert/PKIXCertPathValidatorResult.java",
"license": "bsd-3-clause",
"size": 4954
} | [
"java.security.PublicKey"
] | import java.security.PublicKey; | import java.security.*; | [
"java.security"
] | java.security; | 203,511 |
protected boolean parsePHPDoc() {
final IDocumentorLexer documentorLexer = getDocumentorLexer(zzReader);
if (documentorLexer == null) {
return false;
}
yypushback(zzMarkedPos - zzStartRead);
int[] parameters = getParameters();
documentorLexer.reset(zzReader, zzBuffer, parameters);
Object phpDocBlock = documentorLexer.parse();
commentList.add(phpDocBlock);
reset(zzReader, documentorLexer.getBuffer(), documentorLexer.getParameters());
return true;
} | boolean function() { final IDocumentorLexer documentorLexer = getDocumentorLexer(zzReader); if (documentorLexer == null) { return false; } yypushback(zzMarkedPos - zzStartRead); int[] parameters = getParameters(); documentorLexer.reset(zzReader, zzBuffer, parameters); Object phpDocBlock = documentorLexer.parse(); commentList.add(phpDocBlock); reset(zzReader, documentorLexer.getBuffer(), documentorLexer.getParameters()); return true; } | /**
* Parses a PHPDoc block comment. Underlying reader (zzReader) can be closed
* in the process (when EOF is reached).
*
* @return true when PHPDoc was parsed, false otherwise (false also implies
* that underlying lexer was unavailable)
*/ | Parses a PHPDoc block comment. Underlying reader (zzReader) can be closed in the process (when EOF is reached) | parsePHPDoc | {
"repo_name": "vovagrechka/fucking-everything",
"path": "phizdets/phizdets-idea/eclipse-src/org.eclipse.php.core/src/org/eclipse/php/internal/core/ast/scanner/php53/PhpAstLexer.java",
"license": "apache-2.0",
"size": 104402
} | [
"org.eclipse.php.core.ast.nodes.IDocumentorLexer"
] | import org.eclipse.php.core.ast.nodes.IDocumentorLexer; | import org.eclipse.php.core.ast.nodes.*; | [
"org.eclipse.php"
] | org.eclipse.php; | 1,894,795 |
private void removeSession(final Session session) {
SESSIONS.remove(session);
final String articleId = (String) Channels.getHttpParameter(session, Article.ARTICLE_T_ID);
if (StringUtils.isBlank(articleId)) {
return;
}
synchronized (ARTICLE_VIEWS) {
if (!ARTICLE_VIEWS.containsKey(articleId)) {
return;
}
final int count = ARTICLE_VIEWS.get(articleId);
final int newCount = count - 1;
if (newCount < 1) {
ARTICLE_VIEWS.remove(articleId);
} else {
ARTICLE_VIEWS.put(articleId, newCount);
}
}
final JSONObject message = new JSONObject();
message.put(Article.ARTICLE_T_ID, articleId);
message.put(Common.OPERATION, "-");
ArticleListChannel.notifyHeat(message);
notifyHeat(message);
final JSONObject user = (JSONObject) Channels.getHttpSessionAttribute(session, User.USER);
if (null == user) {
return;
}
final String userName = user.optString(User.USER_NAME);
// Timeline
final LatkeBeanManager beanManager = LatkeBeanManagerImpl.getInstance();
final ArticleRepository articleRepository = beanManager.getReference(ArticleRepository.class);
final LangPropsService langPropsService = beanManager.getReference(LangPropsServiceImpl.class);
final TimelineMgmtService timelineMgmtService = beanManager.getReference(TimelineMgmtService.class);
try {
final JSONObject article = articleRepository.get(articleId);
final String articleTitle = StringUtils.substring(Jsoup.parse(
article.optString(Article.ARTICLE_TITLE)).text(), 0, 28);
final String articlePermalink = Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK);
final JSONObject timeline = new JSONObject();
timeline.put(Common.TYPE, Article.ARTICLE);
String content = langPropsService.get("timelineOutArticleLabel");
content = content.replace("{user}", "<a target='_blank' rel='nofollow' href='" + Latkes.getServePath()
+ "/member/" + userName + "'>" + userName + "</a>")
.replace("{article}", "<a target='_blank' rel='nofollow' href='" + articlePermalink
+ "'>" + articleTitle + "</a>");
timeline.put(Common.CONTENT, content);
timelineMgmtService.addTimeline(timeline);
} catch (final Exception e) {
LOGGER.log(Level.ERROR, "Timeline error", e);
}
} | void function(final Session session) { SESSIONS.remove(session); final String articleId = (String) Channels.getHttpParameter(session, Article.ARTICLE_T_ID); if (StringUtils.isBlank(articleId)) { return; } synchronized (ARTICLE_VIEWS) { if (!ARTICLE_VIEWS.containsKey(articleId)) { return; } final int count = ARTICLE_VIEWS.get(articleId); final int newCount = count - 1; if (newCount < 1) { ARTICLE_VIEWS.remove(articleId); } else { ARTICLE_VIEWS.put(articleId, newCount); } } final JSONObject message = new JSONObject(); message.put(Article.ARTICLE_T_ID, articleId); message.put(Common.OPERATION, "-"); ArticleListChannel.notifyHeat(message); notifyHeat(message); final JSONObject user = (JSONObject) Channels.getHttpSessionAttribute(session, User.USER); if (null == user) { return; } final String userName = user.optString(User.USER_NAME); final LatkeBeanManager beanManager = LatkeBeanManagerImpl.getInstance(); final ArticleRepository articleRepository = beanManager.getReference(ArticleRepository.class); final LangPropsService langPropsService = beanManager.getReference(LangPropsServiceImpl.class); final TimelineMgmtService timelineMgmtService = beanManager.getReference(TimelineMgmtService.class); try { final JSONObject article = articleRepository.get(articleId); final String articleTitle = StringUtils.substring(Jsoup.parse( article.optString(Article.ARTICLE_TITLE)).text(), 0, 28); final String articlePermalink = Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK); final JSONObject timeline = new JSONObject(); timeline.put(Common.TYPE, Article.ARTICLE); String content = langPropsService.get(STR); content = content.replace(STR, STR + Latkes.getServePath() + STR + userName + "'>" + userName + "</a>") .replace(STR, STR + articlePermalink + "'>" + articleTitle + "</a>"); timeline.put(Common.CONTENT, content); timelineMgmtService.addTimeline(timeline); } catch (final Exception e) { LOGGER.log(Level.ERROR, STR, e); } } | /**
* Removes the specified session.
*
* @param session the specified session
*/ | Removes the specified session | removeSession | {
"repo_name": "jekkro/symphony",
"path": "src/main/java/org/b3log/symphony/processor/channel/ArticleChannel.java",
"license": "apache-2.0",
"size": 13913
} | [
"org.apache.commons.lang.StringUtils",
"org.b3log.latke.Latkes",
"org.b3log.latke.ioc.LatkeBeanManager",
"org.b3log.latke.ioc.LatkeBeanManagerImpl",
"org.b3log.latke.logging.Level",
"org.b3log.latke.model.User",
"org.b3log.latke.service.LangPropsService",
"org.b3log.latke.service.LangPropsServiceImpl",
"org.b3log.symphony.model.Article",
"org.b3log.symphony.model.Common",
"org.b3log.symphony.repository.ArticleRepository",
"org.b3log.symphony.service.TimelineMgmtService",
"org.eclipse.jetty.websocket.api.Session",
"org.json.JSONObject",
"org.jsoup.Jsoup"
] | import org.apache.commons.lang.StringUtils; import org.b3log.latke.Latkes; import org.b3log.latke.ioc.LatkeBeanManager; import org.b3log.latke.ioc.LatkeBeanManagerImpl; import org.b3log.latke.logging.Level; import org.b3log.latke.model.User; import org.b3log.latke.service.LangPropsService; import org.b3log.latke.service.LangPropsServiceImpl; import org.b3log.symphony.model.Article; import org.b3log.symphony.model.Common; import org.b3log.symphony.repository.ArticleRepository; import org.b3log.symphony.service.TimelineMgmtService; import org.eclipse.jetty.websocket.api.Session; import org.json.JSONObject; import org.jsoup.Jsoup; | import org.apache.commons.lang.*; import org.b3log.latke.*; import org.b3log.latke.ioc.*; import org.b3log.latke.logging.*; import org.b3log.latke.model.*; import org.b3log.latke.service.*; import org.b3log.symphony.model.*; import org.b3log.symphony.repository.*; import org.b3log.symphony.service.*; import org.eclipse.jetty.websocket.api.*; import org.json.*; import org.jsoup.*; | [
"org.apache.commons",
"org.b3log.latke",
"org.b3log.symphony",
"org.eclipse.jetty",
"org.json",
"org.jsoup"
] | org.apache.commons; org.b3log.latke; org.b3log.symphony; org.eclipse.jetty; org.json; org.jsoup; | 2,567,254 |
private void done(){
setResult(RESULT_OK, new Intent().putExtra(RESULT_KEY, getSlot()));
finish();
} | void function(){ setResult(RESULT_OK, new Intent().putExtra(RESULT_KEY, getSlot())); finish(); } | /**
* Bundles the result in an intent and finishes the activity.
*/ | Bundles the result in an intent and finishes the activity | done | {
"repo_name": "tndatacommons/OfficeHours-Android",
"path": "app/src/main/java/org/tndata/officehours/activity/TimeSlotPickerActivity.java",
"license": "apache-2.0",
"size": 16432
} | [
"android.content.Intent"
] | import android.content.Intent; | import android.content.*; | [
"android.content"
] | android.content; | 1,918,083 |
public boolean isAluno(String telefone) {
String sql = "select * from " + TABELA + " where telefone = ?";
String[] valores = { telefone };
Cursor cursor = null;
try {
//Abertura da conexao com BD e execucao da consulta
cursor = getReadableDatabase().rawQuery(sql, valores);
//Retorna true, se for devolvida alguma linha
return cursor.getCount() > 0;
} catch (SQLException e) {
Log.e(TAG, e.getMessage());
return false;
} finally {
cursor.close();
}
} | boolean function(String telefone) { String sql = STR + TABELA + STR; String[] valores = { telefone }; Cursor cursor = null; try { cursor = getReadableDatabase().rawQuery(sql, valores); return cursor.getCount() > 0; } catch (SQLException e) { Log.e(TAG, e.getMessage()); return false; } finally { cursor.close(); } } | /**
* Metodo que verifica se um numero de telefone pertence a um aluno
* @param telefone
* @return true, se o telefone pertence a um aluno
*/ | Metodo que verifica se um numero de telefone pertence a um aluno | isAluno | {
"repo_name": "marciopalheta/cursosandroid",
"path": "CadastroAluno/src/br/com/cursoandroid/cadastroaluno/modelo/dao/AlunoDAO.java",
"license": "gpl-2.0",
"size": 5333
} | [
"android.database.Cursor",
"android.database.SQLException",
"android.util.Log"
] | import android.database.Cursor; import android.database.SQLException; import android.util.Log; | import android.database.*; import android.util.*; | [
"android.database",
"android.util"
] | android.database; android.util; | 969,537 |
public Collection<Party> getParties() {
return parties.values();
} | Collection<Party> function() { return parties.values(); } | /**
* Gets the parties in the manager.
*
* @return All of the manager's parties.
*/ | Gets the parties in the manager | getParties | {
"repo_name": "ampayne2/DropParty",
"path": "src/main/java/ninja/amp/dropparty/PartyManager.java",
"license": "lgpl-3.0",
"size": 5227
} | [
"java.util.Collection",
"ninja.amp.dropparty.parties.Party"
] | import java.util.Collection; import ninja.amp.dropparty.parties.Party; | import java.util.*; import ninja.amp.dropparty.parties.*; | [
"java.util",
"ninja.amp.dropparty"
] | java.util; ninja.amp.dropparty; | 1,087,081 |
public static Object run(String expression, Map<String, Object> variables) {
return run(expression, variables, Context.EMPTY_CONTEXT());
} | static Object function(String expression, Map<String, Object> variables) { return run(expression, variables, Context.EMPTY_CONTEXT()); } | /**
* Execute and validate a Stellar expression.
*
* <p>This is intended for use while unit testing Stellar expressions. This ensures that the expression
* validates successfully and produces a result that can be serialized correctly.
*
* @param expression The expression to execute.
* @param variables The variables to expose to the expression.
* @return The result of executing the expression.
*/ | Execute and validate a Stellar expression. This is intended for use while unit testing Stellar expressions. This ensures that the expression validates successfully and produces a result that can be serialized correctly | run | {
"repo_name": "nickwallen/metron",
"path": "metron-stellar/stellar-common/src/main/java/org/apache/metron/stellar/common/utils/StellarProcessorUtils.java",
"license": "apache-2.0",
"size": 12612
} | [
"java.util.Map",
"org.apache.metron.stellar.dsl.Context"
] | import java.util.Map; import org.apache.metron.stellar.dsl.Context; | import java.util.*; import org.apache.metron.stellar.dsl.*; | [
"java.util",
"org.apache.metron"
] | java.util; org.apache.metron; | 411,434 |
public void closeConversation(String conversationId, Long participantId)
throws NoSuchConversationException, SystemException, NoSuchParticipantException {
// Find conversation
Conversation conversation = conversationPersistence.findByConversationId(conversationId);
// Find participant
Participant participant = participantPersistence.findByCidParticipantId(conversation.getCid(), participantId);
// Close conversation
participant.setIsOpened(false);
// Since the panel was closed no active panel is currently there
Panel panel = PanelLocalServiceUtil.getPanelByUser(participant.getParticipantId());
panel.setActivePanelId("");
panelPersistence.update(panel, false);
// Save
participantPersistence.update(participant, false);
} | void function(String conversationId, Long participantId) throws NoSuchConversationException, SystemException, NoSuchParticipantException { Conversation conversation = conversationPersistence.findByConversationId(conversationId); Participant participant = participantPersistence.findByCidParticipantId(conversation.getCid(), participantId); participant.setIsOpened(false); Panel panel = PanelLocalServiceUtil.getPanelByUser(participant.getParticipantId()); panel.setActivePanelId(""); panelPersistence.update(panel, false); participantPersistence.update(participant, false); } | /**
* Closes conversation for the particular participant id by setting isOpened flag to false.
*
* @param conversationId Conversation which should be closed
* @param participantId Participant whose conversation should be closed
* @throws com.marcelmika.lims.persistence.generated.NoSuchConversationException
* @throws SystemException
* @throws NoSuchParticipantException
*/ | Closes conversation for the particular participant id by setting isOpened flag to false | closeConversation | {
"repo_name": "marcelmika/lims",
"path": "docroot/WEB-INF/src/com/marcelmika/lims/persistence/generated/service/impl/ParticipantLocalServiceImpl.java",
"license": "mit",
"size": 9779
} | [
"com.liferay.portal.kernel.exception.SystemException",
"com.marcelmika.lims.persistence.generated.NoSuchConversationException",
"com.marcelmika.lims.persistence.generated.NoSuchParticipantException",
"com.marcelmika.lims.persistence.generated.model.Conversation",
"com.marcelmika.lims.persistence.generated.model.Panel",
"com.marcelmika.lims.persistence.generated.model.Participant",
"com.marcelmika.lims.persistence.generated.service.PanelLocalServiceUtil"
] | import com.liferay.portal.kernel.exception.SystemException; import com.marcelmika.lims.persistence.generated.NoSuchConversationException; import com.marcelmika.lims.persistence.generated.NoSuchParticipantException; import com.marcelmika.lims.persistence.generated.model.Conversation; import com.marcelmika.lims.persistence.generated.model.Panel; import com.marcelmika.lims.persistence.generated.model.Participant; import com.marcelmika.lims.persistence.generated.service.PanelLocalServiceUtil; | import com.liferay.portal.kernel.exception.*; import com.marcelmika.lims.persistence.generated.*; import com.marcelmika.lims.persistence.generated.model.*; import com.marcelmika.lims.persistence.generated.service.*; | [
"com.liferay.portal",
"com.marcelmika.lims"
] | com.liferay.portal; com.marcelmika.lims; | 1,871,081 |
@Override
public String getLabel(ProceedingJoinPoint proceedingJoinPoint) {
return proceedingJoinPoint.getSignature().toString();
} | String function(ProceedingJoinPoint proceedingJoinPoint) { return proceedingJoinPoint.getSignature().toString(); } | /** Uses a jamon label as a method name. Example: void com.stevesouza.spring.MonitorMe3.myMethod2(String)
*
*
* @param proceedingJoinPoint
* @return
*/ | Uses a jamon label as a method name. Example: void com.stevesouza.spring.MonitorMe3.myMethod2(String) | getLabel | {
"repo_name": "appbakers/automon_example",
"path": "jamonapi/jamon/src/main/java/com/jamonapi/aop/spring/JamonAopKeyHelper.java",
"license": "apache-2.0",
"size": 4825
} | [
"org.aspectj.lang.ProceedingJoinPoint"
] | import org.aspectj.lang.ProceedingJoinPoint; | import org.aspectj.lang.*; | [
"org.aspectj.lang"
] | org.aspectj.lang; | 674,117 |
public static <K, V> Collector<Map<K, V>, Map<K, V>, Map<K, V>> mergeMaps() {
return Collector.of(
HashMap::new,
Map::putAll,
(m1, m2) -> {
m1.putAll(m2);
return m1;
});
} | static <K, V> Collector<Map<K, V>, Map<K, V>, Map<K, V>> function() { return Collector.of( HashMap::new, Map::putAll, (m1, m2) -> { m1.putAll(m2); return m1; }); } | /**
* Return a custom collector which is able to merge two maps.
*
* @param <K>
* Type of the key
* @param <V>
* Type of the value
* @return
*/ | Return a custom collector which is able to merge two maps | mergeMaps | {
"repo_name": "gentics/mesh",
"path": "api/src/main/java/com/gentics/mesh/util/StreamUtil.java",
"license": "apache-2.0",
"size": 4641
} | [
"java.util.HashMap",
"java.util.Map",
"java.util.stream.Collector"
] | import java.util.HashMap; import java.util.Map; import java.util.stream.Collector; | import java.util.*; import java.util.stream.*; | [
"java.util"
] | java.util; | 1,687,183 |
public byte[] encode() throws IOException {
byte[] buffer = new byte[8+25];
ByteBuffer dos = ByteBuffer.wrap(buffer).order(ByteOrder.LITTLE_ENDIAN);
dos.put((byte)0xFE);
dos.put((byte)(length & 0x00FF));
dos.put((byte)(sequence & 0x00FF));
dos.put((byte)(sysId & 0x00FF));
dos.put((byte)(componentId & 0x00FF));
dos.put((byte)(messageType & 0x00FF));
dos.putFloat(param_value);
dos.putShort((short)(param_count&0x00FFFF));
dos.putShort((short)(param_index&0x00FFFF));
for (int i=0; i<16; i++) {
dos.put((byte)(param_id[i]));
}
dos.put((byte)(param_type&0x00FF));
int crc = MAVLinkCRC.crc_calculate_encode(buffer, 25);
crc = MAVLinkCRC.crc_accumulate((byte) IMAVLinkCRC.MAVLINK_MESSAGE_CRCS[messageType], crc);
byte crcl = (byte) (crc & 0x00FF);
byte crch = (byte) ((crc >> 8) & 0x00FF);
buffer[31] = crcl;
buffer[32] = crch;
return buffer;
} | byte[] function() throws IOException { byte[] buffer = new byte[8+25]; ByteBuffer dos = ByteBuffer.wrap(buffer).order(ByteOrder.LITTLE_ENDIAN); dos.put((byte)0xFE); dos.put((byte)(length & 0x00FF)); dos.put((byte)(sequence & 0x00FF)); dos.put((byte)(sysId & 0x00FF)); dos.put((byte)(componentId & 0x00FF)); dos.put((byte)(messageType & 0x00FF)); dos.putFloat(param_value); dos.putShort((short)(param_count&0x00FFFF)); dos.putShort((short)(param_index&0x00FFFF)); for (int i=0; i<16; i++) { dos.put((byte)(param_id[i])); } dos.put((byte)(param_type&0x00FF)); int crc = MAVLinkCRC.crc_calculate_encode(buffer, 25); crc = MAVLinkCRC.crc_accumulate((byte) IMAVLinkCRC.MAVLINK_MESSAGE_CRCS[messageType], crc); byte crcl = (byte) (crc & 0x00FF); byte crch = (byte) ((crc >> 8) & 0x00FF); buffer[31] = crcl; buffer[32] = crch; return buffer; } | /**
* Encode message with raw data and other informations
*/ | Encode message with raw data and other informations | encode | {
"repo_name": "geeksville/arduleader",
"path": "thirdparty/org.mavlink.library/generated/org/mavlink/messages/ardupilotmega/msg_param_value.java",
"license": "gpl-3.0",
"size": 3532
} | [
"java.io.IOException",
"java.nio.ByteBuffer",
"java.nio.ByteOrder",
"org.mavlink.IMAVLinkCRC",
"org.mavlink.MAVLinkCRC"
] | import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import org.mavlink.IMAVLinkCRC; import org.mavlink.MAVLinkCRC; | import java.io.*; import java.nio.*; import org.mavlink.*; | [
"java.io",
"java.nio",
"org.mavlink"
] | java.io; java.nio; org.mavlink; | 680,993 |
@ServiceMethod(returns = ReturnType.SINGLE)
void completeRestore(
String resourceGroupName,
String managedInstanceName,
String databaseName,
String lastBackupName,
Context context); | @ServiceMethod(returns = ReturnType.SINGLE) void completeRestore( String resourceGroupName, String managedInstanceName, String databaseName, String lastBackupName, Context context); | /**
* Completes the restore operation on a managed database.
*
* @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
* from the Azure Resource Manager API or the portal.
* @param managedInstanceName The name of the managed instance.
* @param databaseName The name of the database.
* @param lastBackupName The last backup name to apply.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/ | Completes the restore operation on a managed database | completeRestore | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-sql/src/main/java/com/azure/resourcemanager/sql/fluent/ManagedDatabasesClient.java",
"license": "mit",
"size": 37573
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.util.Context"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.util.Context; | import com.azure.core.annotation.*; import com.azure.core.util.*; | [
"com.azure.core"
] | com.azure.core; | 808,306 |
public ResourceService getResourceService()
{
return resourceService;
} | ResourceService function() { return resourceService; } | /**
* Retrieve the resource service.
*
* @return The resource service.
*/ | Retrieve the resource service | getResourceService | {
"repo_name": "iritgo/iritgo-aktario",
"path": "aktario-framework/src/main/java/de/iritgo/aktario/core/Engine.java",
"license": "apache-2.0",
"size": 14622
} | [
"de.iritgo.aktario.core.resource.ResourceService"
] | import de.iritgo.aktario.core.resource.ResourceService; | import de.iritgo.aktario.core.resource.*; | [
"de.iritgo.aktario"
] | de.iritgo.aktario; | 62,485 |
public synchronized int getSoTimeout() throws IOException {
if (isClosed())
throw new SocketException("Socket is closed");
Object o = getImpl().getOption(SocketOptions.SO_TIMEOUT);
if (o instanceof Integer) {
return ((Integer) o).intValue();
} else {
return 0;
}
} | synchronized int function() throws IOException { if (isClosed()) throw new SocketException(STR); Object o = getImpl().getOption(SocketOptions.SO_TIMEOUT); if (o instanceof Integer) { return ((Integer) o).intValue(); } else { return 0; } } | /**
* Retrieve setting for SO_TIMEOUT. 0 returns implies that the
* option is disabled (i.e., timeout of infinity).
* @return the SO_TIMEOUT value
* @exception IOException if an I/O error occurs
* @since JDK1.1
* @see #setSoTimeout(int)
*/ | Retrieve setting for SO_TIMEOUT. 0 returns implies that the option is disabled (i.e., timeout of infinity) | getSoTimeout | {
"repo_name": "ZhaoX/jdk-1.7-annotated",
"path": "src/java/net/ServerSocket.java",
"license": "apache-2.0",
"size": 34254
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 485,357 |
protected void bufferTheTagsToBeCleared(String tagName, String... stbIds) {
// Since multi tag addition is supported.
for (String tag : tagName.split(TestConstants.TAG_NAME_SPLIT_REGEX)) {
if (tagsToBeCleared.containsKey(tag)) {
String[] stbIdsAlreadyExist = tagsToBeCleared.get(tagName);
stbIds = (String[]) ArrayUtils.addAll(stbIds, stbIdsAlreadyExist);
}
tagsToBeCleared.put(tag, stbIds);
}
} | void function(String tagName, String... stbIds) { for (String tag : tagName.split(TestConstants.TAG_NAME_SPLIT_REGEX)) { if (tagsToBeCleared.containsKey(tag)) { String[] stbIdsAlreadyExist = tagsToBeCleared.get(tagName); stbIds = (String[]) ArrayUtils.addAll(stbIds, stbIdsAlreadyExist); } tagsToBeCleared.put(tag, stbIds); } } | /**
* Buffer the added tags in-order to clear it at the end of test completion.
*
* @param tagName Tag Name to be added.
* @param stbIds List of stb ids.
*/ | Buffer the added tags in-order to clear it at the end of test completion | bufferTheTagsToBeCleared | {
"repo_name": "trentontrees/dawg",
"path": "testing/src/test/java/com/comcast/dawg/test/base/IndexPageUITestBase.java",
"license": "apache-2.0",
"size": 8851
} | [
"com.comcast.dawg.constants.TestConstants",
"org.apache.commons.lang.ArrayUtils"
] | import com.comcast.dawg.constants.TestConstants; import org.apache.commons.lang.ArrayUtils; | import com.comcast.dawg.constants.*; import org.apache.commons.lang.*; | [
"com.comcast.dawg",
"org.apache.commons"
] | com.comcast.dawg; org.apache.commons; | 716,967 |
@Test(expectedExceptions = OpenGammaRuntimeException.class)
public void testLoadUnavailablePortfolio() {
// creating in-test masters and sources to avoid collisions of portfolios and positions with _toolContext
final PortfolioMaster portfolioMaster = new InMemoryPortfolioMaster();
final PositionMaster positionMaster = new InMemoryPositionMaster();
final PositionSource positionSource = new MasterPositionSource(portfolioMaster, positionMaster);
final SecurityMaster securityMaster = new InMemorySecurityMaster();
final SecuritySource securitySource = new MasterSecuritySource(securityMaster);
final PortfolioManager manager = new PortfolioManager(portfolioMaster, positionMaster, positionSource, securityMaster, securitySource);
manager.loadPortfolio(PortfolioKey.of("Test"));
} | @Test(expectedExceptions = OpenGammaRuntimeException.class) void function() { final PortfolioMaster portfolioMaster = new InMemoryPortfolioMaster(); final PositionMaster positionMaster = new InMemoryPositionMaster(); final PositionSource positionSource = new MasterPositionSource(portfolioMaster, positionMaster); final SecurityMaster securityMaster = new InMemorySecurityMaster(); final SecuritySource securitySource = new MasterSecuritySource(securityMaster); final PortfolioManager manager = new PortfolioManager(portfolioMaster, positionMaster, positionSource, securityMaster, securitySource); manager.loadPortfolio(PortfolioKey.of("Test")); } | /**
* Tests the behaviour when attempting to load a portfolio that has not been saved.
*/ | Tests the behaviour when attempting to load a portfolio that has not been saved | testLoadUnavailablePortfolio | {
"repo_name": "McLeodMoores/starling",
"path": "projects/starling-client/src/test/java/com/mcleodmoores/starling/client/portfolio/PortfolioManagerTest.java",
"license": "apache-2.0",
"size": 39710
} | [
"com.opengamma.OpenGammaRuntimeException",
"com.opengamma.core.position.PositionSource",
"com.opengamma.core.security.SecuritySource",
"com.opengamma.master.portfolio.PortfolioMaster",
"com.opengamma.master.portfolio.impl.InMemoryPortfolioMaster",
"com.opengamma.master.position.PositionMaster",
"com.opengamma.master.position.impl.InMemoryPositionMaster",
"com.opengamma.master.position.impl.MasterPositionSource",
"com.opengamma.master.security.SecurityMaster",
"com.opengamma.master.security.impl.InMemorySecurityMaster",
"com.opengamma.master.security.impl.MasterSecuritySource",
"org.testng.annotations.Test"
] | import com.opengamma.OpenGammaRuntimeException; import com.opengamma.core.position.PositionSource; import com.opengamma.core.security.SecuritySource; import com.opengamma.master.portfolio.PortfolioMaster; import com.opengamma.master.portfolio.impl.InMemoryPortfolioMaster; import com.opengamma.master.position.PositionMaster; import com.opengamma.master.position.impl.InMemoryPositionMaster; import com.opengamma.master.position.impl.MasterPositionSource; import com.opengamma.master.security.SecurityMaster; import com.opengamma.master.security.impl.InMemorySecurityMaster; import com.opengamma.master.security.impl.MasterSecuritySource; import org.testng.annotations.Test; | import com.opengamma.*; import com.opengamma.core.position.*; import com.opengamma.core.security.*; import com.opengamma.master.portfolio.*; import com.opengamma.master.portfolio.impl.*; import com.opengamma.master.position.*; import com.opengamma.master.position.impl.*; import com.opengamma.master.security.*; import com.opengamma.master.security.impl.*; import org.testng.annotations.*; | [
"com.opengamma",
"com.opengamma.core",
"com.opengamma.master",
"org.testng.annotations"
] | com.opengamma; com.opengamma.core; com.opengamma.master; org.testng.annotations; | 1,330,907 |
@Nullable
public static byte[][] splitNalUnits(byte[] data) {
if (!isNalStartCode(data, 0)) {
// data does not consist of NAL start code delimited units.
return null;
}
List<Integer> starts = new ArrayList<>();
int nalUnitIndex = 0;
do {
starts.add(nalUnitIndex);
nalUnitIndex = findNalStartCode(data, nalUnitIndex + NAL_START_CODE.length);
} while (nalUnitIndex != C.INDEX_UNSET);
byte[][] split = new byte[starts.size()][];
for (int i = 0; i < starts.size(); i++) {
int startIndex = starts.get(i);
int endIndex = i < starts.size() - 1 ? starts.get(i + 1) : data.length;
byte[] nal = new byte[endIndex - startIndex];
System.arraycopy(data, startIndex, nal, 0, nal.length);
split[i] = nal;
}
return split;
} | static byte[][] function(byte[] data) { if (!isNalStartCode(data, 0)) { return null; } List<Integer> starts = new ArrayList<>(); int nalUnitIndex = 0; do { starts.add(nalUnitIndex); nalUnitIndex = findNalStartCode(data, nalUnitIndex + NAL_START_CODE.length); } while (nalUnitIndex != C.INDEX_UNSET); byte[][] split = new byte[starts.size()][]; for (int i = 0; i < starts.size(); i++) { int startIndex = starts.get(i); int endIndex = i < starts.size() - 1 ? starts.get(i + 1) : data.length; byte[] nal = new byte[endIndex - startIndex]; System.arraycopy(data, startIndex, nal, 0, nal.length); split[i] = nal; } return split; } | /**
* Splits an array of NAL units.
*
* <p>If the input consists of NAL start code delimited units, then the returned array consists of
* the split NAL units, each of which is still prefixed with the NAL start code. For any other
* input, null is returned.
*
* @param data An array of data.
* @return The individual NAL units, or null if the input did not consist of NAL start code
* delimited units.
*/ | Splits an array of NAL units. If the input consists of NAL start code delimited units, then the returned array consists of the split NAL units, each of which is still prefixed with the NAL start code. For any other input, null is returned | splitNalUnits | {
"repo_name": "androidx/media",
"path": "libraries/common/src/main/java/androidx/media3/common/util/CodecSpecificDataUtil.java",
"license": "apache-2.0",
"size": 7791
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,207,831 |
protected boolean[] canTakeOptions() {
boolean[] result = new boolean[2];
print("options...");
if (m_Associator instanceof OptionHandler) {
println("yes");
if (m_Debug) {
println("\n=== Full report ===");
Enumeration<Option> enu = ((OptionHandler) m_Associator).listOptions();
while (enu.hasMoreElements()) {
Option option = enu.nextElement();
print(option.synopsis() + "\n" + option.description() + "\n");
}
println("\n");
}
result[0] = true;
} else {
println("no");
result[0] = false;
}
return result;
} | boolean[] function() { boolean[] result = new boolean[2]; print(STR); if (m_Associator instanceof OptionHandler) { println("yes"); if (m_Debug) { println(STR); Enumeration<Option> enu = ((OptionHandler) m_Associator).listOptions(); while (enu.hasMoreElements()) { Option option = enu.nextElement(); print(option.synopsis() + "\n" + option.description() + "\n"); } println("\n"); } result[0] = true; } else { println("no"); result[0] = false; } return result; } | /**
* Checks whether the scheme can take command line options.
*
* @return index 0 is true if the associator can take options
*/ | Checks whether the scheme can take command line options | canTakeOptions | {
"repo_name": "ahmedvc/umple",
"path": "Umplificator/UmplifiedProjects/weka-umplified-0/src/main/java/weka/associations/CheckAssociator.java",
"license": "mit",
"size": 52024
} | [
"java.util.Enumeration"
] | import java.util.Enumeration; | import java.util.*; | [
"java.util"
] | java.util; | 2,195,346 |
public boolean monitorAndPrintJob(JobConf conf,
RunningJob job
) throws IOException, InterruptedException {
String lastReport = null;
TaskStatusFilter filter;
filter = getTaskOutputFilter(conf);
JobID jobId = job.getID();
LOG.info("Running job: " + jobId);
int eventCounter = 0;
boolean profiling = conf.getProfileEnabled();
Configuration.IntegerRanges mapRanges = conf.getProfileTaskRange(true);
Configuration.IntegerRanges reduceRanges = conf.getProfileTaskRange(false);
while (!job.isComplete()) {
Thread.sleep(1000);
String report =
(" map " + StringUtils.formatPercent(job.mapProgress(), 0)+
" reduce " +
StringUtils.formatPercent(job.reduceProgress(), 0));
if (!report.equals(lastReport)) {
LOG.info(report);
lastReport = report;
}
TaskCompletionEvent[] events =
job.getTaskCompletionEvents(eventCounter);
eventCounter += events.length;
for(TaskCompletionEvent event : events){
TaskCompletionEvent.Status status = event.getTaskStatus();
if (profiling &&
(status == TaskCompletionEvent.Status.SUCCEEDED ||
status == TaskCompletionEvent.Status.FAILED) &&
(event.isMap ? mapRanges : reduceRanges).
isIncluded(event.idWithinJob())) {
downloadProfile(event);
}
switch(filter){
case NONE:
break;
case SUCCEEDED:
if (event.getTaskStatus() ==
TaskCompletionEvent.Status.SUCCEEDED){
LOG.info(event.toString());
displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
}
break;
case FAILED:
if (event.getTaskStatus() ==
TaskCompletionEvent.Status.FAILED){
LOG.info(event.toString());
// Displaying the task diagnostic information
TaskAttemptID taskId = event.getTaskAttemptId();
String[] taskDiagnostics =
jobSubmitClient.getTaskDiagnostics(taskId);
if (taskDiagnostics != null) {
for(String diagnostics : taskDiagnostics){
System.err.println(diagnostics);
}
}
// Displaying the task logs
displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
}
break;
case KILLED:
if (event.getTaskStatus() == TaskCompletionEvent.Status.KILLED){
LOG.info(event.toString());
}
break;
case ALL:
LOG.info(event.toString());
displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
break;
}
}
}
LOG.info("Job complete: " + jobId);
job.getCounters().log(LOG);
return job.isSuccessful();
} | boolean function(JobConf conf, RunningJob job ) throws IOException, InterruptedException { String lastReport = null; TaskStatusFilter filter; filter = getTaskOutputFilter(conf); JobID jobId = job.getID(); LOG.info(STR + jobId); int eventCounter = 0; boolean profiling = conf.getProfileEnabled(); Configuration.IntegerRanges mapRanges = conf.getProfileTaskRange(true); Configuration.IntegerRanges reduceRanges = conf.getProfileTaskRange(false); while (!job.isComplete()) { Thread.sleep(1000); String report = (STR + StringUtils.formatPercent(job.mapProgress(), 0)+ STR + StringUtils.formatPercent(job.reduceProgress(), 0)); if (!report.equals(lastReport)) { LOG.info(report); lastReport = report; } TaskCompletionEvent[] events = job.getTaskCompletionEvents(eventCounter); eventCounter += events.length; for(TaskCompletionEvent event : events){ TaskCompletionEvent.Status status = event.getTaskStatus(); if (profiling && (status == TaskCompletionEvent.Status.SUCCEEDED status == TaskCompletionEvent.Status.FAILED) && (event.isMap ? mapRanges : reduceRanges). isIncluded(event.idWithinJob())) { downloadProfile(event); } switch(filter){ case NONE: break; case SUCCEEDED: if (event.getTaskStatus() == TaskCompletionEvent.Status.SUCCEEDED){ LOG.info(event.toString()); displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp()); } break; case FAILED: if (event.getTaskStatus() == TaskCompletionEvent.Status.FAILED){ LOG.info(event.toString()); TaskAttemptID taskId = event.getTaskAttemptId(); String[] taskDiagnostics = jobSubmitClient.getTaskDiagnostics(taskId); if (taskDiagnostics != null) { for(String diagnostics : taskDiagnostics){ System.err.println(diagnostics); } } displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp()); } break; case KILLED: if (event.getTaskStatus() == TaskCompletionEvent.Status.KILLED){ LOG.info(event.toString()); } break; case ALL: LOG.info(event.toString()); displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp()); break; } } } LOG.info(STR + jobId); job.getCounters().log(LOG); return job.isSuccessful(); } | /**
* Monitor a job and print status in real-time as progress is made and tasks
* fail.
* @param conf the job's configuration
* @param job the job to track
* @return true if the job succeeded
* @throws IOException if communication to the JobTracker fails
*/ | Monitor a job and print status in real-time as progress is made and tasks fail | monitorAndPrintJob | {
"repo_name": "toddlipcon/hadoop",
"path": "src/mapred/org/apache/hadoop/mapred/JobClient.java",
"license": "apache-2.0",
"size": 64290
} | [
"java.io.IOException",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.util.StringUtils"
] | import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.StringUtils; | import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.util.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 464,363 |
protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
masterOperation(request, state, listener);
} | void function(Task task, Request request, ClusterState state, ActionListener<Response> listener) throws Exception { masterOperation(request, state, listener); } | /**
* Override this operation if access to the task parameter is needed
*/ | Override this operation if access to the task parameter is needed | masterOperation | {
"repo_name": "mmaracic/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java",
"license": "apache-2.0",
"size": 10206
} | [
"org.elasticsearch.action.ActionListener",
"org.elasticsearch.cluster.ClusterState",
"org.elasticsearch.tasks.Task"
] | import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.tasks.Task; | import org.elasticsearch.action.*; import org.elasticsearch.cluster.*; import org.elasticsearch.tasks.*; | [
"org.elasticsearch.action",
"org.elasticsearch.cluster",
"org.elasticsearch.tasks"
] | org.elasticsearch.action; org.elasticsearch.cluster; org.elasticsearch.tasks; | 568,242 |
public static <T> void mux(final SendChannel<T> sinkChannel, final ReceiveChannel<T>... sourceChannels) {
mux(sinkChannel, Arrays.asList(sourceChannels));
} | static <T> void function(final SendChannel<T> sinkChannel, final ReceiveChannel<T>... sourceChannels) { mux(sinkChannel, Arrays.asList(sourceChannels)); } | /**
* Multiplexes messages of type {@link T}, from multiple {@link ReceiveChannel} sources to a single
* {@link SendChannel} sink
* @param sinkChannel
* @param sourceChannels
* @param <T>
*/ | Multiplexes messages of type <code>T</code>, from multiple <code>ReceiveChannel</code> sources to a single <code>SendChannel</code> sink | mux | {
"repo_name": "mrphoebs/Lois",
"path": "src/main/java/com/flipkart/lois/Lois.java",
"license": "apache-2.0",
"size": 4398
} | [
"com.flipkart.lois.channel.api.ReceiveChannel",
"com.flipkart.lois.channel.api.SendChannel",
"java.util.Arrays"
] | import com.flipkart.lois.channel.api.ReceiveChannel; import com.flipkart.lois.channel.api.SendChannel; import java.util.Arrays; | import com.flipkart.lois.channel.api.*; import java.util.*; | [
"com.flipkart.lois",
"java.util"
] | com.flipkart.lois; java.util; | 2,264,932 |
public Hashtable getSensorSetUpInfo() {
return sensorSetUpInfo;
}
| Hashtable function() { return sensorSetUpInfo; } | /**
* Get the sensor setup info.
*/ | Get the sensor setup info | getSensorSetUpInfo | {
"repo_name": "raffy1982/spine-project",
"path": "Spine_apps/nodeEmulator/src/dataSetIO/CommentManager.java",
"license": "lgpl-2.1",
"size": 16032
} | [
"java.util.Hashtable"
] | import java.util.Hashtable; | import java.util.*; | [
"java.util"
] | java.util; | 190,581 |
@VisibleForTesting
public ImmutableList<String> getJavacOpts() {
return customJavacOpts;
} | ImmutableList<String> function() { return customJavacOpts; } | /**
* Gets the value of the "javacopts" attribute combining them with the
* default options. If the current rule has no javacopts attribute, this
* method only returns the default options.
*/ | Gets the value of the "javacopts" attribute combining them with the default options. If the current rule has no javacopts attribute, this method only returns the default options | getJavacOpts | {
"repo_name": "vt09/bazel",
"path": "src/main/java/com/google/devtools/build/lib/rules/java/JavaCompilationHelper.java",
"license": "apache-2.0",
"size": 21824
} | [
"com.google.common.collect.ImmutableList"
] | import com.google.common.collect.ImmutableList; | import com.google.common.collect.*; | [
"com.google.common"
] | com.google.common; | 1,928,411 |
public void routingFailed(JID receipient, Packet packet) {
IQ iq = (IQ) packet;
// If a route to the target address was not found then try to answer a
// service_unavailable error code to the sender of the IQ packet
if (IQ.Type.result != iq.getType() && IQ.Type.error != iq.getType()) {
Log.info("Packet sent to unreachable address " + packet.toXML());
sendErrorPacket(iq, PacketError.Condition.service_unavailable);
}
else {
Log.warn("Error or result packet could not be delivered " + packet.toXML());
}
}
private class TimeoutTask extends TimerTask { | void function(JID receipient, Packet packet) { IQ iq = (IQ) packet; if (IQ.Type.result != iq.getType() && IQ.Type.error != iq.getType()) { Log.info(STR + packet.toXML()); sendErrorPacket(iq, PacketError.Condition.service_unavailable); } else { Log.warn(STR + packet.toXML()); } } private class TimeoutTask extends TimerTask { | /**
* Notification message indicating that a packet has failed to be routed to the receipient.
*
* @param receipient address of the entity that failed to receive the packet.
* @param packet IQ packet that failed to be sent to the receipient.
*/ | Notification message indicating that a packet has failed to be routed to the receipient | routingFailed | {
"repo_name": "derek-wang/ca.rides.openfire",
"path": "src/java/org/jivesoftware/openfire/IQRouter.java",
"license": "apache-2.0",
"size": 22277
} | [
"java.util.TimerTask",
"org.xmpp.packet.Packet",
"org.xmpp.packet.PacketError"
] | import java.util.TimerTask; import org.xmpp.packet.Packet; import org.xmpp.packet.PacketError; | import java.util.*; import org.xmpp.packet.*; | [
"java.util",
"org.xmpp.packet"
] | java.util; org.xmpp.packet; | 1,521,864 |
@ServiceMethod(returns = ReturnType.SINGLE)
public void cancel(String resourceGroupName, String registryName, String runId, Context context) {
cancelAsync(resourceGroupName, registryName, runId, context).block();
} | @ServiceMethod(returns = ReturnType.SINGLE) void function(String resourceGroupName, String registryName, String runId, Context context) { cancelAsync(resourceGroupName, registryName, runId, context).block(); } | /**
* Cancel an existing run.
*
* @param resourceGroupName The name of the resource group to which the container registry belongs.
* @param registryName The name of the container registry.
* @param runId The run ID.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/ | Cancel an existing run | cancel | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-containerregistry/src/main/java/com/azure/resourcemanager/containerregistry/implementation/RunsClientImpl.java",
"license": "mit",
"size": 64706
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.util.Context"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.util.Context; | import com.azure.core.annotation.*; import com.azure.core.util.*; | [
"com.azure.core"
] | com.azure.core; | 985,492 |
private int estimateSupportIStep(Integer item, List<Position> itemBorder) {
// First we need to take the two last items
int support = 0;
for(Position pos : itemBorder) {
Table table = tables[pos.sid];
int numberOfVectors = table.positionVectors.size();
// Scan from last position to first position (they are ordered backward in the table)
for(int j = 0; j < numberOfVectors; j++) {
PositionVector vector = table.positionVectors.get(j);
if(vector.position < pos.position) {
if(vector.bitset.get(item)) {
support += 1;
}
break;
}
}
}
return support;
} | int function(Integer item, List<Position> itemBorder) { int support = 0; for(Position pos : itemBorder) { Table table = tables[pos.sid]; int numberOfVectors = table.positionVectors.size(); for(int j = 0; j < numberOfVectors; j++) { PositionVector vector = table.positionVectors.get(j); if(vector.position < pos.position) { if(vector.bitset.get(item)) { support += 1; } break; } } } return support; } | /**
* Estimate support of appending an item to the current prefix by I-extension
* @param item the item
* @param itemBorder the prefix border
* @return the estimated support (an upper bound)
*/ | Estimate support of appending an item to the current prefix by I-extension | estimateSupportIStep | {
"repo_name": "dragonzhou/humor",
"path": "src/ca/pfv/spmf/algorithms/sequentialpatterns/lapin/AlgoLAPIN_LCI.java",
"license": "apache-2.0",
"size": 36958
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 12,301 |
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<Void>> removeOwnerWithResponseAsync(String objectId, String ownerObjectId, String tenantId); | @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Void>> removeOwnerWithResponseAsync(String objectId, String ownerObjectId, String tenantId); | /**
* Remove a member from owners.
*
* @param objectId The object ID of the group from which to remove the owner.
* @param ownerObjectId Owner object id.
* @param tenantId The tenant ID.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.resourcemanager.authorization.models.GraphErrorException thrown if the request is rejected by
* server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/ | Remove a member from owners | removeOwnerWithResponseAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanagerhybrid/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/fluent/GroupsClient.java",
"license": "mit",
"size": 35920
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; | [
"com.azure.core"
] | com.azure.core; | 905,290 |
public boolean isInDowntime(Calendar now)
{
for (Downtime downtime : this.getDowntime())
{
if (downtime.isInTimeRange(now))
return true;
}
return false;
} | boolean function(Calendar now) { for (Downtime downtime : this.getDowntime()) { if (downtime.isInTimeRange(now)) return true; } return false; } | /**
* Check if this check is in downtime at the given time
*/ | Check if this check is in downtime at the given time | isInDowntime | {
"repo_name": "1024122298/bergamot",
"path": "bergamot-model/src/main/java/com/intrbiz/bergamot/model/Check.java",
"license": "lgpl-3.0",
"size": 10678
} | [
"java.util.Calendar"
] | import java.util.Calendar; | import java.util.*; | [
"java.util"
] | java.util; | 1,100,900 |
EventQueue.invokeLater(new Runnable() {
| EventQueue.invokeLater(new Runnable() { | /**
* The main method (entry point) of the program.
* @param args command line arguments.
*/ | The main method (entry point) of the program | main | {
"repo_name": "isucsp/imgRecSrc",
"path": "gui/com/feradz/HistogramMain.java",
"license": "gpl-2.0",
"size": 964
} | [
"java.awt.EventQueue"
] | import java.awt.EventQueue; | import java.awt.*; | [
"java.awt"
] | java.awt; | 78,763 |
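The main record above is cut off after the EventQueue.invokeLater call. A typical way to complete such a Swing entry point, shown as a self-contained sketch (the frame contents are illustrative, not the HistogramMain UI):

import java.awt.EventQueue;
import javax.swing.JFrame;
import javax.swing.JLabel;

public class SwingEntryPointSketch {

    public static void main(String[] args) {
        // Swing components must be created and shown on the Event Dispatch Thread.
        EventQueue.invokeLater(new Runnable() {
            @Override
            public void run() {
                JFrame frame = new JFrame("Histogram");
                frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                frame.add(new JLabel("placeholder content"));
                frame.pack();
                frame.setVisible(true);
            }
        });
    }
}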
public ISelectionProvider getSelectionProvider() {
if (fSpecialSelectionProvider != null) {
return fSpecialSelectionProvider;
}
return fSite.getSelectionProvider();
}
/**
* Sets a special selection provider which will be used instead of the site's selection provider.
* This method should be used directly after constructing the action and before the action is
* registered as a selection listener. The invocation will not perform a selection change notification.
*
* @param provider a special selection provider which is used
* instead of the site's selection provider or <code>null</code> to use the site's
* selection provider. Clients can for example use a {@link ConvertingSelectionProvider} | ISelectionProvider function() { if (fSpecialSelectionProvider != null) { return fSpecialSelectionProvider; } return fSite.getSelectionProvider(); } /** * Sets a special selection provider which will be used instead of the site's selection provider. * This method should be used directly after constructing the action and before the action is * registered as a selection listener. The invocation will not perform a selection change notification. * * @param provider a special selection provider which is used * instead of the site's selection provider or <code>null</code> to use the site's * selection provider. Clients can for example use a {@link ConvertingSelectionProvider} | /**
* Returns the selection provider managed by the site owning this action or the selection
* provider explicitly set in {@link #setSpecialSelectionProvider(ISelectionProvider)}.
*
* @return the site's selection provider
*/ | Returns the selection provider managed by the site owning this action or the selection provider explicitly set in <code>#setSpecialSelectionProvider(ISelectionProvider)</code> | getSelectionProvider | {
"repo_name": "boniatillo-com/PhaserEditor",
"path": "source/thirdparty/jsdt/org.eclipse.wst.jsdt.ui/src/org/eclipse/wst/jsdt/ui/actions/SelectionDispatchAction.java",
"license": "epl-1.0",
"size": 8186
} | [
"org.eclipse.jface.viewers.ISelectionProvider"
] | import org.eclipse.jface.viewers.ISelectionProvider; | import org.eclipse.jface.viewers.*; | [
"org.eclipse.jface"
] | org.eclipse.jface; | 1,494,159 |
public Collection<String> simpleMatchToIndexNames(String pattern) {
if (Regex.isSimpleMatchPattern(pattern) == false) {
// no wildcards
return Collections.singletonList(pattern);
}
return fieldTypes.simpleMatchToFullName(pattern);
} | Collection<String> function(String pattern) { if (Regex.isSimpleMatchPattern(pattern) == false) { return Collections.singletonList(pattern); } return fieldTypes.simpleMatchToFullName(pattern); } | /**
* Returns all the fields that match the given pattern. If the pattern is prefixed with a type
* then the fields will be returned with a type prefix.
*/ | Returns all the fields that match the given pattern. If the pattern is prefixed with a type then the fields will be returned with a type prefix | simpleMatchToIndexNames | {
"repo_name": "palecur/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/index/mapper/MapperService.java",
"license": "apache-2.0",
"size": 30033
} | [
"java.util.Collection",
"java.util.Collections",
"org.elasticsearch.common.regex.Regex"
] | import java.util.Collection; import java.util.Collections; import org.elasticsearch.common.regex.Regex; | import java.util.*; import org.elasticsearch.common.regex.*; | [
"java.util",
"org.elasticsearch.common"
] | java.util; org.elasticsearch.common; | 732,440 |
@Bean
@Override
public AuthenticationManager authenticationManagerBean() throws Exception {
return super.authenticationManagerBean();
} | AuthenticationManager function() throws Exception { return super.authenticationManagerBean(); } | /**
* Returns the authentication manager currently used by Spring. It
* represents a bean definition with the aim of allowing wiring from other classes
* performing the Inversion of Control (IoC).
*
* @throws Exception
*/ | Returns the authentication manager currently used by Spring. It represents a bean definition with the aim of allowing wiring from other classes performing the Inversion of Control (IoC) | authenticationManagerBean | {
"repo_name": "pon-prisma/PrismaDemo",
"path": "WEBUI/src/main/java/it/prisma/presentationlayer/webui/configs/WebSecurityConfig.java",
"license": "apache-2.0",
"size": 26032
} | [
"org.springframework.security.authentication.AuthenticationManager"
] | import org.springframework.security.authentication.AuthenticationManager; | import org.springframework.security.authentication.*; | [
"org.springframework.security"
] | org.springframework.security; | 753,897 |
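The authenticationManagerBean record above re-publishes Spring Security's AuthenticationManager as a bean so other components can have it injected. A minimal sketch of the configuration class such a method normally lives in; the class and package names are assumed, not taken from the PrismaDemo project:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;

@Configuration
@EnableWebSecurity
public class SecurityConfigSketch extends WebSecurityConfigurerAdapter {

    // Expose the manager built by WebSecurityConfigurerAdapter so that, for example,
    // an OAuth2 token endpoint can have it autowired.
    @Bean
    @Override
    public AuthenticationManager authenticationManagerBean() throws Exception {
        return super.authenticationManagerBean();
    }
}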
public int getType();
/**
* Returns a {@code Dictionary} of the (public) properties of this
* {@code Role} object. Any changes to the returned {@code Dictionary} will
* change the properties of this {@code Role} object. This will cause a
* {@code UserAdminEvent} object of type {@link UserAdminEvent#ROLE_CHANGED}
* to be broadcast to any {@code UserAdminListener} objects.
*
* <p>
* Only objects of type {@code String} may be used as property keys, and
* only objects of type {@code String} or {@code byte[]} may be used as
* property values. Any other types will cause an exception of type
* {@code IllegalArgumentException} to be raised.
*
* <p>
* In order to add, change, or remove a property in the returned
* {@code Dictionary}, a {@link UserAdminPermission} named after the
* property name (or a prefix of it) with action {@code changeProperty} is
* required.
*
* @return {@code Dictionary} containing the properties of this {@code Role} | int function(); /** * Returns a {@code Dictionary} of the (public) properties of this * {@code Role} object. Any changes to the returned {@code Dictionary} will * change the properties of this {@code Role} object. This will cause a * {@code UserAdminEvent} object of type {@link UserAdminEvent#ROLE_CHANGED} * to be broadcast to any {@code UserAdminListener} objects. * * <p> * Only objects of type {@code String} may be used as property keys, and * only objects of type {@code String} or {@code byte[]} may be used as * property values. Any other types will cause an exception of type * {@code IllegalArgumentException} to be raised. * * <p> * In order to add, change, or remove a property in the returned * {@code Dictionary}, a {@link UserAdminPermission} named after the * property name (or a prefix of it) with action {@code changeProperty} is * required. * * @return {@code Dictionary} containing the properties of this {@code Role} | /**
* Returns the type of this role.
*
* @return The role's type.
*/ | Returns the type of this role | getType | {
"repo_name": "knopflerfish/knopflerfish.org",
"path": "osgi/bundles/useradmin/src/org/osgi/service/useradmin/Role.java",
"license": "bsd-3-clause",
"size": 3977
} | [
"java.util.Dictionary"
] | import java.util.Dictionary; | import java.util.*; | [
"java.util"
] | java.util; | 1,096,743 |
public void calculatePartitionUpdateCounters() throws IgniteTxRollbackCheckedException {
TxCounters counters = txCounters(false);
if (counters != null && F.isEmpty(counters.updateCounters())) {
List<PartitionUpdateCountersMessage> cntrMsgs = new ArrayList<>();
for (Map.Entry<Integer, Map<Integer, AtomicLong>> record : counters.accumulatedUpdateCounters().entrySet()) {
int cacheId = record.getKey();
Map<Integer, AtomicLong> partToCntrs = record.getValue();
assert partToCntrs != null;
if (F.isEmpty(partToCntrs))
continue;
PartitionUpdateCountersMessage msg = new PartitionUpdateCountersMessage(cacheId, partToCntrs.size());
GridCacheContext ctx0 = cctx.cacheContext(cacheId);
GridDhtPartitionTopology top = ctx0.topology();
assert top != null;
for (Map.Entry<Integer, AtomicLong> e : partToCntrs.entrySet()) {
AtomicLong acc = e.getValue();
assert acc != null;
long cntr = acc.get();
assert cntr >= 0;
if (cntr != 0) {
int p = e.getKey();
GridDhtLocalPartition part = top.localPartition(p);
// Verify primary tx mapping.
// LOST state is possible if tx is started over LOST partition.
boolean valid = part != null &&
(part.state() == OWNING || part.state() == LOST) &&
part.primary(top.readyTopologyVersion());
if (!valid) {
// Local node is no longer primary for the partition, need to rollback a transaction.
if (part != null && !part.primary(top.readyTopologyVersion())) {
log.warning("Failed to prepare a transaction on outdated topology, rolling back " +
"[tx=" + CU.txString(this) +
", readyTopVer=" + top.readyTopologyVersion() +
", lostParts=" + top.lostPartitions() +
", part=" + part.toString() + ']');
throw new IgniteTxRollbackCheckedException("Failed to prepare a transaction on outdated " +
"topology, please try again [timeout=" + timeout() + ", tx=" + CU.txString(this) + ']');
}
// Trigger error.
throw new AssertionError("Invalid primary mapping [tx=" + CU.txString(this) +
", readyTopVer=" + top.readyTopologyVersion() +
", lostParts=" + top.lostPartitions() +
", part=" + (part == null ? "NULL" : part.toString()) + ']');
}
msg.add(p, part.getAndIncrementUpdateCounter(cntr), cntr);
}
}
if (msg.size() > 0)
cntrMsgs.add(msg);
}
counters.updateCounters(cntrMsgs);
}
} | void function() throws IgniteTxRollbackCheckedException { TxCounters counters = txCounters(false); if (counters != null && F.isEmpty(counters.updateCounters())) { List<PartitionUpdateCountersMessage> cntrMsgs = new ArrayList<>(); for (Map.Entry<Integer, Map<Integer, AtomicLong>> record : counters.accumulatedUpdateCounters().entrySet()) { int cacheId = record.getKey(); Map<Integer, AtomicLong> partToCntrs = record.getValue(); assert partToCntrs != null; if (F.isEmpty(partToCntrs)) continue; PartitionUpdateCountersMessage msg = new PartitionUpdateCountersMessage(cacheId, partToCntrs.size()); GridCacheContext ctx0 = cctx.cacheContext(cacheId); GridDhtPartitionTopology top = ctx0.topology(); assert top != null; for (Map.Entry<Integer, AtomicLong> e : partToCntrs.entrySet()) { AtomicLong acc = e.getValue(); assert acc != null; long cntr = acc.get(); assert cntr >= 0; if (cntr != 0) { int p = e.getKey(); GridDhtLocalPartition part = top.localPartition(p); boolean valid = part != null && (part.state() == OWNING part.state() == LOST) && part.primary(top.readyTopologyVersion()); if (!valid) { if (part != null && !part.primary(top.readyTopologyVersion())) { log.warning(STR + "[tx=" + CU.txString(this) + STR + top.readyTopologyVersion() + STR + top.lostPartitions() + STR + part.toString() + ']'); throw new IgniteTxRollbackCheckedException(STR + STR + timeout() + STR + CU.txString(this) + ']'); } throw new AssertionError(STR + CU.txString(this) + STR + top.readyTopologyVersion() + STR + top.lostPartitions() + STR + (part == null ? "NULL" : part.toString()) + ']'); } msg.add(p, part.getAndIncrementUpdateCounter(cntr), cntr); } } if (msg.size() > 0) cntrMsgs.add(msg); } counters.updateCounters(cntrMsgs); } } | /**
* Calculates partition update counters for the current transaction. Each partition will be supplied with
* a pair of (init, delta) values, where init is the initial update counter and delta is the number of
* updates made by the current transaction for the given partition.
*/ | Calculates partition update counters for the current transaction. Each partition will be supplied with a pair of (init, delta) values, where init is the initial update counter and delta is the number of updates made by the current transaction for the given partition | calculatePartitionUpdateCounters | {
"repo_name": "daradurvs/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/transactions/IgniteTxLocalAdapter.java",
"license": "apache-2.0",
"size": 74224
} | [
"java.util.ArrayList",
"java.util.List",
"java.util.Map",
"java.util.concurrent.atomic.AtomicLong",
"org.apache.ignite.internal.processors.cache.GridCacheContext",
"org.apache.ignite.internal.processors.cache.distributed.dht.PartitionUpdateCountersMessage",
"org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition",
"org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionTopology",
"org.apache.ignite.internal.transactions.IgniteTxRollbackCheckedException",
"org.apache.ignite.internal.util.typedef.F",
"org.apache.ignite.internal.util.typedef.internal.CU"
] | import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.distributed.dht.PartitionUpdateCountersMessage; import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition; import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionTopology; import org.apache.ignite.internal.transactions.IgniteTxRollbackCheckedException; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.CU; | import java.util.*; import java.util.concurrent.atomic.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.processors.cache.distributed.dht.*; import org.apache.ignite.internal.processors.cache.distributed.dht.topology.*; import org.apache.ignite.internal.transactions.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*; | [
"java.util",
"org.apache.ignite"
] | java.util; org.apache.ignite; | 1,027,717 |
Set<DeviceId> getClusterDevices(Topology topology, TopologyCluster cluster); | Set<DeviceId> getClusterDevices(Topology topology, TopologyCluster cluster); | /**
* Returns the set of devices that belong to the specified cluster of the given topology.
*
* @param topology topology descriptor
* @param cluster topology cluster
* @return set of cluster devices
*/ | Returns the set of devices that belong to the specified cluster of the given topology | getClusterDevices | {
"repo_name": "sonu283304/onos",
"path": "core/api/src/main/java/org/onosproject/net/topology/TopologyStore.java",
"license": "apache-2.0",
"size": 6845
} | [
"java.util.Set",
"org.onosproject.net.DeviceId"
] | import java.util.Set; import org.onosproject.net.DeviceId; | import java.util.*; import org.onosproject.net.*; | [
"java.util",
"org.onosproject.net"
] | java.util; org.onosproject.net; | 1,487,047 |
this.setCombatRoundNumber(new CombatRoundNumber(0L));
this.setCombatRoundPosition(position);
} | this.setCombatRoundNumber(new CombatRoundNumber(0L)); this.setCombatRoundPosition(position); } | /**
* Initializes a world date instance with very basic parameters. The initial
* value of the combat round number is 0.
*
* @param position the position the world date should have.
*/ | Initializes a world date instance with very basic parameters. The initial value of the combat round number is 0 | initializeDate | {
"repo_name": "asciiCerebrum/neocortexEngine",
"path": "src/main/java/org/asciicerebrum/neocortexengine/domain/mechanics/WorldDate.java",
"license": "mit",
"size": 4576
} | [
"org.asciicerebrum.neocortexengine.domain.core.particles.CombatRoundNumber"
] | import org.asciicerebrum.neocortexengine.domain.core.particles.CombatRoundNumber; | import org.asciicerebrum.neocortexengine.domain.core.particles.*; | [
"org.asciicerebrum.neocortexengine"
] | org.asciicerebrum.neocortexengine; | 2,602,303 |
private static boolean canEliminateLop(Lop node, ArrayList<Lop> execNodes) {
// this function can only eliminate "aligner" lops such a group
if (!node.isAligner())
return false;
// find the child whose execLoc = 'MapAndReduce'
int ret = getChildAlignment(node, execNodes, ExecLocation.MapAndReduce);
if (ret == CHILD_BREAKS_ALIGNMENT)
return false;
else if (ret == CHILD_DOES_NOT_BREAK_ALIGNMENT)
return true;
else if (ret == MRCHILD_NOT_FOUND)
return false;
else if (ret == MR_CHILD_FOUND_BREAKS_ALIGNMENT)
return false;
else if (ret == MR_CHILD_FOUND_DOES_NOT_BREAK_ALIGNMENT)
return true;
else
throw new RuntimeException("Should not happen. \n");
}
| static boolean function(Lop node, ArrayList<Lop> execNodes) { if (!node.isAligner()) return false; int ret = getChildAlignment(node, execNodes, ExecLocation.MapAndReduce); if (ret == CHILD_BREAKS_ALIGNMENT) return false; else if (ret == CHILD_DOES_NOT_BREAK_ALIGNMENT) return true; else if (ret == MRCHILD_NOT_FOUND) return false; else if (ret == MR_CHILD_FOUND_BREAKS_ALIGNMENT) return false; else if (ret == MR_CHILD_FOUND_DOES_NOT_BREAK_ALIGNMENT) return true; else throw new RuntimeException(STR); } | /**
* Method to check if a lop can be eliminated from checking
*
* @param node low-level operator
* @param execNodes list of exec nodes
* @return true if lop can be eliminated
*/ | Method to check if a lop can be eliminated from checking | canEliminateLop | {
"repo_name": "iyounus/incubator-systemml",
"path": "src/main/java/org/apache/sysml/lops/compile/Dag.java",
"license": "apache-2.0",
"size": 143562
} | [
"java.util.ArrayList",
"org.apache.sysml.lops.Lop",
"org.apache.sysml.lops.LopProperties"
] | import java.util.ArrayList; import org.apache.sysml.lops.Lop; import org.apache.sysml.lops.LopProperties; | import java.util.*; import org.apache.sysml.lops.*; | [
"java.util",
"org.apache.sysml"
] | java.util; org.apache.sysml; | 1,285,212 |
public int getMigrationInformation(String systemName, String migrationSettings) throws Exception
{
// The MigrationLauncher is responsible for handling the interaction
// between the PatchTable and the underlying MigrationTasks; as each
// task is executed, the patch level is incremented, etc.
try
{
DistributedJdbcMigrationLauncherFactory factory =
new DistributedJdbcMigrationLauncherFactory();
DistributedJdbcMigrationLauncher launcher = null;
if (migrationSettings == null)
{
log.info("Using migration.properties (default)");
launcher = (DistributedJdbcMigrationLauncher) factory.createMigrationLauncher(systemName);
}
else
{
log.info("Using " + migrationSettings);
launcher = (DistributedJdbcMigrationLauncher) factory.createMigrationLauncher(systemName, migrationSettings);
}
// FIXME test that the migration information is correct
Map contextMap = launcher.getContexts();
JdbcMigrationContext context =
(JdbcMigrationContext) contextMap.keySet().iterator().next();
int currentLevel = launcher.getDatabasePatchLevel(context);
int nextPatchLevel = launcher.getNextPatchLevel();
log.info("Current Database patch level is : " + currentLevel);
int unappliedPatches = nextPatchLevel - launcher.getDatabasePatchLevel(context) - 1;
log.info("Current number of unapplied patches is : " + unappliedPatches);
log.info("The next patch to author should be : " + nextPatchLevel);
return (nextPatchLevel - 1);
}
catch (Exception e)
{
log.error(e);
throw e;
}
} | int function(String systemName, String migrationSettings) throws Exception { try { DistributedJdbcMigrationLauncherFactory factory = new DistributedJdbcMigrationLauncherFactory(); DistributedJdbcMigrationLauncher launcher = null; if (migrationSettings == null) { log.info(STR); launcher = (DistributedJdbcMigrationLauncher) factory.createMigrationLauncher(systemName); } else { log.info(STR + migrationSettings); launcher = (DistributedJdbcMigrationLauncher) factory.createMigrationLauncher(systemName, migrationSettings); } Map contextMap = launcher.getContexts(); JdbcMigrationContext context = (JdbcMigrationContext) contextMap.keySet().iterator().next(); int currentLevel = launcher.getDatabasePatchLevel(context); int nextPatchLevel = launcher.getNextPatchLevel(); log.info(STR + currentLevel); int unappliedPatches = nextPatchLevel - launcher.getDatabasePatchLevel(context) - 1; log.info(STR + unappliedPatches); log.info(STR + nextPatchLevel); return (nextPatchLevel - 1); } catch (Exception e) { log.error(e); throw e; } } | /**
* Get the migration level information for the given system name
*
* @param systemName the name of the system
* @param migrationSettings name of alternate migration.properties file to use
* @return returns the current highest source code patch number
* @throws Exception if anything goes wrong
*/ | Get the migration level information for the given system name | getMigrationInformation | {
"repo_name": "lamsfoundation/lams",
"path": "3rdParty_sources/tacitknowledge/autopatch/src/main/java/com/tacitknowledge/util/migration/jdbc/DistributedMigrationInformation.java",
"license": "gpl-2.0",
"size": 5868
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 816,223 |
public final Log getLog() {
return log;
} | final Log function() { return log; } | /**
* Get the log for this MMTk thread (mutator or collector).
*/ | Get the log for this MMTk thread (mutator or collector) | getLog | {
"repo_name": "ut-osa/laminar",
"path": "jikesrvm-3.0.0/MMTk/harness/src/org/mmtk/harness/MMTkThread.java",
"license": "bsd-3-clause",
"size": 1556
} | [
"org.mmtk.utility.Log"
] | import org.mmtk.utility.Log; | import org.mmtk.utility.*; | [
"org.mmtk.utility"
] | org.mmtk.utility; | 850,537 |
void stopAllRegionCqs(String regionName) throws CqException; | void stopAllRegionCqs(String regionName) throws CqException; | /**
* Stops all the cqs on a given region.
*/ | Stops all the cqs on a given region | stopAllRegionCqs | {
"repo_name": "smanvi-pivotal/geode",
"path": "geode-core/src/main/java/org/apache/geode/cache/query/internal/cq/CqService.java",
"license": "apache-2.0",
"size": 8587
} | [
"org.apache.geode.cache.query.CqException"
] | import org.apache.geode.cache.query.CqException; | import org.apache.geode.cache.query.*; | [
"org.apache.geode"
] | org.apache.geode; | 1,755,572 |
public static boolean isEmpty(final String[] a) {
if (a == null) {
return true;
}
for (final String s : a) {
if (StringUtils.isNotEmpty(s)) {
return false;
}
}
return true;
} | static boolean function(final String[] a) { if (a == null) { return true; } for (final String s : a) { if (StringUtils.isNotEmpty(s)) { return false; } } return true; } | /**
* checks if an Array of Strings is empty or not. Empty means:
* - Array is null
* - or all elements are null or empty strings
*/ | checks if an Array of Strings is empty or not. Empty means: - Array is null - or all elements are null or empty strings | isEmpty | {
"repo_name": "xiaoyanit/cgeo",
"path": "main/src/cgeo/geocaching/connector/gc/GCLogin.java",
"license": "apache-2.0",
"size": 18924
} | [
"org.apache.commons.lang3.StringUtils"
] | import org.apache.commons.lang3.StringUtils; | import org.apache.commons.lang3.*; | [
"org.apache.commons"
] | org.apache.commons; | 2,657,903 |
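The isEmpty record above treats a String array as empty when it is null or when every element is null or zero-length. A small self-contained sketch of the same rule, kept outside the GCLogin class it comes from, with a usage example:

import org.apache.commons.lang3.StringUtils;

public class ArrayEmptinessSketch {

    static boolean isEmpty(String[] a) {
        if (a == null) {
            return true;
        }
        for (String s : a) {
            if (StringUtils.isNotEmpty(s)) {
                return false;   // at least one element carries text
            }
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(isEmpty(null));                            // true
        System.out.println(isEmpty(new String[] { null, "" }));       // true
        System.out.println(isEmpty(new String[] { "", "geocache" })); // false
    }
}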
private void setRenderingHints()
{
graphics.setRenderingHint(RenderingHints.KEY_INTERPOLATION,
RenderingHints.VALUE_INTERPOLATION_BICUBIC);
graphics.setRenderingHint(RenderingHints.KEY_RENDERING,
RenderingHints.VALUE_RENDER_QUALITY);
graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
}
| void function() { graphics.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC); graphics.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); } | /**
* Sets high-quality rendering hints on the current Graphics2D.
*/ | Sets high-quality rendering hints on the current Graphics2D | setRenderingHints | {
"repo_name": "mdamt/pdfbox",
"path": "pdfbox/src/main/java/org/apache/pdfbox/rendering/PageDrawer.java",
"license": "apache-2.0",
"size": 34519
} | [
"java.awt.RenderingHints"
] | import java.awt.RenderingHints; | import java.awt.*; | [
"java.awt"
] | java.awt; | 891,584 |
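The setRenderingHints record above only flips a Graphics2D to its high-quality settings. A self-contained sketch that applies the same three hints to an off-screen image; the image size and the drawing call are illustrative:

import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;

public class RenderingHintsSketch {

    public static void main(String[] args) {
        BufferedImage image = new BufferedImage(200, 100, BufferedImage.TYPE_INT_ARGB);
        Graphics2D graphics = image.createGraphics();

        // The same three high-quality hints used by the PageDrawer snippet.
        graphics.setRenderingHint(RenderingHints.KEY_INTERPOLATION,
                RenderingHints.VALUE_INTERPOLATION_BICUBIC);
        graphics.setRenderingHint(RenderingHints.KEY_RENDERING,
                RenderingHints.VALUE_RENDER_QUALITY);
        graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON);

        graphics.drawLine(0, 0, 199, 99);   // drawn anti-aliased because of the hints
        graphics.dispose();
    }
}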
public void onEntityCollidedWithBlock(World worldIn, BlockPos pos, IBlockState state, Entity entityIn)
{
if (!entityIn.isRiding() && !entityIn.isBeingRidden() && entityIn.isNonBoss())
{
entityIn.setPortal(pos);
}
}
| void function(World worldIn, BlockPos pos, IBlockState state, Entity entityIn) { if (!entityIn.isRiding() && !entityIn.isBeingRidden() && entityIn.isNonBoss()) { entityIn.setPortal(pos); } } | /**
* Called When an Entity Collided with the Block
*/ | Called When an Entity Collided with the Block | onEntityCollidedWithBlock | {
"repo_name": "InverMN/MinecraftForgeReference",
"path": "MinecraftBlocks2/BlockPortal.java",
"license": "unlicense",
"size": 21200
} | [
"net.minecraft.block.state.IBlockState",
"net.minecraft.entity.Entity",
"net.minecraft.util.math.BlockPos",
"net.minecraft.world.World"
] | import net.minecraft.block.state.IBlockState; import net.minecraft.entity.Entity; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; | import net.minecraft.block.state.*; import net.minecraft.entity.*; import net.minecraft.util.math.*; import net.minecraft.world.*; | [
"net.minecraft.block",
"net.minecraft.entity",
"net.minecraft.util",
"net.minecraft.world"
] | net.minecraft.block; net.minecraft.entity; net.minecraft.util; net.minecraft.world; | 282,064 |
public Multimap<Integer, InputSplit> group(Configuration conf,
Multimap<Integer, InputSplit> bucketSplitMultimap, int availableSlots, float waves)
throws IOException {
// figure out how many tasks we want for each bucket
Map<Integer, Integer> bucketTaskMap =
estimateBucketSizes(availableSlots, waves, bucketSplitMultimap.asMap());
// allocate map bucket id to grouped splits
Multimap<Integer, InputSplit> bucketGroupedSplitMultimap =
ArrayListMultimap.<Integer, InputSplit> create();
// use the tez grouper to combine splits once per bucket
for (int bucketId : bucketSplitMultimap.keySet()) {
Collection<InputSplit> inputSplitCollection = bucketSplitMultimap.get(bucketId);
InputSplit[] rawSplits = inputSplitCollection.toArray(new InputSplit[0]);
InputSplit[] groupedSplits =
tezGrouper.getGroupedSplits(conf, rawSplits, bucketTaskMap.get(bucketId),
HiveInputFormat.class.getName());
LOG.info("Original split size is " + rawSplits.length + " grouped split size is "
+ groupedSplits.length + ", for bucket: " + bucketId);
for (InputSplit inSplit : groupedSplits) {
bucketGroupedSplitMultimap.put(bucketId, inSplit);
}
}
return bucketGroupedSplitMultimap;
} | Multimap<Integer, InputSplit> function(Configuration conf, Multimap<Integer, InputSplit> bucketSplitMultimap, int availableSlots, float waves) throws IOException { Map<Integer, Integer> bucketTaskMap = estimateBucketSizes(availableSlots, waves, bucketSplitMultimap.asMap()); Multimap<Integer, InputSplit> bucketGroupedSplitMultimap = ArrayListMultimap.<Integer, InputSplit> create(); for (int bucketId : bucketSplitMultimap.keySet()) { Collection<InputSplit> inputSplitCollection = bucketSplitMultimap.get(bucketId); InputSplit[] rawSplits = inputSplitCollection.toArray(new InputSplit[0]); InputSplit[] groupedSplits = tezGrouper.getGroupedSplits(conf, rawSplits, bucketTaskMap.get(bucketId), HiveInputFormat.class.getName()); LOG.info(STR + rawSplits.length + STR + groupedSplits.length + STR + bucketId); for (InputSplit inSplit : groupedSplits) { bucketGroupedSplitMultimap.put(bucketId, inSplit); } } return bucketGroupedSplitMultimap; } | /**
* group splits for each bucket separately - while evenly filling all the
* available slots with tasks
*/ | group splits for each bucket separately - while evenly filling all the available slots with tasks | group | {
"repo_name": "winningsix/hive",
"path": "ql/src/java/org/apache/hadoop/hive/ql/exec/tez/SplitGrouper.java",
"license": "apache-2.0",
"size": 6431
} | [
"com.google.common.collect.ArrayListMultimap",
"com.google.common.collect.Multimap",
"java.io.IOException",
"java.util.Collection",
"java.util.Map",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.hive.ql.io.HiveInputFormat",
"org.apache.hadoop.mapred.InputSplit"
] | import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import java.io.IOException; import java.util.Collection; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.io.HiveInputFormat; import org.apache.hadoop.mapred.InputSplit; | import com.google.common.collect.*; import java.io.*; import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hive.ql.io.*; import org.apache.hadoop.mapred.*; | [
"com.google.common",
"java.io",
"java.util",
"org.apache.hadoop"
] | com.google.common; java.io; java.util; org.apache.hadoop; | 2,248,953 |
public PutIndexTemplateRequestBuilder setSource(XContentBuilder templateBuilder) {
request.source(templateBuilder);
return this;
} | PutIndexTemplateRequestBuilder function(XContentBuilder templateBuilder) { request.source(templateBuilder); return this; } | /**
* The template source definition.
*/ | The template source definition | setSource | {
"repo_name": "strapdata/elassandra5-rc",
"path": "core/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java",
"license": "apache-2.0",
"size": 9687
} | [
"org.elasticsearch.common.xcontent.XContentBuilder"
] | import org.elasticsearch.common.xcontent.XContentBuilder; | import org.elasticsearch.common.xcontent.*; | [
"org.elasticsearch.common"
] | org.elasticsearch.common; | 2,359,790 |
public static int frequency(Iterator<?> iterator, @Nullable Object element) {
int result = 0;
if (element == null) {
while (iterator.hasNext()) {
if (iterator.next() == null) {
result++;
}
}
} else {
while (iterator.hasNext()) {
if (element.equals(iterator.next())) {
result++;
}
}
}
return result;
} | static int function(Iterator<?> iterator, @Nullable Object element) { int result = 0; if (element == null) { while (iterator.hasNext()) { if (iterator.next() == null) { result++; } } } else { while (iterator.hasNext()) { if (element.equals(iterator.next())) { result++; } } } return result; } | /**
* Returns the number of elements in the specified iterator that equal the
* specified object. The iterator will be left exhausted: its
* {@code hasNext()} method will return {@code false}.
*
* @see Collections#frequency
*/ | Returns the number of elements in the specified iterator that equal the specified object. The iterator will be left exhausted: its hasNext() method will return false | frequency | {
"repo_name": "tracylihui/google-collections",
"path": "src/com/google/common/collect/Iterators.java",
"license": "apache-2.0",
"size": 33039
} | [
"java.util.Iterator",
"javax.annotation.Nullable"
] | import java.util.Iterator; import javax.annotation.Nullable; | import java.util.*; import javax.annotation.*; | [
"java.util",
"javax.annotation"
] | java.util; javax.annotation; | 2,403,923 |
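The frequency record above counts matching elements while consuming the iterator, so the caller cannot reuse it afterwards. A dependency-free sketch of the same loop using only the JDK (no Guava, no @Nullable annotation):

import java.util.Arrays;
import java.util.Iterator;

public class FrequencySketch {

    // Counts elements equal to the given value; the iterator is exhausted afterwards.
    static int frequency(Iterator<?> iterator, Object element) {
        int result = 0;
        while (iterator.hasNext()) {
            Object next = iterator.next();
            if (element == null ? next == null : element.equals(next)) {
                result++;
            }
        }
        return result;
    }

    public static void main(String[] args) {
        Iterator<String> it = Arrays.asList("a", "b", "a", null).iterator();
        System.out.println(frequency(it, "a"));   // 2
        System.out.println(it.hasNext());         // false - the iterator has been used up
    }
}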
//-----------------------------------------------------------------------
public Currency getPaymentCurrency() {
return _paymentCurrency;
} | Currency function() { return _paymentCurrency; } | /**
* Gets the payment currency.
* @return the value of the property, not null
*/ | Gets the payment currency | getPaymentCurrency | {
"repo_name": "jeorme/OG-Platform",
"path": "projects/OG-FinancialTypes/src/main/java/com/opengamma/financial/security/option/NonDeliverableFXDigitalOptionSecurity.java",
"license": "apache-2.0",
"size": 24872
} | [
"com.opengamma.util.money.Currency"
] | import com.opengamma.util.money.Currency; | import com.opengamma.util.money.*; | [
"com.opengamma.util"
] | com.opengamma.util; | 677,217 |
public DropboxMoveResult move(String remotePath, String newRemotePath) throws DropboxException {
try {
client.files().moveV2(remotePath, newRemotePath);
return new DropboxMoveResult(remotePath, newRemotePath);
} catch (DbxException e) {
throw new DropboxException(remotePath + " does not exist or cannot obtain metadata", e);
}
} | DropboxMoveResult function(String remotePath, String newRemotePath) throws DropboxException { try { client.files().moveV2(remotePath, newRemotePath); return new DropboxMoveResult(remotePath, newRemotePath); } catch (DbxException e) { throw new DropboxException(remotePath + STR, e); } } | /**
* Rename a remote path with the new path location.
*
* @param remotePath the existing remote path to be renamed
* @param newRemotePath the new remote path substituting the old one
* @return a result object with the result of the move operation.
* @throws DropboxException
*/ | Rename a remote path with the new path location | move | {
"repo_name": "nikhilvibhav/camel",
"path": "components/camel-dropbox/src/main/java/org/apache/camel/component/dropbox/core/DropboxAPIFacade.java",
"license": "apache-2.0",
"size": 16564
} | [
"com.dropbox.core.DbxException",
"org.apache.camel.component.dropbox.dto.DropboxMoveResult",
"org.apache.camel.component.dropbox.util.DropboxException"
] | import com.dropbox.core.DbxException; import org.apache.camel.component.dropbox.dto.DropboxMoveResult; import org.apache.camel.component.dropbox.util.DropboxException; | import com.dropbox.core.*; import org.apache.camel.component.dropbox.dto.*; import org.apache.camel.component.dropbox.util.*; | [
"com.dropbox.core",
"org.apache.camel"
] | com.dropbox.core; org.apache.camel; | 1,426,674 |
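The move record above is a thin wrapper around the Dropbox SDK's files().moveV2 call. A minimal usage sketch against the SDK directly; the client identifier, access token and paths are placeholders to be replaced with real values:

import com.dropbox.core.DbxException;
import com.dropbox.core.DbxRequestConfig;
import com.dropbox.core.v2.DbxClientV2;

public class DropboxMoveSketch {

    public static void main(String[] args) throws DbxException {
        DbxRequestConfig config = DbxRequestConfig.newBuilder("example-client").build();
        DbxClientV2 client = new DbxClientV2(config, "ACCESS_TOKEN");

        // Same call the Camel component issues; Dropbox paths must start with '/'.
        client.files().moveV2("/reports/2015.txt", "/archive/2015.txt");
    }
}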
@Test(expected = IllegalStateException.class)
public void testMaxIsoPacketSizeWithUninitializedDevice()
{
assumeUsbTestsEnabled();
LibUsb.getMaxIsoPacketSize(new Device(), (byte) 0);
} | @Test(expected = IllegalStateException.class) void function() { assumeUsbTestsEnabled(); LibUsb.getMaxIsoPacketSize(new Device(), (byte) 0); } | /**
* Tests the {@link LibUsb#getMaxIsoPacketSize(Device, byte)} method with
* uninitialized device.
*/ | Tests the <code>LibUsb#getMaxIsoPacketSize(Device, byte)</code> method with uninitialized device | testMaxIsoPacketSizeWithUninitializedDevice | {
"repo_name": "usb4java/usb4java",
"path": "src/test/java/org/usb4java/LibUsbTest.java",
"license": "mit",
"size": 45109
} | [
"org.junit.Test",
"org.usb4java.Device",
"org.usb4java.LibUsb",
"org.usb4java.test.UsbAssume"
] | import org.junit.Test; import org.usb4java.Device; import org.usb4java.LibUsb; import org.usb4java.test.UsbAssume; | import org.junit.*; import org.usb4java.*; import org.usb4java.test.*; | [
"org.junit",
"org.usb4java",
"org.usb4java.test"
] | org.junit; org.usb4java; org.usb4java.test; | 534,672 |
public ContentHandler getContentHandler() {
return (contentHandler == base) ? null : contentHandler;
} | ContentHandler function() { return (contentHandler == base) ? null : contentHandler; } | /**
* <b>SAX2</b>: Returns the object used to report the logical content of an
* XML document.
*/ | SAX2: Returns the object used to report the logical content of an XML document | getContentHandler | {
"repo_name": "YOTOV-LIMITED/validator",
"path": "src/nu/validator/gnu/xml/aelfred2/SAXDriver.java",
"license": "mit",
"size": 47312
} | [
"org.xml.sax.ContentHandler"
] | import org.xml.sax.ContentHandler; | import org.xml.sax.*; | [
"org.xml.sax"
] | org.xml.sax; | 1,021,581 |
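The getContentHandler record above returns null until a handler has been registered. A small sketch of the usual set/get round trip on a JAXP-created XMLReader (a generic SAX parser, not the aelfred2 SAXDriver itself):

import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.ContentHandler;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;

public class ContentHandlerSketch {

    public static void main(String[] args) throws Exception {
        XMLReader reader = SAXParserFactory.newInstance().newSAXParser().getXMLReader();

        System.out.println(reader.getContentHandler());    // typically null before registration

        ContentHandler handler = new DefaultHandler();     // no-op handler, fine for the example
        reader.setContentHandler(handler);

        System.out.println(reader.getContentHandler() == handler);   // true
    }
}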
private void doDelete(DepartmentEditForm frm) throws Exception{
sessionContext.checkPermission(frm.getId(), "Department", Right.DepartmentDelete);
org.hibernate.Session hibSession = new DepartmentDAO().getSession();
Transaction tx = null;
try {
tx = hibSession.beginTransaction();
Department department = new DepartmentDAO().get(frm.getId(), hibSession);
if (department.isExternalManager().booleanValue()) {
for (Iterator i=hibSession.
createQuery("select c from Class_ c where c.managingDept.uniqueId=:deptId").
setLong("deptId", department.getUniqueId()).iterate(); i.hasNext();) {
Class_ clazz = (Class_)i.next();
if (clazz.getSchedulingSubpart().getManagingDept().equals(department)) {
// Clear all room preferences from the subpart
for (Iterator j = clazz.getSchedulingSubpart().getPreferences().iterator(); j.hasNext(); ) {
Object pref = j.next();
if (!(pref instanceof TimePref)) j.remove();
}
clazz.getSchedulingSubpart().deleteAllDistributionPreferences(hibSession);
hibSession.saveOrUpdate(clazz.getSchedulingSubpart());
}
clazz.setManagingDept(clazz.getControllingDept());
// Clear all room preferences from the class
for (Iterator j = clazz.getPreferences().iterator(); j.hasNext(); ) {
Object pref = j.next();
if (!(pref instanceof TimePref)) j.remove();
}
clazz.deleteAllDistributionPreferences(hibSession);
hibSession.saveOrUpdate(clazz);
}
} else {
hibSession.createQuery(
"delete StudentClassEnrollment e where e.clazz.uniqueId in " +
"(select c.uniqueId from Class_ c, CourseOffering co where " +
"co.isControl=1 and " +
"c.schedulingSubpart.instrOfferingConfig.instructionalOffering=co.instructionalOffering and "+
"co.subjectArea.department.uniqueId=:deptId)").
setLong("deptId", department.getUniqueId()).
executeUpdate();
}
ChangeLog.addChange(
hibSession,
sessionContext,
department,
ChangeLog.Source.DEPARTMENT_EDIT,
ChangeLog.Operation.DELETE,
null,
null);
hibSession.delete(department);
tx.commit();
HibernateUtil.clearCache();
} catch (HibernateException e) {
try {
if (tx!=null && tx.isActive()) tx.rollback();
} catch (Exception e1) { }
throw e;
}
} | void function(DepartmentEditForm frm) throws Exception{ sessionContext.checkPermission(frm.getId(), STR, Right.DepartmentDelete); org.hibernate.Session hibSession = new DepartmentDAO().getSession(); Transaction tx = null; try { tx = hibSession.beginTransaction(); Department department = new DepartmentDAO().get(frm.getId(), hibSession); if (department.isExternalManager().booleanValue()) { for (Iterator i=hibSession. createQuery(STR). setLong(STR, department.getUniqueId()).iterate(); i.hasNext();) { Class_ clazz = (Class_)i.next(); if (clazz.getSchedulingSubpart().getManagingDept().equals(department)) { for (Iterator j = clazz.getSchedulingSubpart().getPreferences().iterator(); j.hasNext(); ) { Object pref = j.next(); if (!(pref instanceof TimePref)) j.remove(); } clazz.getSchedulingSubpart().deleteAllDistributionPreferences(hibSession); hibSession.saveOrUpdate(clazz.getSchedulingSubpart()); } clazz.setManagingDept(clazz.getControllingDept()); for (Iterator j = clazz.getPreferences().iterator(); j.hasNext(); ) { Object pref = j.next(); if (!(pref instanceof TimePref)) j.remove(); } clazz.deleteAllDistributionPreferences(hibSession); hibSession.saveOrUpdate(clazz); } } else { hibSession.createQuery( STR + STR + STR + STR+ STR). setLong(STR, department.getUniqueId()). executeUpdate(); } ChangeLog.addChange( hibSession, sessionContext, department, ChangeLog.Source.DEPARTMENT_EDIT, ChangeLog.Operation.DELETE, null, null); hibSession.delete(department); tx.commit(); HibernateUtil.clearCache(); } catch (HibernateException e) { try { if (tx!=null && tx.isActive()) tx.rollback(); } catch (Exception e1) { } throw e; } } | /**
* Delete a department
* @param frm the department edit form identifying the department to delete
*/ | Delete a department | doDelete | {
"repo_name": "zuzanamullerova/unitime",
"path": "JavaSource/org/unitime/timetable/action/DepartmentEditAction.java",
"license": "apache-2.0",
"size": 7974
} | [
"java.util.Iterator",
"org.hibernate.HibernateException",
"org.hibernate.Transaction",
"org.unitime.commons.hibernate.util.HibernateUtil",
"org.unitime.timetable.form.DepartmentEditForm",
"org.unitime.timetable.model.ChangeLog",
"org.unitime.timetable.model.Department",
"org.unitime.timetable.model.TimePref",
"org.unitime.timetable.model.dao.DepartmentDAO",
"org.unitime.timetable.security.rights.Right"
] | import java.util.Iterator; import org.hibernate.HibernateException; import org.hibernate.Transaction; import org.unitime.commons.hibernate.util.HibernateUtil; import org.unitime.timetable.form.DepartmentEditForm; import org.unitime.timetable.model.ChangeLog; import org.unitime.timetable.model.Department; import org.unitime.timetable.model.TimePref; import org.unitime.timetable.model.dao.DepartmentDAO; import org.unitime.timetable.security.rights.Right; | import java.util.*; import org.hibernate.*; import org.unitime.commons.hibernate.util.*; import org.unitime.timetable.form.*; import org.unitime.timetable.model.*; import org.unitime.timetable.model.dao.*; import org.unitime.timetable.security.rights.*; | [
"java.util",
"org.hibernate",
"org.unitime.commons",
"org.unitime.timetable"
] | java.util; org.hibernate; org.unitime.commons; org.unitime.timetable; | 81,606 |
JsonNumber add(int value);
| JsonNumber add(int value); | /**
* Adds an integer value to this context.
* @param value the value to be added.
* @return a temporary {@link JsonNumber} which has the given value.
*/ | Adds an integer value to this context | add | {
"repo_name": "i49/Hibiscus",
"path": "hibiscus/src/main/java/com/github/i49/hibiscus/validation/JsonContext.java",
"license": "apache-2.0",
"size": 2193
} | [
"javax.json.JsonNumber"
] | import javax.json.JsonNumber; | import javax.json.*; | [
"javax.json"
] | javax.json; | 1,538,301 |
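The add record above belongs to Hibiscus's validation DSL, so it cannot be exercised without that library; the JsonNumber it returns is just the standard javax.json value type. A tiny sketch of creating and reading a JsonNumber with the plain javax.json API instead:

import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonNumber;

public class JsonNumberSketch {

    public static void main(String[] args) {
        // Build a one-element array holding the integer value.
        JsonArray array = Json.createArrayBuilder().add(42).build();

        JsonNumber number = array.getJsonNumber(0);
        System.out.println(number.intValue());     // 42
        System.out.println(number.isIntegral());   // true
    }
}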
private boolean pertenece(java.util.ArrayList al, Usuario u)
throws Exception {
UsrGrpAdm uga = new UsrGrpAdm();
// for each group
for (Iterator iter = al.iterator(); iter.hasNext(); ) {
Grupo g = (Grupo) iter.next();
// if the user belongs to the group...
if (uga.perteneceusr(u, g))
return true;
if (uga.perteneceusr( getPublico() , g))
return true;
}
return false;
} | boolean function(java.util.ArrayList al, Usuario u) throws Exception { UsrGrpAdm uga = new UsrGrpAdm(); for (Iterator iter = al.iterator(); iter.hasNext(); ) { Grupo g = (Grupo) iter.next(); if (uga.perteneceusr(u, g)) return true; if (uga.perteneceusr( getPublico() , g)) return true; } return false; } | /**
* Takes a list of Grupo objects and indicates whether a uid belongs to any of
* those groups, or whether the public user belongs to any of those groups
*
* @param al
* @param uid
* @return
* @throws Exception
*/ | Takes a list of Grupo objects and indicates whether a uid belongs to any of those groups, or whether the public user belongs to any of those groups | pertenece | {
"repo_name": "redondomarco/useradm",
"path": "doc/mcrlib/mcr/seguridadweb/CtrlAcceso.java",
"license": "gpl-3.0",
"size": 10906
} | [
"java.util.ArrayList",
"java.util.Iterator"
] | import java.util.ArrayList; import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 535,021 |
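The pertenece record above asks, for each group in the list, whether the given user or the shared public user is a member. A self-contained sketch of that membership rule; Usuario, Grupo and the membership test below are simplified hypothetical placeholders, not the mcrlib classes:

import java.util.List;

public class GroupMembershipSketch {

    // Hypothetical, simplified stand-ins for the user/group model.
    record Usuario(String uid) { }
    record Grupo(String name, List<String> memberUids) { }

    static final Usuario PUBLICO = new Usuario("public");

    static boolean belongs(Grupo g, Usuario u) {
        return g.memberUids().contains(u.uid());
    }

    // True if the user, or the shared "public" user, belongs to any of the groups.
    static boolean pertenece(List<Grupo> groups, Usuario u) {
        for (Grupo g : groups) {
            if (belongs(g, u) || belongs(g, PUBLICO)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        Grupo admins = new Grupo("admins", List.of("alice"));
        System.out.println(pertenece(List.of(admins), new Usuario("alice")));   // true
        System.out.println(pertenece(List.of(admins), new Usuario("bob")));     // false
    }
}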
protected ActionRequest checkMultipart(ActionRequest request) throws MultipartException {
if (this.multipartResolver != null && this.multipartResolver.isMultipart(request)) {
if (request instanceof MultipartActionRequest) {
logger.debug("Request is already a MultipartActionRequest - probably in a forward");
}
else {
return this.multipartResolver.resolveMultipart(request);
}
}
// If not returned before: return original request.
return request;
} | ActionRequest function(ActionRequest request) throws MultipartException { if (this.multipartResolver != null && this.multipartResolver.isMultipart(request)) { if (request instanceof MultipartActionRequest) { logger.debug(STR); } else { return this.multipartResolver.resolveMultipart(request); } } return request; } | /**
* Convert the request into a multipart request, and make multipart resolver available.
* If no multipart resolver is set, simply use the existing request.
* @param request current HTTP request
* @return the processed request (multipart wrapper if necessary)
*/ | Convert the request into a multipart request, and make multipart resolver available. If no multipart resolver is set, simply use the existing request | checkMultipart | {
"repo_name": "kingtang/spring-learn",
"path": "spring-webmvc-portlet/src/main/java/org/springframework/web/portlet/DispatcherPortlet.java",
"license": "gpl-3.0",
"size": 55871
} | [
"javax.portlet.ActionRequest",
"org.springframework.web.multipart.MultipartException",
"org.springframework.web.portlet.multipart.MultipartActionRequest"
] | import javax.portlet.ActionRequest; import org.springframework.web.multipart.MultipartException; import org.springframework.web.portlet.multipart.MultipartActionRequest; | import javax.portlet.*; import org.springframework.web.multipart.*; import org.springframework.web.portlet.multipart.*; | [
"javax.portlet",
"org.springframework.web"
] | javax.portlet; org.springframework.web; | 2,250,655 |
public GetRecordByIdResult query( GetRecordById getRecordById )
throws OGCWebServiceException {
GetFeature getFeature = null;
XMLFragment getFeatureDocument = null;
Object wfsResponse = null;
GetRecordByIdResult cswResponse = null;
String outputSchema = cswConfiguration.getDeegreeParams().getDefaultOutputSchema();
XMLFragment getRecordsDocument = new XMLFragment( XMLFactory.export( getRecordById ).getRootElement() );
try {
XSLTDocument xslSheet = IN_XSL.get( outputSchema.toUpperCase() );
getFeatureDocument = xslSheet.transform( getRecordsDocument );
LOG.logDebug( "Generated WFS GetFeature request:\n" + getFeatureDocument );
} catch ( TransformerException e ) {
String msg = "Can't transform GetRecordById request to WFS GetFeature request: " + e.getMessage();
LOG.logError( msg, e );
throw new OGCWebServiceException( msg );
}
if ( LOG.getLevel() == ILogger.LOG_DEBUG ) {
StringWriter sw = new StringWriter( 5000 );
getFeatureDocument.write( sw );
LOG.logDebug( sw.getBuffer().toString() );
}
try {
getFeature = GetFeature.create( getRecordById.getId(), getFeatureDocument.getRootElement() );
} catch ( Exception e ) {
String msg = "Cannot generate object representation for GetFeature request: " + e.getMessage();
LOG.logError( msg, e );
throw new OGCWebServiceException( msg );
}
try {
wfsResponse = wfsResource.doService( getFeature );
} catch ( OGCWebServiceException e ) {
String msg = "Generated WFS GetFeature request failed: " + e.getMessage();
LOG.logError( msg, e );
throw new OGCWebServiceException( msg );
}
if ( !( wfsResponse instanceof FeatureResult ) ) {
String msg = "Unexpected result type '" + wfsResponse.getClass().getName()
+ "' from WFS (must be FeatureResult)." + " Maybe a FeatureType is not correctly registered!?";
LOG.logError( msg );
throw new OGCWebServiceException( msg );
}
FeatureResult featureResult = (FeatureResult) wfsResponse;
if ( !( featureResult.getResponse() instanceof FeatureCollection ) ) {
String msg = "Unexpected reponse type: '" + featureResult.getResponse().getClass().getName() + " "
+ featureResult.getResponse().getClass()
+ "' in FeatureResult of WFS (must be a FeatureCollection).";
LOG.logError( msg );
throw new OGCWebServiceException( msg );
}
FeatureCollection featureCollection = (FeatureCollection) featureResult.getResponse();
try {
int numberOfMatchedRecords = featureCollection == null ? 0 : featureCollection.size();
int startPosition = 1;
long maxRecords = Integer.MAX_VALUE;
long numberOfRecordsReturned = startPosition + maxRecords < numberOfMatchedRecords ? maxRecords
: numberOfMatchedRecords
- startPosition + 1;
long nextRecord = numberOfRecordsReturned + startPosition > numberOfMatchedRecords ? 0
: numberOfRecordsReturned
+ startPosition;
HashMap<String, String> params = new HashMap<String, String>();
params.put( "REQUEST_ID", getRecordById.getId() );
if ( numberOfRecordsReturned != 0 ) {
params.put( "SEARCH_STATUS", "complete" );
} else {
params.put( "SEARCH_STATUS", "none" );
}
params.put( "TIMESTAMP", TimeTools.getISOFormattedTime() );
String s = OGCServletController.address + "?service=CSW&version=2.0.0&request=DescribeRecord";
params.put( "RECORD_SCHEMA", s );
params.put( "RECORDS_MATCHED", "" + numberOfMatchedRecords );
params.put( "RECORDS_RETURNED", "" + numberOfRecordsReturned );
params.put( "NEXT_RECORD", "" + nextRecord );
params.put( "ELEMENT_SET", "full" );
params.put( "REQUEST_NAME", "GetRecordById" );
featureCollection.setAttribute( "byID", "true" );
ByteArrayOutputStream bos = new ByteArrayOutputStream( 50000 );
GMLFeatureAdapter ada = new GMLFeatureAdapter( true );
ada.export( featureCollection, bos );
if ( LOG.getLevel() == ILogger.LOG_DEBUG ) {
LOG.logDebug( new String( bos.toByteArray() ) );
}
ByteArrayInputStream bis = new ByteArrayInputStream( bos.toByteArray() );
if ( "2.0.2".equals( getRecordById.getVersion() ) && getRecordById.getOutputSchema() != null ) {
outputSchema = normalizeOutputSchema( getRecordById.getOutputSchema() );
}
XSLTDocument xslSheet = OUT_XSL.get( outputSchema.toUpperCase() );
XMLFragment resultDocument = xslSheet.transform( bis, null, null, params );
GetRecordByIdResultDocument cswResponseDocument = new GetRecordByIdResultDocument();
cswResponseDocument.setRootElement( resultDocument.getRootElement() );
cswResponse = cswResponseDocument.parseGetRecordByIdResponse( getRecordById );
} catch ( Exception e ) {
e.printStackTrace();
String msg = "Can't transform WFS response (FeatureCollection) " + "to CSW response: " + e.getMessage();
LOG.logError( msg, e );
throw new OGCWebServiceException( msg );
}
return cswResponse;
}
| GetRecordByIdResult function( GetRecordById getRecordById ) throws OGCWebServiceException { GetFeature getFeature = null; XMLFragment getFeatureDocument = null; Object wfsResponse = null; GetRecordByIdResult cswResponse = null; String outputSchema = cswConfiguration.getDeegreeParams().getDefaultOutputSchema(); XMLFragment getRecordsDocument = new XMLFragment( XMLFactory.export( getRecordById ).getRootElement() ); try { XSLTDocument xslSheet = IN_XSL.get( outputSchema.toUpperCase() ); getFeatureDocument = xslSheet.transform( getRecordsDocument ); LOG.logDebug( STR + getFeatureDocument ); } catch ( TransformerException e ) { String msg = STR + e.getMessage(); LOG.logError( msg, e ); throw new OGCWebServiceException( msg ); } if ( LOG.getLevel() == ILogger.LOG_DEBUG ) { StringWriter sw = new StringWriter( 5000 ); getFeatureDocument.write( sw ); LOG.logDebug( sw.getBuffer().toString() ); } try { getFeature = GetFeature.create( getRecordById.getId(), getFeatureDocument.getRootElement() ); } catch ( Exception e ) { String msg = STR + e.getMessage(); LOG.logError( msg, e ); throw new OGCWebServiceException( msg ); } try { wfsResponse = wfsResource.doService( getFeature ); } catch ( OGCWebServiceException e ) { String msg = STR + e.getMessage(); LOG.logError( msg, e ); throw new OGCWebServiceException( msg ); } if ( !( wfsResponse instanceof FeatureResult ) ) { String msg = STR + wfsResponse.getClass().getName() + STR + STR; LOG.logError( msg ); throw new OGCWebServiceException( msg ); } FeatureResult featureResult = (FeatureResult) wfsResponse; if ( !( featureResult.getResponse() instanceof FeatureCollection ) ) { String msg = STR + featureResult.getResponse().getClass().getName() + " " + featureResult.getResponse().getClass() + STR; LOG.logError( msg ); throw new OGCWebServiceException( msg ); } FeatureCollection featureCollection = (FeatureCollection) featureResult.getResponse(); try { int numberOfMatchedRecords = featureCollection == null ? 0 : featureCollection.size(); int startPosition = 1; long maxRecords = Integer.MAX_VALUE; long numberOfRecordsReturned = startPosition + maxRecords < numberOfMatchedRecords ? maxRecords : numberOfMatchedRecords - startPosition + 1; long nextRecord = numberOfRecordsReturned + startPosition > numberOfMatchedRecords ? 0 : numberOfRecordsReturned + startPosition; HashMap<String, String> params = new HashMap<String, String>(); params.put( STR, getRecordById.getId() ); if ( numberOfRecordsReturned != 0 ) { params.put( STR, STR ); } else { params.put( STR, "none" ); } params.put( STR, TimeTools.getISOFormattedTime() ); String s = OGCServletController.address + STR; params.put( STR, s ); params.put( STR, STRRECORDS_RETURNEDSTRSTRNEXT_RECORDSTRSTRELEMENT_SETSTRfullSTRREQUEST_NAMESTRGetRecordByIdSTRbyIDSTRtrueSTR2.0.2STRCan't transform WFS response (FeatureCollection) STRto CSW response: " + e.getMessage(); LOG.logError( msg, e ); throw new OGCWebServiceException( msg ); } return cswResponse; } | /**
* Performs a <code>GetRecordById</code> request.
* <p>
* This involves the following steps:
* <ul>
* <li><code>GetRecordById</code>-><code>GetRecordByIdDocument</code></li>
* <li><code>GetRecordByIdDocument</code>-><code>GetFeatureDocument</code> using XSLT</li>
* <li><code>GetFeatureDocument</code>-><code>GetFeature</code></li>
* <li><code>GetFeature</code> request is performed against the underlying WFS</li>
* <li>WFS answers with a <code>FeatureResult</code> object (which contains a <code>FeatureCollection</code>)</li>
* <li><code>FeatureCollection</code>-> GMLFeatureCollectionDocument (as a String)</li>
* <li>GMLFeatureCollectionDocument</code>-><code>GetRecordsResultDocument</code> using XSLT</li>
* <li><code>GetRecordsResultDocument</code>-><code>GetRecordsResult</code></li>
* </ul>
* </p>
*
* @param getRecordById
* @return The GetRecordByIdResult created from the given GetRecordById
* @throws OGCWebServiceException
*/ | Performs a <code>GetRecordById</code> request. This involves the following steps: <code>GetRecordById</code>-><code>GetRecordByIdDocument</code> <code>GetRecordByIdDocument</code>-><code>GetFeatureDocument</code> using XSLT <code>GetFeatureDocument</code>-><code>GetFeature</code> <code>GetFeature</code> request is performed against the underlying WFS WFS answers with a <code>FeatureResult</code> object (which contains a <code>FeatureCollection</code>) <code>FeatureCollection</code>-> GMLFeatureCollectionDocument (as a String) GMLFeatureCollectionDocument</code>-><code>GetRecordsResultDocument</code> using XSLT <code>GetRecordsResultDocument</code>-><code>GetRecordsResult</code> | query | {
"repo_name": "lat-lon/deegree2-base",
"path": "deegree2-core/src/main/java/org/deegree/ogcwebservices/csw/discovery/Discovery.java",
"license": "lgpl-2.1",
"size": 49430
} | [
"java.io.StringWriter",
"java.util.HashMap",
"javax.xml.transform.TransformerException",
"org.deegree.enterprise.servlet.OGCServletController",
"org.deegree.framework.log.ILogger",
"org.deegree.framework.util.TimeTools",
"org.deegree.framework.xml.XMLFragment",
"org.deegree.framework.xml.XSLTDocument",
"org.deegree.model.feature.FeatureCollection",
"org.deegree.ogcwebservices.OGCWebServiceException",
"org.deegree.ogcwebservices.wfs.operation.FeatureResult",
"org.deegree.ogcwebservices.wfs.operation.GetFeature"
] | import java.io.StringWriter; import java.util.HashMap; import javax.xml.transform.TransformerException; import org.deegree.enterprise.servlet.OGCServletController; import org.deegree.framework.log.ILogger; import org.deegree.framework.util.TimeTools; import org.deegree.framework.xml.XMLFragment; import org.deegree.framework.xml.XSLTDocument; import org.deegree.model.feature.FeatureCollection; import org.deegree.ogcwebservices.OGCWebServiceException; import org.deegree.ogcwebservices.wfs.operation.FeatureResult; import org.deegree.ogcwebservices.wfs.operation.GetFeature; | import java.io.*; import java.util.*; import javax.xml.transform.*; import org.deegree.enterprise.servlet.*; import org.deegree.framework.log.*; import org.deegree.framework.util.*; import org.deegree.framework.xml.*; import org.deegree.model.feature.*; import org.deegree.ogcwebservices.*; import org.deegree.ogcwebservices.wfs.operation.*; | [
"java.io",
"java.util",
"javax.xml",
"org.deegree.enterprise",
"org.deegree.framework",
"org.deegree.model",
"org.deegree.ogcwebservices"
] | java.io; java.util; javax.xml; org.deegree.enterprise; org.deegree.framework; org.deegree.model; org.deegree.ogcwebservices; | 197,475 |
MetadataViewer getMetadataViewer() { return metadataViewer; } | MetadataViewer getMetadataViewer() { return metadataViewer; } | /**
* Returns the {@link MetadataViewer}.
*
* @return See above.
*/ | Returns the <code>Browser</code> | getBrowser | {
"repo_name": "MontpellierRessourcesImagerie/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/agents/imviewer/view/ImViewerModel.java",
"license": "gpl-2.0",
"size": 85080
} | [
"org.openmicroscopy.shoola.agents.metadata.view.MetadataViewer"
] | import org.openmicroscopy.shoola.agents.metadata.view.MetadataViewer; | import org.openmicroscopy.shoola.agents.metadata.view.*; | [
"org.openmicroscopy.shoola"
] | org.openmicroscopy.shoola; | 2,881,855 |
private static void copyHFileHalf(
Configuration conf, Path inFile, Path outFile, Reference reference,
HColumnDescriptor familyDescriptor)
throws IOException {
FileSystem fs = inFile.getFileSystem(conf);
CacheConfig cacheConf = new CacheConfig(conf);
HalfStoreFileReader halfReader = null;
StoreFile.Writer halfWriter = null;
try {
halfReader = new HalfStoreFileReader(fs, inFile, cacheConf, reference, conf);
Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();
int blocksize = familyDescriptor.getBlocksize();
Algorithm compression = familyDescriptor.getCompression();
BloomType bloomFilterType = familyDescriptor.getBloomFilterType();
HFileContext hFileContext = new HFileContextBuilder()
.withCompression(compression)
.withChecksumType(HStore.getChecksumType(conf))
.withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
.withBlockSize(blocksize)
.withDataBlockEncoding(familyDescriptor.getDataBlockEncoding())
.build();
halfWriter = new StoreFile.WriterBuilder(conf, cacheConf,
fs)
.withFilePath(outFile)
.withBloomType(bloomFilterType)
.withFileContext(hFileContext)
.build();
HFileScanner scanner = halfReader.getScanner(false, false, false);
scanner.seekTo();
do {
KeyValue kv = scanner.getKeyValue();
halfWriter.append(kv);
} while (scanner.next());
for (Map.Entry<byte[],byte[]> entry : fileInfo.entrySet()) {
if (shouldCopyHFileMetaKey(entry.getKey())) {
halfWriter.appendFileInfo(entry.getKey(), entry.getValue());
}
}
} finally {
if (halfWriter != null) halfWriter.close();
if (halfReader != null) halfReader.close(cacheConf.shouldEvictOnClose());
}
} | static void function( Configuration conf, Path inFile, Path outFile, Reference reference, HColumnDescriptor familyDescriptor) throws IOException { FileSystem fs = inFile.getFileSystem(conf); CacheConfig cacheConf = new CacheConfig(conf); HalfStoreFileReader halfReader = null; StoreFile.Writer halfWriter = null; try { halfReader = new HalfStoreFileReader(fs, inFile, cacheConf, reference, conf); Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo(); int blocksize = familyDescriptor.getBlocksize(); Algorithm compression = familyDescriptor.getCompression(); BloomType bloomFilterType = familyDescriptor.getBloomFilterType(); HFileContext hFileContext = new HFileContextBuilder() .withCompression(compression) .withChecksumType(HStore.getChecksumType(conf)) .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf)) .withBlockSize(blocksize) .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding()) .build(); halfWriter = new StoreFile.WriterBuilder(conf, cacheConf, fs) .withFilePath(outFile) .withBloomType(bloomFilterType) .withFileContext(hFileContext) .build(); HFileScanner scanner = halfReader.getScanner(false, false, false); scanner.seekTo(); do { KeyValue kv = scanner.getKeyValue(); halfWriter.append(kv); } while (scanner.next()); for (Map.Entry<byte[],byte[]> entry : fileInfo.entrySet()) { if (shouldCopyHFileMetaKey(entry.getKey())) { halfWriter.appendFileInfo(entry.getKey(), entry.getValue()); } } } finally { if (halfWriter != null) halfWriter.close(); if (halfReader != null) halfReader.close(cacheConf.shouldEvictOnClose()); } } | /**
* Copy half of an HFile into a new HFile.
*/ | Copy half of an HFile into a new HFile | copyHFileHalf | {
"repo_name": "tenggyut/HIndex",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java",
"license": "apache-2.0",
"size": 35737
} | [
"java.io.IOException",
"java.util.Map",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.hbase.HColumnDescriptor",
"org.apache.hadoop.hbase.KeyValue",
"org.apache.hadoop.hbase.io.HalfStoreFileReader",
"org.apache.hadoop.hbase.io.Reference",
"org.apache.hadoop.hbase.io.compress.Compression",
"org.apache.hadoop.hbase.io.hfile.CacheConfig",
"org.apache.hadoop.hbase.io.hfile.HFileContext",
"org.apache.hadoop.hbase.io.hfile.HFileContextBuilder",
"org.apache.hadoop.hbase.io.hfile.HFileScanner",
"org.apache.hadoop.hbase.regionserver.BloomType",
"org.apache.hadoop.hbase.regionserver.HStore",
"org.apache.hadoop.hbase.regionserver.StoreFile"
] | import java.io.IOException; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.HalfStoreFileReader; import org.apache.hadoop.hbase.io.Reference; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder; import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.StoreFile; | import java.io.*; import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.io.*; import org.apache.hadoop.hbase.io.compress.*; import org.apache.hadoop.hbase.io.hfile.*; import org.apache.hadoop.hbase.regionserver.*; | [
"java.io",
"java.util",
"org.apache.hadoop"
] | java.io; java.util; org.apache.hadoop; | 1,815,302 |
protected void parseMessageStartLine(String startLine) throws IOException
{
}
| void function(String startLine) throws IOException { } | /**
 * Called when reading an HTTP message to parse the start line (request or status line). This method is provided
* for subclasses that wish to extract information from the start line.<br>
* <br>
* This implementation is empty.
*
* @param startLine the HTTP message start line.
*/ | Called when reading a HTTP message to parse the start line (request or status line). This method is provided for subclasses that wish to extract information from the start line. This implementation is empty | parseMessageStartLine | {
"repo_name": "tolo/JServer",
"path": "src/java/com/teletalk/jserver/tcp/http/HttpMessage.java",
"license": "apache-2.0",
"size": 17857
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,291,406 |
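A sketch of how a subclass might use this hook to capture the request line pieces; the fields are hypothetical and not part of HttpMessage itself:

    @Override
    protected void parseMessageStartLine(String startLine) throws IOException {
        String[] parts = startLine.split(" ", 3);        // e.g. "GET /index.html HTTP/1.1"
        this.method = parts[0];                          // hypothetical subclass fields
        this.path = parts.length > 1 ? parts[1] : null;
        this.version = parts.length > 2 ? parts[2] : null;
    }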
public void analyze() throws IOException, LogAbortException {
TreeMap<String, ClassReader> zipClasses = new TreeMap<>();
Map<String, InputStream> filesFound = new TreeMap<>();
parseZip(mOsSourceJar, zipClasses, filesFound);
mLog.info("Found %d classes in input JAR%s.", zipClasses.size(),
mOsSourceJar.size() > 1 ? "s" : "");
Map<String, ClassReader> found = findIncludes(zipClasses);
Map<String, ClassReader> deps = findDeps(zipClasses, found);
if (mGen != null) {
mGen.setKeep(found);
mGen.setDeps(deps);
mGen.setCopyFiles(filesFound);
mGen.setRewriteMethodCallClasses(mReplaceMethodCallClasses);
}
} | void function() throws IOException, LogAbortException { TreeMap<String, ClassReader> zipClasses = new TreeMap<>(); Map<String, InputStream> filesFound = new TreeMap<>(); parseZip(mOsSourceJar, zipClasses, filesFound); mLog.info(STR, zipClasses.size(), mOsSourceJar.size() > 1 ? "s" : ""); Map<String, ClassReader> found = findIncludes(zipClasses); Map<String, ClassReader> deps = findDeps(zipClasses, found); if (mGen != null) { mGen.setKeep(found); mGen.setDeps(deps); mGen.setCopyFiles(filesFound); mGen.setRewriteMethodCallClasses(mReplaceMethodCallClasses); } } | /**
* Starts the analysis using parameters from the constructor.
* Fills the generator with classes & dependencies found.
*/ | Starts the analysis using parameters from the constructor. Fills the generator with classes & dependencies found | analyze | {
"repo_name": "xorware/android_frameworks_base",
"path": "tools/layoutlib/create/src/com/android/tools/layoutlib/create/AsmAnalyzer.java",
"license": "apache-2.0",
"size": 32821
} | [
"java.io.IOException",
"java.io.InputStream",
"java.util.Map",
"java.util.TreeMap",
"org.objectweb.asm.ClassReader"
] | import java.io.IOException; import java.io.InputStream; import java.util.Map; import java.util.TreeMap; import org.objectweb.asm.ClassReader; | import java.io.*; import java.util.*; import org.objectweb.asm.*; | [
"java.io",
"java.util",
"org.objectweb.asm"
] | java.io; java.util; org.objectweb.asm; | 2,132,373 |
Collection<AuxFile> list(SPProgramID programId, Collection<String> fileNames)
throws AuxFileException; | Collection<AuxFile> list(SPProgramID programId, Collection<String> fileNames) throws AuxFileException; | /**
* Gets the auxiliary file information associated with the given file,
* if it exists. Returns <code>null</code> if the file doesn't exist.
*
* @param programId id of the program whose file should be listed
*
* @param fileNames names of the files whose information should be retrieved
*
* @return information about the indicated files; an empty collection if
* there are no such files
*
* @throws AuxFileException if there are any problems with the request
*/ | Gets the auxiliary file information associated with the given file, if it exists. Returns <code>null</code> if the file doesn't exist | list | {
"repo_name": "arturog8m/ocs",
"path": "bundle/edu.gemini.auxfile.workflow/src/main/java/edu/gemini/auxfile/api/AuxFileSystem.java",
"license": "bsd-3-clause",
"size": 6079
} | [
"edu.gemini.spModel.core.SPProgramID",
"java.util.Collection"
] | import edu.gemini.spModel.core.SPProgramID; import java.util.Collection; | import edu.gemini.*; import java.util.*; | [
"edu.gemini",
"java.util"
] | edu.gemini; java.util; | 2,904,221 |
@Nonnull
public ManagedMobileLobAppCollectionRequest expand(@Nonnull final String value) {
addExpandOption(value);
return this;
} | ManagedMobileLobAppCollectionRequest function(@Nonnull final String value) { addExpandOption(value); return this; } | /**
* Sets the expand clause for the request
*
* @param value the expand clause
* @return the updated request
*/ | Sets the expand clause for the request | expand | {
"repo_name": "microsoftgraph/msgraph-sdk-java",
"path": "src/main/java/com/microsoft/graph/requests/ManagedMobileLobAppCollectionRequest.java",
"license": "mit",
"size": 6092
} | [
"com.microsoft.graph.requests.ManagedMobileLobAppCollectionRequest",
"javax.annotation.Nonnull"
] | import com.microsoft.graph.requests.ManagedMobileLobAppCollectionRequest; import javax.annotation.Nonnull; | import com.microsoft.graph.requests.*; import javax.annotation.*; | [
"com.microsoft.graph",
"javax.annotation"
] | com.microsoft.graph; javax.annotation; | 632,956 |
public int read(final byte[] b, final int off, final int len)
throws IOException {
return endpoint.readStream(streamID, b, off, len);
} | int function(final byte[] b, final int off, final int len) throws IOException { return endpoint.readStream(streamID, b, off, len); } | /**
* Read from the stream.
*
* @see java.io.InputStream#read(byte[], int, int)
*/ | Read from the stream | read | {
"repo_name": "nmizoguchi/pfg-r-osgi",
"path": "ch.ethz.iks.r_osgi.remote/src/main/java/ch/ethz/iks/r_osgi/streams/InputStreamProxy.java",
"license": "mit",
"size": 2787
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,253,474 |
@SuppressWarnings("deprecation")
public void failNotEquals(String message, Object expected, Object actual) {
try {
Assert.failNotEquals(message, expected, actual);
} catch (Throwable e) {
ea.addError(e);
}
}
| @SuppressWarnings(STR) void function(String message, Object expected, Object actual) { try { Assert.failNotEquals(message, expected, actual); } catch (Throwable e) { ea.addError(e); } } | /**
* Calls the Assert classes failNotEquals method which creates an exception
* using the given message and objects
*
* @param message
* message to be thrown if they are the same
* @param expected
* expected object
* @param actual
* actual object that is being checked
*/ | Calls the Assert classes failNotEquals method which creates an exception using the given message and objects | failNotEquals | {
"repo_name": "cshao239/JTAF-XCore",
"path": "src/main/java/org/finra/jtaf/core/asserts/IgnoreErrorsAssert.java",
"license": "apache-2.0",
"size": 23703
} | [
"junit.framework.Assert"
] | import junit.framework.Assert; | import junit.framework.*; | [
"junit.framework"
] | junit.framework; | 1,059,869 |
public void setTrackBank (final ITrackBank trackBank)
{
this.trackBank = trackBank;
} | void function (final ITrackBank trackBank) { this.trackBank = trackBank; } | /**
* Set the track bank for which the auto coloring should be applied.
*
* @param trackBank The track bank
*/ | Set the track bank for which the auto coloring should be applied | setTrackBank | {
"repo_name": "git-moss/DrivenByMoss",
"path": "src/main/java/de/mossgrabers/controller/utilities/autocolor/AutoColor.java",
"license": "lgpl-3.0",
"size": 4685
} | [
"de.mossgrabers.framework.daw.data.bank.ITrackBank"
] | import de.mossgrabers.framework.daw.data.bank.ITrackBank; | import de.mossgrabers.framework.daw.data.bank.*; | [
"de.mossgrabers.framework"
] | de.mossgrabers.framework; | 1,100,939 |
public List<EmailTemplateParametersContractProperties> parameters() {
return this.parameters;
} | List<EmailTemplateParametersContractProperties> function() { return this.parameters; } | /**
* Get the parameters property: Email Template Parameter values.
*
* @return the parameters value.
*/ | Get the parameters property: Email Template Parameter values | parameters | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/apimanagement/azure-resourcemanager-apimanagement/src/main/java/com/azure/resourcemanager/apimanagement/fluent/models/EmailTemplateContractProperties.java",
"license": "mit",
"size": 5460
} | [
"com.azure.resourcemanager.apimanagement.models.EmailTemplateParametersContractProperties",
"java.util.List"
] | import com.azure.resourcemanager.apimanagement.models.EmailTemplateParametersContractProperties; import java.util.List; | import com.azure.resourcemanager.apimanagement.models.*; import java.util.*; | [
"com.azure.resourcemanager",
"java.util"
] | com.azure.resourcemanager; java.util; | 2,679,367 |
public void setVideoDebugListener(VideoRendererEventListener listener) {
videoDebugListener = listener;
} | void function(VideoRendererEventListener listener) { videoDebugListener = listener; } | /**
* Sets a listener to receive debug events from the video renderer.
*
* @param listener The listener.
*/ | Sets a listener to receive debug events from the video renderer | setVideoDebugListener | {
"repo_name": "yangwuan55/ExoPlayer",
"path": "library/src/main/java/com/google/android/exoplayer2/SimpleExoPlayer.java",
"license": "apache-2.0",
"size": 36097
} | [
"com.google.android.exoplayer2.video.VideoRendererEventListener"
] | import com.google.android.exoplayer2.video.VideoRendererEventListener; | import com.google.android.exoplayer2.video.*; | [
"com.google.android"
] | com.google.android; | 2,702,036 |
public boolean isVSysIdValid(PropertyHandler paramHandler) throws Exception; | boolean function(PropertyHandler paramHandler) throws Exception; | /**
* Checks if the virtual system ID is valid.
*
* @param paramHandler
* the parameter handler
* @return <code>true</code> if the VSYS id is available otherwise
* <code>false</code>
* @throws Exception
*/ | Checks if the virtual system ID is valid | isVSysIdValid | {
"repo_name": "opetrovski/development",
"path": "oscm-app-iaas/javasrc/org/oscm/app/iaas/intf/VServerCommunication.java",
"license": "apache-2.0",
"size": 4956
} | [
"org.oscm.app.iaas.PropertyHandler"
] | import org.oscm.app.iaas.PropertyHandler; | import org.oscm.app.iaas.*; | [
"org.oscm.app"
] | org.oscm.app; | 843,554 |
@Test
public void testAllRegionsFlushed() {
LOG.debug("testAllRegionsFlushed");
Map<byte[], Long> oldestFlushingSeqNo = new HashMap<byte[], Long>();
Map<byte[], Long> oldestUnFlushedSeqNo = new HashMap<byte[], Long>();
Map<byte[], Long> seqNo = new HashMap<byte[], Long>();
// create a table
TableName t1 = TableName.valueOf("t1");
// create a region
HRegionInfo hri1 = new HRegionInfo(t1, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
// variables to mock region sequenceIds
final AtomicLong sequenceId1 = new AtomicLong(1);
// test empty map
assertTrue(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo));
// add entries in the region
seqNo.put(hri1.getEncodedNameAsBytes(), sequenceId1.incrementAndGet());
oldestUnFlushedSeqNo.put(hri1.getEncodedNameAsBytes(), sequenceId1.get());
// should say region1 is not flushed.
assertFalse(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo));
// test with entries in oldestFlushing map.
oldestUnFlushedSeqNo.clear();
oldestFlushingSeqNo.put(hri1.getEncodedNameAsBytes(), sequenceId1.get());
assertFalse(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo));
// simulate region flush, i.e., clear oldestFlushing and oldestUnflushed maps
oldestFlushingSeqNo.clear();
oldestUnFlushedSeqNo.clear();
assertTrue(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo));
// insert some large values for region1
oldestUnFlushedSeqNo.put(hri1.getEncodedNameAsBytes(), 1000l);
seqNo.put(hri1.getEncodedNameAsBytes(), 1500l);
assertFalse(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo));
// tests when oldestUnFlushed/oldestFlushing contains larger value.
// It means region is flushed.
oldestFlushingSeqNo.put(hri1.getEncodedNameAsBytes(), 1200l);
oldestUnFlushedSeqNo.clear();
seqNo.put(hri1.getEncodedNameAsBytes(), 1199l);
assertTrue(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo));
} | void function() { LOG.debug(STR); Map<byte[], Long> oldestFlushingSeqNo = new HashMap<byte[], Long>(); Map<byte[], Long> oldestUnFlushedSeqNo = new HashMap<byte[], Long>(); Map<byte[], Long> seqNo = new HashMap<byte[], Long>(); TableName t1 = TableName.valueOf("t1"); HRegionInfo hri1 = new HRegionInfo(t1, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW); final AtomicLong sequenceId1 = new AtomicLong(1); assertTrue(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo)); seqNo.put(hri1.getEncodedNameAsBytes(), sequenceId1.incrementAndGet()); oldestUnFlushedSeqNo.put(hri1.getEncodedNameAsBytes(), sequenceId1.get()); assertFalse(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo)); oldestUnFlushedSeqNo.clear(); oldestFlushingSeqNo.put(hri1.getEncodedNameAsBytes(), sequenceId1.get()); assertFalse(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo)); oldestFlushingSeqNo.clear(); oldestUnFlushedSeqNo.clear(); assertTrue(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo)); oldestUnFlushedSeqNo.put(hri1.getEncodedNameAsBytes(), 1000l); seqNo.put(hri1.getEncodedNameAsBytes(), 1500l); assertFalse(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo)); oldestFlushingSeqNo.put(hri1.getEncodedNameAsBytes(), 1200l); oldestUnFlushedSeqNo.clear(); seqNo.put(hri1.getEncodedNameAsBytes(), 1199l); assertTrue(FSHLog.areAllRegionsFlushed(seqNo, oldestFlushingSeqNo, oldestUnFlushedSeqNo)); } | /**
* Simulates WAL append ops for a region and tests
* {@link FSHLog#areAllRegionsFlushed(Map, Map, Map)} API.
* It compares the region sequenceIds with oldestFlushing and oldestUnFlushed entries.
* If a region's entries are larger than min of (oldestFlushing, oldestUnFlushed), then the
* region should be flushed before archiving this WAL.
*/ | Simulates WAL append ops for a region and tests <code>FSHLog#areAllRegionsFlushed(Map, Map, Map)</code> API. It compares the region sequenceIds with oldestFlushing and oldestUnFlushed entries. If a region's entries are larger than min of (oldestFlushing, oldestUnFlushed), then the region should be flushed before archiving this WAL | testAllRegionsFlushed | {
"repo_name": "amyvmiwei/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestFSHLog.java",
"license": "apache-2.0",
"size": 19763
} | [
"java.util.HashMap",
"java.util.Map",
"java.util.concurrent.atomic.AtomicLong",
"org.apache.hadoop.hbase.HConstants",
"org.apache.hadoop.hbase.HRegionInfo",
"org.apache.hadoop.hbase.TableName",
"org.junit.Assert"
] | import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.TableName; import org.junit.Assert; | import java.util.*; import java.util.concurrent.atomic.*; import org.apache.hadoop.hbase.*; import org.junit.*; | [
"java.util",
"org.apache.hadoop",
"org.junit"
] | java.util; org.apache.hadoop; org.junit; | 374,162 |
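The check the test exercises boils down to a per-region comparison; a sketch of that predicate (variable names illustrative, with a missing map entry treated as "no pending edits"):

    // the region no longer blocks archiving this WAL when its latest appended sequence id
    // is older than the oldest edit still flushing or still unflushed for that region
    boolean regionFlushed = latestSeqId < Math.min(oldestFlushingSeqId, oldestUnflushedSeqId);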
void removeExchange(Exchange exchange) throws AMQStoreException; | void removeExchange(Exchange exchange) throws AMQStoreException; | /**
* Removes the specified persistent exchange.
*
* @param exchange The exchange to remove.
*
* @throws AMQStoreException If the operation fails for any reason.
*/ | Removes the specified persistent exchange | removeExchange | {
"repo_name": "ChamNDeSilva/andes",
"path": "modules/andes-core/broker/src/main/java/org/wso2/andes/server/store/DurableConfigurationStore.java",
"license": "apache-2.0",
"size": 4715
} | [
"org.wso2.andes.AMQStoreException",
"org.wso2.andes.server.exchange.Exchange"
] | import org.wso2.andes.AMQStoreException; import org.wso2.andes.server.exchange.Exchange; | import org.wso2.andes.*; import org.wso2.andes.server.exchange.*; | [
"org.wso2.andes"
] | org.wso2.andes; | 1,164,323 |
public boolean read()
throws IOException {
return (inputBuffer.realReadBytes(null, 0, 0) > 0);
}
// -------------------------------------------------------- Request Methods
protected Connector connector;
| boolean function() throws IOException { return (inputBuffer.realReadBytes(null, 0, 0) > 0); } protected Connector connector; | /**
* Clear cached encoders (to save memory for Comet requests).
*/ | Clear cached encoders (to save memory for Comet requests) | read | {
"repo_name": "wenzhucjy/tomcat_source",
"path": "tomcat-8.0.9-sourcecode/java/org/apache/catalina/connector/Request.java",
"license": "apache-2.0",
"size": 101815
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,193,498 |
private PythonSourceFolder tryWrapSourceFolder(Object parent, IContainer container, Set<String> sourcePathSet) {
IPath fullPath = container.getFullPath();
if (sourcePathSet.contains(fullPath.toString())) {
PythonSourceFolder sourceFolder;
if (container instanceof IFolder) {
sourceFolder = new PythonSourceFolder(parent, (IFolder) container);
} else if (container instanceof IProject) {
sourceFolder = new PythonProjectSourceFolder(parent, (IProject) container);
} else {
return null; //some other container we don't know how to treat!
}
//System.out.println("Created source folder: "+ret[i]+" - "+folder.getProject()+" - "+folder.getProjectRelativePath());
Set<PythonSourceFolder> sourceFolders = getProjectSourceFolders(container.getProject());
sourceFolders.add((PythonSourceFolder) sourceFolder);
return sourceFolder;
}
return null;
} | PythonSourceFolder function(Object parent, IContainer container, Set<String> sourcePathSet) { IPath fullPath = container.getFullPath(); if (sourcePathSet.contains(fullPath.toString())) { PythonSourceFolder sourceFolder; if (container instanceof IFolder) { sourceFolder = new PythonSourceFolder(parent, (IFolder) container); } else if (container instanceof IProject) { sourceFolder = new PythonProjectSourceFolder(parent, (IProject) container); } else { return null; } Set<PythonSourceFolder> sourceFolders = getProjectSourceFolders(container.getProject()); sourceFolders.add((PythonSourceFolder) sourceFolder); return sourceFolder; } return null; } | /**
* This method checks if the given folder can be wrapped as a source-folder, and if that's possible, creates and returns
* it
* @return a created source folder or null if it couldn't be created.
*/ | This method checks if the given folder can be wrapped as a source-folder, and if that's possible, creates and returns it | tryWrapSourceFolder | {
"repo_name": "aptana/Pydev",
"path": "bundles/org.python.pydev/src_navigator/org/python/pydev/navigator/PythonModelProvider.java",
"license": "epl-1.0",
"size": 28888
} | [
"java.util.Set",
"org.eclipse.core.resources.IContainer",
"org.eclipse.core.resources.IFolder",
"org.eclipse.core.resources.IProject",
"org.eclipse.core.runtime.IPath",
"org.python.pydev.navigator.elements.PythonProjectSourceFolder",
"org.python.pydev.navigator.elements.PythonSourceFolder"
] | import java.util.Set; import org.eclipse.core.resources.IContainer; import org.eclipse.core.resources.IFolder; import org.eclipse.core.resources.IProject; import org.eclipse.core.runtime.IPath; import org.python.pydev.navigator.elements.PythonProjectSourceFolder; import org.python.pydev.navigator.elements.PythonSourceFolder; | import java.util.*; import org.eclipse.core.resources.*; import org.eclipse.core.runtime.*; import org.python.pydev.navigator.elements.*; | [
"java.util",
"org.eclipse.core",
"org.python.pydev"
] | java.util; org.eclipse.core; org.python.pydev; | 1,853,077 |
//check there are no command line options that are empty
while(pCommandLine.contains("")) {
pCommandLine.remove("");
}
ProcessBuilder pb = new ProcessBuilder(pCommandLine);
pb.redirectErrorStream(gRedirectStderr);
//set the working directory to our temporary directory
//pb.directory(new File(gTempDir));
//log outputs to file(s) - fixes hangs on windows
//and logs *all* output (unlike when using IOStreamThread)
File stdoutFile = File.createTempFile("stdout-log-", ".log");
stdoutFile.deleteOnExit();
File stderrFile = File.createTempFile("stderr-log-", ".log");
stderrFile.deleteOnExit();
pb.redirectOutput(stdoutFile);
if(!gRedirectStderr) {
pb.redirectError(stderrFile);
}
//start the executable
Process proc = pb.start();
try {
//wait for process to end before continuing
proc.waitFor();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//create a log of the console output
gStdout = new BufferedReader(new FileReader(stdoutFile));
gStderr = new BufferedReader(new FileReader(stderrFile));
return proc.exitValue();
}
 | while(pCommandLine.contains("")) { pCommandLine.remove(""); } ProcessBuilder pb = new ProcessBuilder(pCommandLine); pb.redirectErrorStream(gRedirectStderr); File stdoutFile = File.createTempFile(STR, ".log"); stdoutFile.deleteOnExit(); File stderrFile = File.createTempFile(STR, ".log"); stderrFile.deleteOnExit(); pb.redirectOutput(stdoutFile); if(!gRedirectStderr) { pb.redirectError(stderrFile); } Process proc = pb.start(); try { proc.waitFor(); } catch (InterruptedException e) { e.printStackTrace(); } gStdout = new BufferedReader(new FileReader(stdoutFile)); gStderr = new BufferedReader(new FileReader(stderrFile)); return proc.exitValue(); } | /**
* Executes a given command line. Note stdout and stderr will be populated by this method.
* @param pCommandLine command line to run
* @return exit code from execution of the command line
* @throws IOException error
*/ | Executes a given command line. Note stdout and stderr will be populated by this method | runCommand | {
"repo_name": "bl-dpt/dissimilar",
"path": "src/main/java/uk/bl/dpt/openjpeg/ToolRunner.java",
"license": "apache-2.0",
"size": 3231
} | [
"java.io.BufferedReader",
"java.io.FileReader"
] | import java.io.BufferedReader; import java.io.FileReader; | import java.io.*; | [
"java.io"
] | java.io; | 2,741,617 |
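A hypothetical usage of runCommand, assuming a no-argument ToolRunner constructor and an accessor for the gStdout reader (neither is shown in this record):

    ToolRunner runner = new ToolRunner();                                    // assumed constructor
    int exitCode = runner.runCommand(new ArrayList<>(Arrays.asList("opj_compress", "-h"))); // illustrative command
    BufferedReader stdout = runner.getStdout();                              // assumed accessor for gStdout
    for (String line = stdout.readLine(); line != null; line = stdout.readLine()) {
        System.out.println(line);
    }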
public List<DocumentError> getErrors() {
return this.errors;
} | List<DocumentError> function() { return this.errors; } | /**
* Get the errors property: Errors by document id.
*
* @return the errors value.
*/ | Get the errors property: Errors by document id | getErrors | {
"repo_name": "navalev/azure-sdk-for-java",
"path": "sdk/textanalytics/azure-ai-textanalytics/src/main/java/com/azure/ai/textanalytics/implementation/models/KeyPhraseResult.java",
"license": "mit",
"size": 3166
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,616,413 |
private void initComponents()
{
lblValue = new Label();
lblValue.setValue(Util.cleanAmp(Msg.translate(Env.getCtx(), "Value")));
lblName = new Label();
lblName.setValue(Util.cleanAmp(Msg.translate(Env.getCtx(), "Name")));
lblUPC = new Label();
lblUPC.setValue(Msg.translate(Env.getCtx(), "UPC"));
lblSKU = new Label();
lblSKU.setValue(Msg.translate(Env.getCtx(), "SKU"));
lblPriceList = new Label();
lblPriceList.setValue(Msg.getMsg(Env.getCtx(), "PriceListVersion"));
// Elaine 2008/11/21
lblProductCategory = new Label();
lblProductCategory.setValue(Msg.translate(Env.getCtx(), "M_Product_Category_ID"));
//
lblAS = new Label();
lblAS.setValue(Msg.translate(Env.getCtx(), "M_AttributeSet_ID"));
lblWarehouse = new Label();
lblWarehouse.setValue(Util.cleanAmp(Msg.getMsg(Env.getCtx(), "Warehouse")));
lblVendor = new Label();
lblVendor.setValue(Msg.translate(Env.getCtx(), "Vendor"));
m_InfoPAttributeButton.setImage("/images/PAttribute16.png");
m_InfoPAttributeButton.setTooltiptext(Msg.getMsg(Env.getCtx(), "PAttribute"));
m_InfoPAttributeButton.addEventListener(Events.ON_CLICK,this);
fieldValue = new Textbox();
fieldValue.setMaxlength(40);
fieldName = new Textbox();
fieldName.setMaxlength(40);
fieldUPC = new Textbox();
fieldUPC.setMaxlength(40);
fieldSKU = new Textbox();
fieldSKU.setMaxlength(40);
pickPriceList = new Listbox();
pickPriceList.setRows(0);
pickPriceList.setMultiple(false);
pickPriceList.setMold("select");
pickPriceList.setWidth("150px");
pickPriceList.addEventListener(Events.ON_SELECT, this);
// Elaine 2008/11/21
pickProductCategory = new Listbox();
pickProductCategory.setRows(0);
pickProductCategory.setMultiple(false);
pickProductCategory.setMold("select");
pickProductCategory.setWidth("150px");
pickProductCategory.addEventListener(Events.ON_SELECT, this);
//
pickAS = new Listbox();
pickAS.setRows(0);
pickAS.setMultiple(false);
pickAS.setMold("select");
pickAS.setWidth("150px");
pickAS.addEventListener(Events.ON_SELECT, this);
pickWarehouse = new Listbox();
pickWarehouse.setRows(0);
pickWarehouse.setMultiple(false);
pickWarehouse.setMold("select");
pickWarehouse.setWidth("150px");
pickWarehouse.addEventListener(Events.ON_SELECT, this);
fieldVendor = new Textbox();
fieldVendor.setMaxlength(40);
contentPanel.setVflex(true);
} // initComponents | void function() { lblValue = new Label(); lblValue.setValue(Util.cleanAmp(Msg.translate(Env.getCtx(), "Value"))); lblName = new Label(); lblName.setValue(Util.cleanAmp(Msg.translate(Env.getCtx(), "Name"))); lblUPC = new Label(); lblUPC.setValue(Msg.translate(Env.getCtx(), "UPC")); lblSKU = new Label(); lblSKU.setValue(Msg.translate(Env.getCtx(), "SKU")); lblPriceList = new Label(); lblPriceList.setValue(Msg.getMsg(Env.getCtx(), STR)); lblProductCategory = new Label(); lblProductCategory.setValue(Msg.translate(Env.getCtx(), STR)); lblAS.setValue(Msg.translate(Env.getCtx(), STR)); lblWarehouse = new Label(); lblWarehouse.setValue(Util.cleanAmp(Msg.getMsg(Env.getCtx(), STR))); lblVendor = new Label(); lblVendor.setValue(Msg.translate(Env.getCtx(), STR)); m_InfoPAttributeButton.setImage(STR); m_InfoPAttributeButton.setTooltiptext(Msg.getMsg(Env.getCtx(), STR)); m_InfoPAttributeButton.addEventListener(Events.ON_CLICK,this); fieldValue = new Textbox(); fieldValue.setMaxlength(40); fieldName = new Textbox(); fieldName.setMaxlength(40); fieldUPC = new Textbox(); fieldUPC.setMaxlength(40); fieldSKU = new Textbox(); fieldSKU.setMaxlength(40); pickPriceList = new Listbox(); pickPriceList.setRows(0); pickPriceList.setMultiple(false); pickPriceList.setMold(STR); pickPriceList.setWidth("150px"); pickPriceList.addEventListener(Events.ON_SELECT, this); pickProductCategory = new Listbox(); pickProductCategory.setRows(0); pickProductCategory.setMultiple(false); pickProductCategory.setMold(STR); pickProductCategory.setWidth("150px"); pickProductCategory.addEventListener(Events.ON_SELECT, this); pickAS.setRows(0); pickAS.setMultiple(false); pickAS.setMold(STR); pickAS.setWidth("150px"); pickAS.addEventListener(Events.ON_SELECT, this); pickWarehouse = new Listbox(); pickWarehouse.setRows(0); pickWarehouse.setMultiple(false); pickWarehouse.setMold(STR); pickWarehouse.setWidth("150px"); pickWarehouse.addEventListener(Events.ON_SELECT, this); fieldVendor = new Textbox(); fieldVendor.setMaxlength(40); contentPanel.setVflex(true); } | /**
* initialize fields
*/ | initialize fields | initComponents | {
"repo_name": "geneos/adempiere",
"path": "zkwebui/WEB-INF/src/org/adempiere/webui/panel/InfoProductPanel.java",
"license": "gpl-2.0",
"size": 50856
} | [
"org.adempiere.webui.component.Label",
"org.adempiere.webui.component.Listbox",
"org.adempiere.webui.component.Textbox",
"org.compiere.util.Env",
"org.compiere.util.Msg",
"org.compiere.util.Util",
"org.zkoss.zk.ui.event.Events"
] | import org.adempiere.webui.component.Label; import org.adempiere.webui.component.Listbox; import org.adempiere.webui.component.Textbox; import org.compiere.util.Env; import org.compiere.util.Msg; import org.compiere.util.Util; import org.zkoss.zk.ui.event.Events; | import org.adempiere.webui.component.*; import org.compiere.util.*; import org.zkoss.zk.ui.event.*; | [
"org.adempiere.webui",
"org.compiere.util",
"org.zkoss.zk"
] | org.adempiere.webui; org.compiere.util; org.zkoss.zk; | 2,128,455 |
protected static void verifyInputArray(double a[], double c[]) throws
IllegalArgumentException {
if (a.length < 1 || c.length < 1) {
throw MathRuntimeException.createIllegalArgumentException(
LocalizedFormats.EMPTY_POLYNOMIALS_COEFFICIENTS_ARRAY);
}
if (a.length != c.length + 1) {
throw MathRuntimeException.createIllegalArgumentException(
LocalizedFormats.ARRAY_SIZES_SHOULD_HAVE_DIFFERENCE_1,
a.length, c.length);
}
} | static void function(double a[], double c[]) throws IllegalArgumentException { if (a.length < 1 || c.length < 1) { throw MathRuntimeException.createIllegalArgumentException( LocalizedFormats.EMPTY_POLYNOMIALS_COEFFICIENTS_ARRAY); } if (a.length != c.length + 1) { throw MathRuntimeException.createIllegalArgumentException( LocalizedFormats.ARRAY_SIZES_SHOULD_HAVE_DIFFERENCE_1, a.length, c.length); } } | /**
* Verifies that the input arrays are valid.
* <p>
* The centers must be distinct for interpolation purposes, but not
* for general use. Thus it is not verified here.</p>
*
* @param a the coefficients in Newton form formula
* @param c the centers
* @throws IllegalArgumentException if not valid
* @see org.apache.commons.math.analysis.interpolation.DividedDifferenceInterpolator#computeDividedDifference(double[],
* double[])
*/ | Verifies that the input arrays are valid. The centers must be distinct for interpolation purposes, but not for general use. Thus it is not verified here | verifyInputArray | {
"repo_name": "SpoonLabs/astor",
"path": "examples/math_63/src/main/java/org/apache/commons/math/analysis/polynomials/PolynomialFunctionNewtonForm.java",
"license": "gpl-2.0",
"size": 7571
} | [
"org.apache.commons.math.MathRuntimeException",
"org.apache.commons.math.exception.util.LocalizedFormats"
] | import org.apache.commons.math.MathRuntimeException; import org.apache.commons.math.exception.util.LocalizedFormats; | import org.apache.commons.math.*; import org.apache.commons.math.exception.util.*; | [
"org.apache.commons"
] | org.apache.commons; | 1,660,164 |
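For context, a polynomial in Newton form with coefficients a and centers c is evaluated with a nested, Horner-like scheme; a self-contained sketch, independent of the Commons Math internals:

    // p(x) = a[0] + a[1](x-c[0]) + a[2](x-c[0])(x-c[1]) + ... , with a.length == c.length + 1
    static double evaluateNewtonForm(double[] a, double[] c, double x) {
        double value = a[c.length];
        for (int i = c.length - 1; i >= 0; i--) {
            value = a[i] + (x - c[i]) * value;
        }
        return value;
    }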
void handleMouseWheel(Event event, EditPartViewer viewer);
| void handleMouseWheel(Event event, EditPartViewer viewer); | /**
* Handles mouse-wheel events. If the given event was handled in some way,
* its {@link Event#doit doit} field should be set to false so as to prevent
* further processing of that event.
*
* @param event
* The SWT event that was generated as a result of the
* mouse-wheel scrolling
* @param viewer
* The originating viewer
*/ | Handles mouse-wheel events. If the given event was handled in some way, its <code>Event#doit doit</code> field should be set to false so as to prevent further processing of that event | handleMouseWheel | {
"repo_name": "opensagres/xdocreport.eclipse",
"path": "rap/org.eclipse.gef/src/org/eclipse/gef/MouseWheelHandler.java",
"license": "lgpl-2.1",
"size": 2691
} | [
"org.eclipse.swt.widgets.Event"
] | import org.eclipse.swt.widgets.Event; | import org.eclipse.swt.widgets.*; | [
"org.eclipse.swt"
] | org.eclipse.swt; | 2,895,835 |
System.out.println("=================================================================");
System.out.println("Listing all preferred deals on account");
System.out.println("=================================================================");
// Retrieve and display preferred deals.
PreferredDeals deals = adExchangeSeller.preferreddeals().list().execute();
if (deals.getItems() != null && !deals.getItems().isEmpty()) {
for (PreferredDeal deal : deals.getItems()) {
System.out.printf("Deal id \"%s\" ", deal.getId());
if (deal.getAdvertiserName() != null) {
System.out.printf("for advertiser \"%s\" ", deal.getAdvertiserName());
}
if (deal.getBuyerNetworkName() != null) {
System.out.printf("on network \"%s\" ", deal.getBuyerNetworkName());
}
System.out.println("was found.");
}
} else {
System.out.println("No preferred deals found.");
}
System.out.println();
} | System.out.println(STR); System.out.println(STR); System.out.println(STR); PreferredDeals deals = adExchangeSeller.preferreddeals().list().execute(); if (deals.getItems() != null && !deals.getItems().isEmpty()) { for (PreferredDeal deal : deals.getItems()) { System.out.printf(STR%s\" ", deal.getId()); if (deal.getAdvertiserName() != null) { System.out.printf(STR%s\" ", deal.getAdvertiserName()); } if (deal.getBuyerNetworkName() != null) { System.out.printf(STR%s\" ", deal.getBuyerNetworkName()); } System.out.println(STR); } } else { System.out.println(STR); } System.out.println(); } | /**
* Runs this sample.
*
* @param adExchangeSeller AdExchangeSeller service object on which to run the requests.
* @throws Exception
*/ | Runs this sample | run | {
"repo_name": "googleads/googleads-adxseller-examples",
"path": "java/v1.1/src/main/java/com/google/api/services/samples/adexchangeseller/cmdline/GetAllPreferredDeals.java",
"license": "apache-2.0",
"size": 2285
} | [
"com.google.api.services.adexchangeseller.model.PreferredDeal",
"com.google.api.services.adexchangeseller.model.PreferredDeals"
] | import com.google.api.services.adexchangeseller.model.PreferredDeal; import com.google.api.services.adexchangeseller.model.PreferredDeals; | import com.google.api.services.adexchangeseller.model.*; | [
"com.google.api"
] | com.google.api; | 947,832 |
public ComponentDefinitionsFactory createFactory(
ServletContext servletContext,
Map properties)
throws DefinitionsFactoryException {
String classname = (String) properties.get(DEFINITIONS_FACTORY_CLASSNAME);
if (classname != null) {
return createFactoryFromClassname(servletContext, properties, classname);
}
return new I18nFactorySet(servletContext, properties);
} | ComponentDefinitionsFactory function( ServletContext servletContext, Map properties) throws DefinitionsFactoryException { String classname = (String) properties.get(DEFINITIONS_FACTORY_CLASSNAME); if (classname != null) { return createFactoryFromClassname(servletContext, properties, classname); } return new I18nFactorySet(servletContext, properties); } | /**
* Create Definition factory.
* Convenience method. ServletConfig is wrapped into a Map allowing retrieval
* of init parameters. Factory classname is also retrieved, as well as debug level.
 * Finally, the appropriate createDefinitionsFactory() is called.
* @param servletContext Servlet Context passed to newly created factory.
* @param properties Map containing all properties.
 */ | Create Definition factory. Convenience method. ServletConfig is wrapped into a Map allowing retrieval of init parameters. Factory classname is also retrieved, as well as debug level. Finally, the appropriate createDefinitionsFactory() is called | createFactory | {
"repo_name": "davcamer/clients",
"path": "projects-for-testing/struts/tiles/src/main/java/org/apache/struts/tiles/definition/ReloadableDefinitionsFactory.java",
"license": "apache-2.0",
"size": 10358
} | [
"java.util.Map",
"javax.servlet.ServletContext",
"org.apache.struts.tiles.ComponentDefinitionsFactory",
"org.apache.struts.tiles.DefinitionsFactoryException",
"org.apache.struts.tiles.xmlDefinition.I18nFactorySet"
] | import java.util.Map; import javax.servlet.ServletContext; import org.apache.struts.tiles.ComponentDefinitionsFactory; import org.apache.struts.tiles.DefinitionsFactoryException; import org.apache.struts.tiles.xmlDefinition.I18nFactorySet; | import java.util.*; import javax.servlet.*; import org.apache.struts.tiles.*; | [
"java.util",
"javax.servlet",
"org.apache.struts"
] | java.util; javax.servlet; org.apache.struts; | 953,989 |
public void changeVendor(PaymentRequestDocument preq, Integer headerId, Integer detailId); | void function(PaymentRequestDocument preq, Integer headerId, Integer detailId); | /**
* Changes the current vendor to the vendor passed in.
*
* @param preq
* @param headerId
* @param detailId
* @param primaryHeaderId
* @param primaryDetailId
*/ | Changes the current vendor to the vendor passed in | changeVendor | {
"repo_name": "quikkian-ua-devops/will-financials",
"path": "kfs-purap/src/main/java/org/kuali/kfs/module/purap/document/service/PaymentRequestService.java",
"license": "agpl-3.0",
"size": 19090
} | [
"org.kuali.kfs.module.purap.document.PaymentRequestDocument"
] | import org.kuali.kfs.module.purap.document.PaymentRequestDocument; | import org.kuali.kfs.module.purap.document.*; | [
"org.kuali.kfs"
] | org.kuali.kfs; | 1,162,664 |
public void measureInWindow(int reactTag, Callback callback) {
mOperationsQueue.enqueueMeasureInWindow(reactTag, callback);
} | void function(int reactTag, Callback callback) { mOperationsQueue.enqueueMeasureInWindow(reactTag, callback); } | /**
* Determines the location on screen, width, and height of the given view relative to the device
* screen and returns the values via an async callback. This is the absolute position including
* things like the status bar
*/ | Determines the location on screen, width, and height of the given view relative to the device screen and returns the values via an async callback. This is the absolute position including things like the status bar | measureInWindow | {
"repo_name": "htc2u/react-native",
"path": "ReactAndroid/src/main/java/com/facebook/react/uimanager/UIImplementation.java",
"license": "bsd-3-clause",
"size": 31555
} | [
"com.facebook.react.bridge.Callback"
] | import com.facebook.react.bridge.Callback; | import com.facebook.react.bridge.*; | [
"com.facebook.react"
] | com.facebook.react; | 1,974,922 |
public String process(String document, Date documentCreationTime)
throws DocumentCreationTimeMissingException {
return process(document, documentCreationTime, getFormatter());
}
| String function(String document, Date documentCreationTime) throws DocumentCreationTimeMissingException { return process(document, documentCreationTime, getFormatter()); } | /**
* Processes document with HeidelTime
*
* @param document
* @return Annotated document
* @throws DocumentCreationTimeMissingException
* If document creation time is missing when processing a
* document of type {@link DocumentType#NEWS}. Use
* {@link #process(String, Date)} instead to provide document
* creation time!
*/ | Processes document with HeidelTime | process | {
"repo_name": "HeidelTime/heideltime",
"path": "src/de/unihd/dbs/heideltime/standalone/HeidelTimeStandalone.java",
"license": "gpl-3.0",
"size": 33640
} | [
"de.unihd.dbs.heideltime.standalone.exceptions.DocumentCreationTimeMissingException",
"java.util.Date"
] | import de.unihd.dbs.heideltime.standalone.exceptions.DocumentCreationTimeMissingException; import java.util.Date; | import de.unihd.dbs.heideltime.standalone.exceptions.*; import java.util.*; | [
"de.unihd.dbs",
"java.util"
] | de.unihd.dbs; java.util; | 1,193,265 |
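A hypothetical call to the documented process(String, Date) overload for a news document; the instance setup, sample text and date are made up:

    HeidelTimeStandalone heidelTime = /* configured for DocumentType.NEWS elsewhere */ null;
    Date dct = new GregorianCalendar(2015, Calendar.MARCH, 10).getTime();
    String timeml = heidelTime.process("The meeting took place last Tuesday.", dct);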
public void addEntryCb(int pos, SnmpOid row, ObjectName name,
Object entry, SnmpMibTable meta)
throws SnmpStatusException {
try {
if (entries != null) entries.add(pos,entry);
} catch (Exception e) {
throw new SnmpStatusException(SnmpStatusException.noSuchName);
}
} | void function(int pos, SnmpOid row, ObjectName name, Object entry, SnmpMibTable meta) throws SnmpStatusException { try { if (entries != null) entries.add(pos,entry); } catch (Exception e) { throw new SnmpStatusException(SnmpStatusException.noSuchName); } } | /**
* This callback is called by the associated metadata object
* when a new table entry has been registered in the
* table metadata.
*
* This method will update the <code>entries</code> list.
*
* @param pos The position at which the new entry was inserted
* in the table.
* @param row The row OID of the new entry
* @param name The ObjectName of the new entry (as specified by the
* factory)
* @param entry The new entry (as returned by the factory)
* @param meta The table metadata object.
*
**/ | This callback is called by the associated metadata object when a new table entry has been registered in the table metadata. This method will update the <code>entries</code> list | addEntryCb | {
"repo_name": "wangsongpeng/jdk-src",
"path": "src/main/java/com/sun/jmx/snmp/agent/SnmpTableSupport.java",
"license": "apache-2.0",
"size": 18653
} | [
"com.sun.jmx.snmp.SnmpOid",
"com.sun.jmx.snmp.SnmpStatusException",
"javax.management.ObjectName"
] | import com.sun.jmx.snmp.SnmpOid; import com.sun.jmx.snmp.SnmpStatusException; import javax.management.ObjectName; | import com.sun.jmx.snmp.*; import javax.management.*; | [
"com.sun.jmx",
"javax.management"
] | com.sun.jmx; javax.management; | 2,772,567 |
public static boolean isConferenceUserId(String userId) {
// test first if it a known conference user id
if (mConferenceUserIdByRoomId.values().contains(userId)) {
return true;
}
boolean res = false;
String prefix = "@" + USER_PREFIX;
String suffix = ":" + DOMAIN;
if (!TextUtils.isEmpty(userId) && userId.startsWith(prefix) && userId.endsWith(suffix)) {
String roomIdBase64 = userId.substring(prefix.length(), userId.length() - suffix.length());
try {
res = MXPatterns.isRoomId((new String(Base64.decode(roomIdBase64, Base64.NO_WRAP | Base64.URL_SAFE), "UTF-8")));
} catch (Exception e) {
Log.e(LOG_TAG, "isConferenceUserId : failed " + e.getMessage(), e);
}
}
return res;
} | static boolean function(String userId) { if (mConferenceUserIdByRoomId.values().contains(userId)) { return true; } boolean res = false; String prefix = "@" + USER_PREFIX; String suffix = ":" + DOMAIN; if (!TextUtils.isEmpty(userId) && userId.startsWith(prefix) && userId.endsWith(suffix)) { String roomIdBase64 = userId.substring(prefix.length(), userId.length() - suffix.length()); try { res = MXPatterns.isRoomId((new String(Base64.decode(roomIdBase64, Base64.NO_WRAP | Base64.URL_SAFE), "UTF-8"))); } catch (Exception e) { Log.e(LOG_TAG, STR + e.getMessage(), e); } } return res; } | /**
* Test if the provided user is a valid conference user Id
*
* @param userId the user id to test
* @return true if it is a valid conference user id
*/ | Test if the provided user is a valid conference user Id | isConferenceUserId | {
"repo_name": "matrix-org/matrix-android-sdk",
"path": "matrix-sdk/src/main/java/org/matrix/androidsdk/call/MXCallsManager.java",
"license": "apache-2.0",
"size": 50683
} | [
"android.text.TextUtils",
"android.util.Base64",
"org.matrix.androidsdk.core.Log",
"org.matrix.androidsdk.core.MXPatterns"
] | import android.text.TextUtils; import android.util.Base64; import org.matrix.androidsdk.core.Log; import org.matrix.androidsdk.core.MXPatterns; | import android.text.*; import android.util.*; import org.matrix.androidsdk.core.*; | [
"android.text",
"android.util",
"org.matrix.androidsdk"
] | android.text; android.util; org.matrix.androidsdk; | 1,638,603 |
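Mirroring the decode logic above, a conference user id for a room can be built like this (sketch; USER_PREFIX and DOMAIN are the same constants the method checks against):

    String roomIdBase64 = Base64.encodeToString(roomId.getBytes("UTF-8"),
            Base64.NO_WRAP | Base64.URL_SAFE);
    String conferenceUserId = "@" + USER_PREFIX + roomIdBase64 + ":" + DOMAIN;
    // isConferenceUserId(conferenceUserId) is then expected to return true for a valid room id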
public Schema getSchema(FailureCollector collector) {
if (schema == null && !containsMacro(NAME_SERVICE_ACCOUNT_TYPE) && !containsMacro(ACCOUNT_FILE_PATH)
&& !containsMacro(NAME_SERVICE_ACCOUNT_JSON)) {
if (dataSchemaInfo.isEmpty()) {
collector.addFailure("There are no headers to process.",
"Perhaps no validation step was executed before schema generation.")
.withConfigProperty(SCHEMA);
}
schema = SchemaBuilder.buildSchema(this, new ArrayList<>(dataSchemaInfo.values()));
}
return schema;
} | Schema function(FailureCollector collector) { if (schema == null && !containsMacro(NAME_SERVICE_ACCOUNT_TYPE) && !containsMacro(ACCOUNT_FILE_PATH) && !containsMacro(NAME_SERVICE_ACCOUNT_JSON)) { if (dataSchemaInfo.isEmpty()) { collector.addFailure(STR, STR) .withConfigProperty(SCHEMA); } schema = SchemaBuilder.buildSchema(this, new ArrayList<>(dataSchemaInfo.values())); } return schema; } | /**
* Returns the instance of Schema.
* @return The instance of Schema
* @param collector throws validation exception
*/ | Returns the instance of Schema | getSchema | {
"repo_name": "data-integrations/google-drive",
"path": "src/main/java/io/cdap/plugin/google/sheets/source/GoogleSheetsSourceConfig.java",
"license": "apache-2.0",
"size": 49747
} | [
"io.cdap.cdap.api.data.schema.Schema",
"io.cdap.cdap.etl.api.FailureCollector",
"java.util.ArrayList"
] | import io.cdap.cdap.api.data.schema.Schema; import io.cdap.cdap.etl.api.FailureCollector; import java.util.ArrayList; | import io.cdap.cdap.api.data.schema.*; import io.cdap.cdap.etl.api.*; import java.util.*; | [
"io.cdap.cdap",
"java.util"
] | io.cdap.cdap; java.util; | 241,575 |
public void setTimeSeriesInterval(Interval timeSeriesInterval) {
this.timeSeriesInterval = timeSeriesInterval;
} | void function(Interval timeSeriesInterval) { this.timeSeriesInterval = timeSeriesInterval; } | /**
* Sets the interval of the time series, which provides the max and min timestamps.
*/ | Sets the interval of the time series, which provides the max and min timestamps | setTimeSeriesInterval | {
"repo_name": "sajavadi/pinot",
"path": "thirdeye/thirdeye-pinot/src/main/java/com/linkedin/thirdeye/anomalydetection/context/TimeSeries.java",
"license": "apache-2.0",
"size": 3136
} | [
"org.joda.time.Interval"
] | import org.joda.time.Interval; | import org.joda.time.*; | [
"org.joda.time"
] | org.joda.time; | 2,699,852 |
private void createMapEntry(Attributes attributes) {
String key = attributes.getValue(XMLConstant.NAME.getIdentifier());
int x = Integer.valueOf(attributes.getValue(XMLConstant.LOCATION_X
.getIdentifier()));
int y = Integer.valueOf(attributes.getValue(XMLConstant.LOCATION_Y
.getIdentifier()));
int width = Integer.valueOf(attributes.getValue(XMLConstant.WIDTH
.getIdentifier()));
int height = Integer.valueOf(attributes.getValue(XMLConstant.HEIGHT
.getIdentifier()));
StateLayoutData layoutData = new StateLayoutData(new Point(x, y),
new Dimension(width, height));
_stateLayoutMap.put(key, layoutData);
} | void function(Attributes attributes) { String key = attributes.getValue(XMLConstant.NAME.getIdentifier()); int x = Integer.valueOf(attributes.getValue(XMLConstant.LOCATION_X .getIdentifier())); int y = Integer.valueOf(attributes.getValue(XMLConstant.LOCATION_Y .getIdentifier())); int width = Integer.valueOf(attributes.getValue(XMLConstant.WIDTH .getIdentifier())); int height = Integer.valueOf(attributes.getValue(XMLConstant.HEIGHT .getIdentifier())); StateLayoutData layoutData = new StateLayoutData(new Point(x, y), new Dimension(width, height)); _stateLayoutMap.put(key, layoutData); } | /**
* Creates and inserts a {@link StateLayoutData} into the
* <code>stateLayoutMap</code> with the name of the current element as key
* loaded from the attributes of the element.
*
* @param attributes
* The attributes of XML state or state-set entry
*/ | Creates and inserts a <code>StateLayoutData</code> into the <code>stateLayoutMap</code> with the name of the current element as key loaded from the attributes of the element | createMapEntry | {
"repo_name": "ESSICS/cs-studio",
"path": "applications/snl/snl-plugins/de.desy.language.snl.diagram/src/de/desy/language/snl/diagram/persistence/StateLayoutHandler.java",
"license": "epl-1.0",
"size": 2399
} | [
"org.eclipse.draw2d.geometry.Dimension",
"org.eclipse.draw2d.geometry.Point",
"org.xml.sax.Attributes"
] | import org.eclipse.draw2d.geometry.Dimension; import org.eclipse.draw2d.geometry.Point; import org.xml.sax.Attributes; | import org.eclipse.draw2d.geometry.*; import org.xml.sax.*; | [
"org.eclipse.draw2d",
"org.xml.sax"
] | org.eclipse.draw2d; org.xml.sax; | 2,314,767 |
public HttpSessionTokensSet getHttpSessionTokensSetForContext(Context context) {
// TODO: Proper implementation. Hack for now
for (Entry<String, HttpSessionTokensSet> e : this.sessionTokens.entrySet()) {
String siteName = e.getKey();
siteName = "http://" + siteName;
if (context.isInContext(siteName)) return e.getValue();
}
return null;
} | HttpSessionTokensSet function(Context context) { for (Entry<String, HttpSessionTokensSet> e : this.sessionTokens.entrySet()) { String siteName = e.getKey(); siteName = "http://" + siteName; if (context.isInContext(siteName)) return e.getValue(); } return null; } | /**
* Gets the http session tokens set for the first site matching a given Context.
*
* @param context the context
* @return the http session tokens set for context
*/ | Gets the http session tokens set for the first site matching a given Context | getHttpSessionTokensSetForContext | {
"repo_name": "meitar/zaproxy",
"path": "zap/src/main/java/org/zaproxy/zap/extension/httpsessions/ExtensionHttpSessions.java",
"license": "apache-2.0",
"size": 24861
} | [
"java.util.Map",
"org.zaproxy.zap.model.Context"
] | import java.util.Map; import org.zaproxy.zap.model.Context; | import java.util.*; import org.zaproxy.zap.model.*; | [
"java.util",
"org.zaproxy.zap"
] | java.util; org.zaproxy.zap; | 8,725 |
public static MultiColumnRelation createSingleMarkerInRelation(List<ColumnIdentifier> entities, Tuples.INRaw inMarker)
{
return new MultiColumnRelation(entities, Operator.IN, null, null, inMarker);
} | static MultiColumnRelation function(List<ColumnIdentifier> entities, Tuples.INRaw inMarker) { return new MultiColumnRelation(entities, Operator.IN, null, null, inMarker); } | /**
* Creates a multi-column IN relation with a marker for the IN values.
* For example: "SELECT ... WHERE (a, b) IN ?"
* @param entities the columns on the LHS of the relation
* @param inMarker a single IN marker
* @return a new <code>MultiColumnRelation</code> instance
*/ | Creates a multi-column IN relation with a marker for the IN values. For example: "SELECT ... WHERE (a, b) IN ?" | createSingleMarkerInRelation | {
"repo_name": "blerer/cassandra",
"path": "src/java/org/apache/cassandra/cql3/MultiColumnRelation.java",
"license": "apache-2.0",
"size": 10428
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 607,631 |
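An illustrative parse-time call for the javadoc example "SELECT ... WHERE (a, b) IN ?"; the column identifiers and the IN marker are assumed to come from the CQL parser:

    List<ColumnIdentifier> entities = Arrays.asList(aIdentifier, bIdentifier);  // columns a and b
    MultiColumnRelation relation = MultiColumnRelation.createSingleMarkerInRelation(entities, inMarker);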
public void action(ActionCode actionCode, Object param) {
if (actionCode == ActionCode.ACTION_COMMIT) {
if (response.isCommitted())
return;
// Validate and write response headers
try {
prepareResponse();
} catch (IOException e) {
// Set error flag
error = true;
}
} else if (actionCode == ActionCode.ACTION_CLIENT_FLUSH) {
if (!response.isCommitted()) {
// Validate and write response headers
try {
prepareResponse();
} catch (IOException e) {
// Set error flag
error = true;
return;
}
}
try {
flush();
// Send explicit flush message
if (Socket.sendb(socket, flushMessageBuffer, 0,
flushMessageBuffer.position()) < 0) {
error = true;
}
} catch (IOException e) {
// Set error flag
error = true;
}
} else if (actionCode == ActionCode.ACTION_CLOSE) {
// Close
// End the processing of the current request, and stop any further
// transactions with the client
try {
finish();
} catch (IOException e) {
// Set error flag
error = true;
}
} else if (actionCode == ActionCode.ACTION_START) {
started = true;
} else if (actionCode == ActionCode.ACTION_STOP) {
started = false;
} else if (actionCode == ActionCode.ACTION_REQ_SSL_ATTRIBUTE ) {
if (!certificates.isNull()) {
ByteChunk certData = certificates.getByteChunk();
X509Certificate jsseCerts[] = null;
ByteArrayInputStream bais =
new ByteArrayInputStream(certData.getBytes(),
certData.getStart(),
certData.getLength());
// Fill the elements.
try {
CertificateFactory cf;
if (clientCertProvider == null) {
cf = CertificateFactory.getInstance("X.509");
} else {
cf = CertificateFactory.getInstance("X.509",
clientCertProvider);
}
while(bais.available() > 0) {
X509Certificate cert = (X509Certificate)
cf.generateCertificate(bais);
if(jsseCerts == null) {
jsseCerts = new X509Certificate[1];
jsseCerts[0] = cert;
} else {
X509Certificate [] temp = new X509Certificate[jsseCerts.length+1];
System.arraycopy(jsseCerts,0,temp,0,jsseCerts.length);
temp[jsseCerts.length] = cert;
jsseCerts = temp;
}
}
} catch (java.security.cert.CertificateException e) {
log.error(sm.getString("ajpprocessor.certs.fail"), e);
return;
} catch (NoSuchProviderException e) {
log.error(sm.getString("ajpprocessor.certs.fail"), e);
return;
}
request.setAttribute(AprEndpoint.CERTIFICATE_KEY, jsseCerts);
}
} else if (actionCode == ActionCode.ACTION_REQ_HOST_ATTRIBUTE) {
// Get remote host name using a DNS resolution
if (request.remoteHost().isNull()) {
try {
request.remoteHost().setString(InetAddress.getByName
(request.remoteAddr().toString()).getHostName());
} catch (IOException iex) {
// Ignore
}
}
} else if (actionCode == ActionCode.ACTION_REQ_LOCAL_ADDR_ATTRIBUTE) {
// Automatically populated during prepareRequest() when using
// modern AJP forwarder, otherwise copy from local name
if (request.localAddr().isNull()) {
request.localAddr().setString(request.localName().toString());
}
} else if (actionCode == ActionCode.ACTION_REQ_SET_BODY_REPLAY) {
// Set the given bytes as the content
ByteChunk bc = (ByteChunk) param;
int length = bc.getLength();
bodyBytes.setBytes(bc.getBytes(), bc.getStart(), length);
request.setContentLength(length);
first = false;
empty = false;
replay = true;
endOfStream = false;
}
}
// ------------------------------------------------------ Connector Methods
| void function(ActionCode actionCode, Object param) { if (actionCode == ActionCode.ACTION_COMMIT) { if (response.isCommitted()) return; try { prepareResponse(); } catch (IOException e) { error = true; } } else if (actionCode == ActionCode.ACTION_CLIENT_FLUSH) { if (!response.isCommitted()) { try { prepareResponse(); } catch (IOException e) { error = true; return; } } try { flush(); if (Socket.sendb(socket, flushMessageBuffer, 0, flushMessageBuffer.position()) < 0) { error = true; } } catch (IOException e) { error = true; } } else if (actionCode == ActionCode.ACTION_CLOSE) { try { finish(); } catch (IOException e) { error = true; } } else if (actionCode == ActionCode.ACTION_START) { started = true; } else if (actionCode == ActionCode.ACTION_STOP) { started = false; } else if (actionCode == ActionCode.ACTION_REQ_SSL_ATTRIBUTE ) { if (!certificates.isNull()) { ByteChunk certData = certificates.getByteChunk(); X509Certificate jsseCerts[] = null; ByteArrayInputStream bais = new ByteArrayInputStream(certData.getBytes(), certData.getStart(), certData.getLength()); try { CertificateFactory cf; if (clientCertProvider == null) { cf = CertificateFactory.getInstance("X.509"); } else { cf = CertificateFactory.getInstance("X.509", clientCertProvider); } while(bais.available() > 0) { X509Certificate cert = (X509Certificate) cf.generateCertificate(bais); if(jsseCerts == null) { jsseCerts = new X509Certificate[1]; jsseCerts[0] = cert; } else { X509Certificate [] temp = new X509Certificate[jsseCerts.length+1]; System.arraycopy(jsseCerts,0,temp,0,jsseCerts.length); temp[jsseCerts.length] = cert; jsseCerts = temp; } } } catch (java.security.cert.CertificateException e) { log.error(sm.getString(STR), e); return; } catch (NoSuchProviderException e) { log.error(sm.getString(STR), e); return; } request.setAttribute(AprEndpoint.CERTIFICATE_KEY, jsseCerts); } } else if (actionCode == ActionCode.ACTION_REQ_HOST_ATTRIBUTE) { if (request.remoteHost().isNull()) { try { request.remoteHost().setString(InetAddress.getByName (request.remoteAddr().toString()).getHostName()); } catch (IOException iex) { } } } else if (actionCode == ActionCode.ACTION_REQ_LOCAL_ADDR_ATTRIBUTE) { if (request.localAddr().isNull()) { request.localAddr().setString(request.localName().toString()); } } else if (actionCode == ActionCode.ACTION_REQ_SET_BODY_REPLAY) { ByteChunk bc = (ByteChunk) param; int length = bc.getLength(); bodyBytes.setBytes(bc.getBytes(), bc.getStart(), length); request.setContentLength(length); first = false; empty = false; replay = true; endOfStream = false; } } | /**
* Send an action to the connector.
*
* @param actionCode Type of the action
* @param param Action parameter
*/ | Send an action to the connector | action | {
"repo_name": "yuyupapa/OpenSource",
"path": "apache-tomcat-6.0.48/java/org/apache/coyote/ajp/AjpAprProcessor.java",
"license": "apache-2.0",
"size": 45534
} | [
"java.io.ByteArrayInputStream",
"java.io.IOException",
"java.net.InetAddress",
"java.security.NoSuchProviderException",
"java.security.cert.CertificateFactory",
"java.security.cert.X509Certificate",
"org.apache.coyote.ActionCode",
"org.apache.tomcat.jni.Socket",
"org.apache.tomcat.util.buf.ByteChunk",
"org.apache.tomcat.util.net.AprEndpoint"
] | import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.InetAddress; import java.security.NoSuchProviderException; import java.security.cert.CertificateFactory; import java.security.cert.X509Certificate; import org.apache.coyote.ActionCode; import org.apache.tomcat.jni.Socket; import org.apache.tomcat.util.buf.ByteChunk; import org.apache.tomcat.util.net.AprEndpoint; | import java.io.*; import java.net.*; import java.security.*; import java.security.cert.*; import org.apache.coyote.*; import org.apache.tomcat.jni.*; import org.apache.tomcat.util.buf.*; import org.apache.tomcat.util.net.*; | [
"java.io",
"java.net",
"java.security",
"org.apache.coyote",
"org.apache.tomcat"
] | java.io; java.net; java.security; org.apache.coyote; org.apache.tomcat; | 2,736,281 |
public byte[] getBytesFromBlob(Blob blob) throws SQLException {
// read the bytes from an oracle blob
// oracle.sql.BLOB blob = ((OracleResultSet) res).getBLOB(columnName);
byte[] content = new byte[(int) blob.length()];
content = blob.getBytes(1, (int) blob.length());
return content;
} | byte[] function(Blob blob) throws SQLException { byte[] content = new byte[(int) blob.length()]; content = blob.getBytes(1, (int) blob.length()); return content; } | /**
* Returns the bytes from a result set
*
* @param res
* The ResultSet to read from
* @param columnName
* The name of the column to read from
*
* @return The byte value from the column
*/ | Returns the bytes from a result set | getBytesFromBlob | {
"repo_name": "WilliamRen/bbossgroups-3.5",
"path": "bboss-persistent/src/com/frameworkset/orm/adapter/DBOracle.java",
"license": "apache-2.0",
"size": 42446
} | [
"java.sql.Blob",
"java.sql.SQLException"
] | import java.sql.Blob; import java.sql.SQLException; | import java.sql.*; | [
"java.sql"
] | java.sql; | 1,835,984 |
public int getModeIndex() {
int modeIndex = -1;
int photoIndex = getResources().getInteger(R.integer.camera_mode_photo);
int videoIndex = getResources().getInteger(R.integer.camera_mode_video);
int gcamIndex = getResources().getInteger(R.integer.camera_mode_gcam);
if (MediaStore.INTENT_ACTION_VIDEO_CAMERA.equals(getIntent().getAction())
|| MediaStore.ACTION_VIDEO_CAPTURE.equals(getIntent().getAction())) {
modeIndex = videoIndex;
} else if (MediaStore.ACTION_IMAGE_CAPTURE.equals(getIntent().getAction())) {
// Capture intent.
modeIndex = photoIndex;
} else if (MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA.equals(getIntent().getAction())
||MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE.equals(getIntent()
.getAction())
|| MediaStore.ACTION_IMAGE_CAPTURE_SECURE.equals(getIntent().getAction())) {
modeIndex = mSettingsManager.getInteger(SettingsManager.SCOPE_GLOBAL,
Keys.KEY_CAMERA_MODULE_LAST_USED);
// For upgraders who have not seen the aspect ratio selection screen,
// we need to drop them back in the photo module and have them select
// aspect ratio.
// TODO: Move this to SettingsManager as an upgrade procedure.
if (!mSettingsManager.getBoolean(SettingsManager.SCOPE_GLOBAL,
Keys.KEY_USER_SELECTED_ASPECT_RATIO)) {
modeIndex = photoIndex;
}
} else {
// If the activity has not been started using an explicit intent,
// read the module index from the last time the user changed modes
modeIndex = mSettingsManager.getInteger(SettingsManager.SCOPE_GLOBAL,
Keys.KEY_STARTUP_MODULE_INDEX);
if ((modeIndex == gcamIndex &&
!GcamHelper.hasGcamAsSeparateModule()) || modeIndex < 0) {
modeIndex = photoIndex;
}
}
return modeIndex;
} | int function() { int modeIndex = -1; int photoIndex = getResources().getInteger(R.integer.camera_mode_photo); int videoIndex = getResources().getInteger(R.integer.camera_mode_video); int gcamIndex = getResources().getInteger(R.integer.camera_mode_gcam); if (MediaStore.INTENT_ACTION_VIDEO_CAMERA.equals(getIntent().getAction()) MediaStore.ACTION_VIDEO_CAPTURE.equals(getIntent().getAction())) { modeIndex = videoIndex; } else if (MediaStore.ACTION_IMAGE_CAPTURE.equals(getIntent().getAction())) { modeIndex = photoIndex; } else if (MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA.equals(getIntent().getAction()) MediaStore.INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE.equals(getIntent() .getAction()) MediaStore.ACTION_IMAGE_CAPTURE_SECURE.equals(getIntent().getAction())) { modeIndex = mSettingsManager.getInteger(SettingsManager.SCOPE_GLOBAL, Keys.KEY_CAMERA_MODULE_LAST_USED); if (!mSettingsManager.getBoolean(SettingsManager.SCOPE_GLOBAL, Keys.KEY_USER_SELECTED_ASPECT_RATIO)) { modeIndex = photoIndex; } } else { modeIndex = mSettingsManager.getInteger(SettingsManager.SCOPE_GLOBAL, Keys.KEY_STARTUP_MODULE_INDEX); if ((modeIndex == gcamIndex && !GcamHelper.hasGcamAsSeparateModule()) modeIndex < 0) { modeIndex = photoIndex; } } return modeIndex; } | /**
* Get the current mode index from the Intent or from persistent
* settings.
*/ | Get the current mode index from the Intent or from persistent settings | getModeIndex | {
"repo_name": "jameliu/Camera2",
"path": "app/src/main/java/com/android/camera/CameraActivity.java",
"license": "apache-2.0",
"size": 111033
} | [
"android.provider.MediaStore",
"com.android.camera.settings.Keys",
"com.android.camera.settings.SettingsManager",
"com.android.camera.util.GcamHelper"
] | import android.provider.MediaStore; import com.android.camera.settings.Keys; import com.android.camera.settings.SettingsManager; import com.android.camera.util.GcamHelper; | import android.provider.*; import com.android.camera.settings.*; import com.android.camera.util.*; | [
"android.provider",
"com.android.camera"
] | android.provider; com.android.camera; | 2,807,999 |
@Override
public final void commit() throws XAException {
if (tc.isEntryEnabled())
Tr.entry(tc, "commit", new Object[] { _resource, _xid, getPriority() });
if (tcSummary.isDebugEnabled())
Tr.debug(tcSummary, "xa_commit", this);
if (tc.isDebugEnabled())
Tr.debug(tc, "Committing resource with priority " + getPriority());
try {
if (_state == FAILED)
_resource = reconnectRM();
_resource.commit(_xid, false);
// Record the completion direction
_completedCommit = true;
destroy();
} catch (XAException xae) {
_completionXARC = xae.errorCode;
// Record the completion XA return code
FFDCFilter.processException(xae, "com.ibm.ws.Transaction.JTA.JTAXAResourceImpl.commit", "317", this);
throw xae;
} finally {
if (tc.isEntryEnabled())
Tr.exit(tc, "commit");
if (tcSummary.isDebugEnabled())
Tr.debug(tcSummary, "xa_commit result: " +
XAReturnCodeHelper.convertXACode(_completionXARC));
}
} | final void function() throws XAException { if (tc.isEntryEnabled()) Tr.entry(tc, STR, new Object[] { _resource, _xid, getPriority() }); if (tcSummary.isDebugEnabled()) Tr.debug(tcSummary, STR, this); if (tc.isDebugEnabled()) Tr.debug(tc, STR + getPriority()); try { if (_state == FAILED) _resource = reconnectRM(); _resource.commit(_xid, false); _completedCommit = true; destroy(); } catch (XAException xae) { _completionXARC = xae.errorCode; FFDCFilter.processException(xae, STR, "317", this); throw xae; } finally { if (tc.isEntryEnabled()) Tr.exit(tc, STR); if (tcSummary.isDebugEnabled()) Tr.debug(tcSummary, STR + XAReturnCodeHelper.convertXACode(_completionXARC)); } } | /**
* Commit a transaction.
*
* @exception XAException
*/ | Commit a transaction | commit | {
"repo_name": "kgibm/open-liberty",
"path": "dev/com.ibm.tx.core/src/com/ibm/tx/jta/impl/JTAXAResourceImpl.java",
"license": "epl-1.0",
"size": 20759
} | [
"com.ibm.websphere.ras.Tr",
"com.ibm.ws.Transaction",
"com.ibm.ws.ffdc.FFDCFilter",
"javax.transaction.xa.XAException"
] | import com.ibm.websphere.ras.Tr; import com.ibm.ws.Transaction; import com.ibm.ws.ffdc.FFDCFilter; import javax.transaction.xa.XAException; | import com.ibm.websphere.ras.*; import com.ibm.ws.*; import com.ibm.ws.ffdc.*; import javax.transaction.xa.*; | [
"com.ibm.websphere",
"com.ibm.ws",
"javax.transaction"
] | com.ibm.websphere; com.ibm.ws; javax.transaction; | 1,833,240 |
public boolean exist(String word) throws IOException {
// obtainSearcher calls ensureOpen
final IndexSearcher indexSearcher = obtainSearcher();
try{
// TODO: we should use ReaderUtil+seekExact, we dont care about the docFreq
// this is just an existence check
return indexSearcher.getIndexReader().docFreq(new Term(F_WORD, word)) > 0;
} finally {
releaseSearcher(indexSearcher);
}
} | boolean function(String word) throws IOException { final IndexSearcher indexSearcher = obtainSearcher(); try{ return indexSearcher.getIndexReader().docFreq(new Term(F_WORD, word)) > 0; } finally { releaseSearcher(indexSearcher); } } | /**
* Check whether the word exists in the index.
* @param word word to check
* @throws IOException If there is a low-level I/O error.
* @throws AlreadyClosedException if the Spellchecker is already closed
* @return true if the word exists in the index
*/ | Check whether the word exists in the index | exist | {
"repo_name": "PATRIC3/p3_solr",
"path": "lucene/suggest/src/java/org/apache/lucene/search/spell/SpellChecker.java",
"license": "apache-2.0",
"size": 24627
} | [
"java.io.IOException",
"org.apache.lucene.index.Term",
"org.apache.lucene.search.IndexSearcher"
] | import java.io.IOException; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; | import java.io.*; import org.apache.lucene.index.*; import org.apache.lucene.search.*; | [
"java.io",
"org.apache.lucene"
] | java.io; org.apache.lucene; | 738,579 |
public Cell createNewCell(int row, int col) {
Cell cell = new Cell(row, col);
cells[col][row] = cell;
return cell;
}
/**
* Returns the deepest nested child component which contains "element". The
* child component is also returned if "element" is part of its caption.
* <p>
* For internal use only. May be removed or replaced in the future.
*
* @param element
* An element that is a nested sub element of the root element in
* this layout
* @return The Paintable which the element is a part of. Null if the element
* belongs to the layout and not to a child.
* @deprecated As of 7.2, call or override {@link #getComponent(Element)} | Cell function(int row, int col) { Cell cell = new Cell(row, col); cells[col][row] = cell; return cell; } /** * Returns the deepest nested child component which contains STR. The * child component is also returned if STR is part of its caption. * <p> * For internal use only. May be removed or replaced in the future. * * @param element * An element that is a nested sub element of the root element in * this layout * @return The Paintable which the element is a part of. Null if the element * belongs to the layout and not to a child. * @deprecated As of 7.2, call or override {@link #getComponent(Element)} | /**
* Creates a new Cell with the given coordinates.
* <p>
* For internal use only. May be removed or replaced in the future.
*
* @param row
* @param col
* @return
*/ | Creates a new Cell with the given coordinates. For internal use only. May be removed or replaced in the future | createNewCell | {
"repo_name": "jdahlstrom/vaadin.react",
"path": "client/src/main/java/com/vaadin/client/ui/VGridLayout.java",
"license": "apache-2.0",
"size": 31870
} | [
"com.google.gwt.dom.client.Element"
] | import com.google.gwt.dom.client.Element; | import com.google.gwt.dom.client.*; | [
"com.google.gwt"
] | com.google.gwt; | 1,198,349 |
public void setValue(double value) {
int width = getWidth();
buffGr2D.copyArea(0, 0, width, getHeight(), -1, 0);
// draw the background
buffGr2D.setColor(Color.BLACK);
buffGr2D.drawRect(width - 1, 0, 2, getHeight());
// draw the line
int y = realCoordToPanel(value);
buffGr2D.setColor(Color.GREEN);
buffGr2D.drawLine(width - 1, lastY, width - 1, y);
lastY = y;
repaint();
} | void function(double value) { int width = getWidth(); buffGr2D.copyArea(0, 0, width, getHeight(), -1, 0); buffGr2D.setColor(Color.BLACK); buffGr2D.drawRect(width - 1, 0, 2, getHeight()); int y = realCoordToPanel(value); buffGr2D.setColor(Color.GREEN); buffGr2D.drawLine(width - 1, lastY, width - 1, y); lastY = y; repaint(); } | /**
 * Tells the oscilloscope that a new value has just been entered. The panel
 * is then refreshed
*
* @param value
* @uml.property name="value"
 */ | Tells the oscilloscope that a new value has just been entered. The panel is then refreshed | setValue |
"repo_name": "yesnault/tetrahead",
"path": "src/gui/presentation/ViewOscilloscope.java",
"license": "gpl-2.0",
"size": 2558
} | [
"java.awt.Color"
] | import java.awt.Color; | import java.awt.*; | [
"java.awt"
] | java.awt; | 2,538,658 |