_id (string, 2-7 chars) | title (string, 3-140 chars) | partition (3 classes) | text (string, 73-34.1k chars) | language (1 class) | meta_information (dict)
---|---|---|---|---|---|
q3200
|
ParallelClient.preparePing
|
train
|
public ParallelTaskBuilder preparePing() {
reinitIfClosed();
ParallelTaskBuilder cb = new ParallelTaskBuilder();
cb.setProtocol(RequestProtocol.PING);
return cb;
}
|
java
|
{
"resource": ""
}
|
q3201
|
ParallelClient.prepareTcp
|
train
|
public ParallelTaskBuilder prepareTcp(String command) {
reinitIfClosed();
ParallelTaskBuilder cb = new ParallelTaskBuilder();
cb.setProtocol(RequestProtocol.TCP);
cb.getTcpMeta().setCommand(command);
return cb;
}
|
java
|
{
"resource": ""
}
|
q3202
|
ParallelClient.prepareUdp
|
train
|
public ParallelTaskBuilder prepareUdp(String command) {
reinitIfClosed();
ParallelTaskBuilder cb = new ParallelTaskBuilder();
cb.setProtocol(RequestProtocol.UDP);
cb.getUdpMeta().setCommand(command);
return cb;
}
|
java
|
{
"resource": ""
}
|
q3203
|
ParallelClient.prepareHttpGet
|
train
|
public ParallelTaskBuilder prepareHttpGet(String url) {
reinitIfClosed();
ParallelTaskBuilder cb = new ParallelTaskBuilder();
cb.getHttpMeta().setHttpMethod(HttpMethod.GET);
cb.getHttpMeta().setRequestUrlPostfix(url);
return cb;
}
|
java
|
{
"resource": ""
}
|
q3204
|
ParallelClient.prepareHttpPost
|
train
|
public ParallelTaskBuilder prepareHttpPost(String url) {
reinitIfClosed();
ParallelTaskBuilder cb = new ParallelTaskBuilder();
cb.getHttpMeta().setHttpMethod(HttpMethod.POST);
cb.getHttpMeta().setRequestUrlPostfix(url);
return cb;
}
|
java
|
{
"resource": ""
}
|
q3205
|
ParallelClient.prepareHttpDelete
|
train
|
public ParallelTaskBuilder prepareHttpDelete(String url) {
reinitIfClosed();
ParallelTaskBuilder cb = new ParallelTaskBuilder();
cb.getHttpMeta().setHttpMethod(HttpMethod.DELETE);
cb.getHttpMeta().setRequestUrlPostfix(url);
return cb;
}
|
java
|
{
"resource": ""
}
|
q3206
|
ParallelClient.prepareHttpPut
|
train
|
public ParallelTaskBuilder prepareHttpPut(String url) {
reinitIfClosed();
ParallelTaskBuilder cb = new ParallelTaskBuilder();
cb.getHttpMeta().setHttpMethod(HttpMethod.PUT);
cb.getHttpMeta().setRequestUrlPostfix(url);
return cb;
}
|
java
|
{
"resource": ""
}
|
q3207
|
ParallelClient.prepareHttpHead
|
train
|
public ParallelTaskBuilder prepareHttpHead(String url) {
reinitIfClosed();
ParallelTaskBuilder cb = new ParallelTaskBuilder();
cb.getHttpMeta().setHttpMethod(HttpMethod.HEAD);
cb.getHttpMeta().setRequestUrlPostfix(url);
return cb;
}
|
java
|
{
"resource": ""
}
|
q3208
|
ParallelClient.prepareHttpOptions
|
train
|
public ParallelTaskBuilder prepareHttpOptions(String url) {
reinitIfClosed();
ParallelTaskBuilder cb = new ParallelTaskBuilder();
cb.getHttpMeta().setHttpMethod(HttpMethod.OPTIONS);
cb.getHttpMeta().setRequestUrlPostfix(url);
return cb;
}
|
java
|
{
"resource": ""
}
|
q3209
|
ExecutionManager.cancelRequestAndWorkers
|
train
|
@SuppressWarnings("deprecation")
private void cancelRequestAndWorkers() {
for (ActorRef worker : workers.values()) {
if (worker != null && !worker.isTerminated()) {
worker.tell(OperationWorkerMsgType.CANCEL, getSelf());
}
}
logger.info("ExecutionManager sending cancelPendingRequest at time: "
+ PcDateUtils.getNowDateTimeStr());
}
|
java
|
{
"resource": ""
}
|
q3210
|
ExecutionManager.cancelRequestAndWorkerOnHost
|
train
|
@SuppressWarnings("deprecation")
private void cancelRequestAndWorkerOnHost(List<String> targetHosts) {
List<String> validTargetHosts = new ArrayList<String>(workers.keySet());
validTargetHosts.retainAll(targetHosts);
logger.info("targetHosts for cancel: Total: {}"
+ " Valid in current manager with worker threads: {}",
targetHosts.size(), validTargetHosts.size());
for (String targetHost : validTargetHosts) {
ActorRef worker = workers.get(targetHost);
if (worker != null && !worker.isTerminated()) {
worker.tell(OperationWorkerMsgType.CANCEL, getSelf());
logger.info("Submitted CANCEL request on Host {}", targetHost);
} else {
logger.info(
"Did NOT Submitted "
+ "CANCEL request on Host {} as worker on this host is null or already killed",
targetHost);
}
}
}
|
java
|
{
"resource": ""
}
|
q3211
|
SshProvider.startSshSessionAndObtainSession
|
train
|
public Session startSshSessionAndObtainSession() {
    Session session = null;
    try {
        JSch jsch = new JSch();
        if (sshMeta.getSshLoginType() == SshLoginType.KEY) {
            String workingDir = System.getProperty("user.dir");
            String privKeyAbsPath = workingDir + "/"
                    + sshMeta.getPrivKeyRelativePath();
            logger.debug("use privkey: path: " + privKeyAbsPath);
            if (!PcFileNetworkIoUtils.isFileExist(privKeyAbsPath)) {
                throw new RuntimeException("file not found at "
                        + privKeyAbsPath);
            }
            if (sshMeta.isPrivKeyUsePassphrase()
                    && sshMeta.getPassphrase() != null) {
                jsch.addIdentity(privKeyAbsPath, sshMeta.getPassphrase());
            } else {
                jsch.addIdentity(privKeyAbsPath);
            }
        }
        session = jsch.getSession(sshMeta.getUserName(), targetHost,
                sshMeta.getSshPort());
        if (sshMeta.getSshLoginType() == SshLoginType.PASSWORD) {
            session.setPassword(sshMeta.getPassword());
        }
        session.setConfig("StrictHostKeyChecking", "no");
    } catch (Exception t) {
        throw new RuntimeException(t);
    }
    return session;
}
|
java
|
{
"resource": ""
}
|
q3212
|
SshProvider.sessionConnectGenerateChannel
|
train
|
public Channel sessionConnectGenerateChannel(Session session)
        throws JSchException {
    // set timeout
    session.connect(sshMeta.getSshConnectionTimeoutMillis());
    ChannelExec channel = (ChannelExec) session.openChannel("exec");
    channel.setCommand(sshMeta.getCommandLine());
    // if run as super user, assuming the input stream expecting a password
    if (sshMeta.isRunAsSuperUser()) {
        try {
            channel.setInputStream(null, true);
            OutputStream out = channel.getOutputStream();
            channel.setOutputStream(System.out, true);
            channel.setExtOutputStream(System.err, true);
            channel.setPty(true);
            channel.connect();
            out.write((sshMeta.getPassword() + "\n").getBytes());
            out.flush();
        } catch (IOException e) {
            logger.error("error in sessionConnectGenerateChannel for super user", e);
        }
    } else {
        channel.setInputStream(null);
        channel.connect();
    }
    return channel;
}
|
java
|
{
"resource": ""
}
|
q3213
|
SshProvider.genErrorResponse
|
train
|
public ResponseOnSingeRequest genErrorResponse(Exception t) {
ResponseOnSingeRequest sshResponse = new ResponseOnSingeRequest();
String displayError = PcErrorMsgUtils.replaceErrorMsg(t.toString());
sshResponse.setStackTrace(PcStringUtils.printStackTrace(t));
sshResponse.setErrorMessage(displayError);
sshResponse.setFailObtainResponse(true);
logger.error("error in exec SSH. \nIf exection is JSchException: "
+ "Auth cancel and using public key. "
+ "\nMake sure 1. private key full path is right (try sshMeta.getPrivKeyAbsPath()). "
+ "\n2. the user name and key matches " + t);
return sshResponse;
}
|
java
|
{
"resource": ""
}
|
q3214
|
InternalDataProvider.genNodeDataMap
|
train
|
public void genNodeDataMap(ParallelTask task) {
TargetHostMeta targetHostMeta = task.getTargetHostMeta();
HttpMeta httpMeta = task.getHttpMeta();
String entityBody = httpMeta.getEntityBody();
String requestContent = HttpMeta
.replaceDefaultFullRequestContent(entityBody);
Map<String, NodeReqResponse> parallelTaskResult = task
.getParallelTaskResult();
for (String fqdn : targetHostMeta.getHosts()) {
NodeReqResponse nodeReqResponse = new NodeReqResponse(fqdn);
nodeReqResponse.setDefaultReqestContent(requestContent);
parallelTaskResult.put(fqdn, nodeReqResponse);
}
}
|
java
|
{
"resource": ""
}
|
q3215
|
InternalDataProvider.filterUnsafeOrUnnecessaryRequest
|
train
|
public void filterUnsafeOrUnnecessaryRequest(
        Map<String, NodeReqResponse> nodeDataMapValidSource,
        Map<String, NodeReqResponse> nodeDataMapValidSafe) {
    for (Entry<String, NodeReqResponse> entry : nodeDataMapValidSource
            .entrySet()) {
        String hostName = entry.getKey();
        NodeReqResponse nrr = entry.getValue();
        Map<String, String> map = nrr.getRequestParameters();
        /**
         * 20130507: will generally apply to all requests: if have this
         * field and this field is false
         */
        if (map.containsKey(PcConstants.NODE_REQUEST_WILL_EXECUTE)) {
            Boolean willExecute = Boolean.parseBoolean(map
                    .get(PcConstants.NODE_REQUEST_WILL_EXECUTE));
            if (!willExecute) {
                logger.info("NOT_EXECUTE_COMMAND " + " on target: "
                        + hostName + " at "
                        + PcDateUtils.getNowDateTimeStrStandard());
                continue;
            }
        }
        // now safely to add this node in.
        nodeDataMapValidSafe.put(hostName, nrr);
    }// end for loop
}
|
java
|
{
"resource": ""
}
|
q3216
|
TcpWorker.bootStrapTcpClient
|
train
|
public ClientBootstrap bootStrapTcpClient()
throws HttpRequestCreateException {
ClientBootstrap tcpClient = null;
try {
// Configure the client.
tcpClient = new ClientBootstrap(tcpMeta.getChannelFactory());
// Configure the pipeline factory.
tcpClient.setPipelineFactory(new MyPipelineFactory(TcpUdpSshPingResourceStore.getInstance().getTimer(),
this, tcpMeta.getTcpIdleTimeoutSec())
);
tcpClient.setOption("connectTimeoutMillis",
tcpMeta.getTcpConnectTimeoutMillis());
tcpClient.setOption("tcpNoDelay", true);
// tcpClient.setOption("keepAlive", true);
} catch (Exception t) {
throw new TcpUdpRequestCreateException(
"Error in creating request in Tcpworker. "
+ " If tcpClient is null. Then fail to create.", t);
}
return tcpClient;
}
|
java
|
{
"resource": ""
}
|
q3217
|
TcpWorker.reply
|
train
|
private void reply(final String response, final boolean error,
        final String errorMessage, final String stackTrace,
        final String statusCode, final int statusCodeInt) {
    if (!sentReply) {
        // must update sentReply first to avoid duplicated msg.
        sentReply = true;
        // Close the connection. Make sure the close operation ends because
        // all I/O operations are asynchronous in Netty.
        if (channel != null && channel.isOpen())
            channel.close().awaitUninterruptibly();
        final ResponseOnSingeRequest res = new ResponseOnSingeRequest(
                response, error, errorMessage, stackTrace, statusCode,
                statusCodeInt, PcDateUtils.getNowDateTimeStrStandard(), null);
        if (!getContext().system().deadLetters().equals(sender)) {
            sender.tell(res, getSelf());
        }
        if (getContext() != null) {
            getContext().stop(getSelf());
        }
    }
}
|
java
|
{
"resource": ""
}
|
q3218
|
FilterRegex.stringMatcherByPattern
|
train
|
public static String stringMatcherByPattern(String input, String patternStr) {
String output = PcConstants.SYSTEM_FAIL_MATCH_REGEX;
// 20140105: fix the NPE issue
if (patternStr == null) {
logger.error("patternStr is NULL! (Expected when the aggregation rule is not defined at "
+ PcDateUtils.getNowDateTimeStrStandard());
return output;
}
if (input == null) {
logger.error("input (Expected when the response is null and now try to match on response) is NULL in stringMatcherByPattern() at "
+ PcDateUtils.getNowDateTimeStrStandard());
return output;
} else {
input = input.replace("\n", "").replace("\r", "");
}
logger.debug("input: " + input);
logger.debug("patternStr: " + patternStr);
Pattern patternMetric = Pattern.compile(patternStr, Pattern.MULTILINE);
final Matcher matcher = patternMetric.matcher(input);
if (matcher.matches()) {
output = matcher.group(1);
}
return output;
}
|
java
|
{
"resource": ""
}
|
q3219
|
ParallelTaskManager.initTaskSchedulerIfNot
|
train
|
public synchronized void initTaskSchedulerIfNot() {
if (scheduler == null) {
scheduler = Executors
.newSingleThreadScheduledExecutor(DaemonThreadFactory
.getInstance());
CapacityAwareTaskScheduler runner = new CapacityAwareTaskScheduler();
scheduler.scheduleAtFixedRate(runner,
ParallecGlobalConfig.schedulerInitDelay,
ParallecGlobalConfig.schedulerCheckInterval,
TimeUnit.MILLISECONDS);
logger.info("initialized daemon task scheduler to evaluate waitQ tasks.");
}
}
|
java
|
{
"resource": ""
}
|
q3220
|
ParallelTaskManager.shutdownTaskScheduler
|
train
|
public synchronized void shutdownTaskScheduler(){
if (scheduler != null && !scheduler.isShutdown()) {
scheduler.shutdown();
logger.info("shutdowned the task scheduler. No longer accepting new tasks");
scheduler = null;
}
}
|
java
|
{
"resource": ""
}
|
q3221
|
ParallelTaskManager.getTaskFromInProgressMap
|
train
|
public ParallelTask getTaskFromInProgressMap(String jobId) {
if (!inprogressTaskMap.containsKey(jobId))
return null;
return inprogressTaskMap.get(jobId);
}
|
java
|
{
"resource": ""
}
|
q3222
|
ParallelTaskManager.getTotalUsedCapacity
|
train
|
public int getTotalUsedCapacity() {
int totalCapacity = 0;
for (Entry<String, ParallelTask> entry : inprogressTaskMap.entrySet()) {
ParallelTask task = entry.getValue();
if (task != null)
totalCapacity += task.capacityUsed();
}
return totalCapacity;
}
|
java
|
{
"resource": ""
}
|
q3223
|
ParallelTaskManager.cleanWaitTaskQueue
|
train
|
public synchronized void cleanWaitTaskQueue() {
for (ParallelTask task : waitQ) {
task.setState(ParallelTaskState.COMPLETED_WITH_ERROR);
task.getTaskErrorMetas().add(
new TaskErrorMeta(TaskErrorType.USER_CANCELED, "NA"));
logger.info(
"task {} removed from wait q. This task has been marked as USER CANCELED.",
task.getTaskId());
}
waitQ.clear();
}
|
java
|
{
"resource": ""
}
|
q3224
|
ParallelTaskManager.removeTaskFromWaitQ
|
train
|
public synchronized boolean removeTaskFromWaitQ(ParallelTask taskTobeRemoved) {
boolean removed = false;
for (ParallelTask task : waitQ) {
if (task.getTaskId() == taskTobeRemoved.getTaskId()) {
task.setState(ParallelTaskState.COMPLETED_WITH_ERROR);
task.getTaskErrorMetas().add(
new TaskErrorMeta(TaskErrorType.USER_CANCELED, "NA"));
logger.info(
"task {} removed from wait q. This task has been marked as USER CANCELED.",
task.getTaskId());
removed = true;
}
}
return removed;
}
|
java
|
{
"resource": ""
}
|
q3225
|
ParallelTaskManager.generateUpdateExecuteTask
|
train
|
public ResponseFromManager generateUpdateExecuteTask(ParallelTask task) {
    // add to map now; as can only pass final
    ParallelTaskManager.getInstance().addTaskToInProgressMap(
            task.getTaskId(), task);
    logger.info("Added task {} to the running inprogress map...",
            task.getTaskId());
    boolean useReplacementVarMap = false;
    boolean useReplacementVarMapNodeSpecific = false;
    Map<String, StrStrMap> replacementVarMapNodeSpecific = null;
    Map<String, String> replacementVarMap = null;
    ResponseFromManager batchResponseFromManager = null;
    switch (task.getRequestReplacementType()) {
    case UNIFORM_VAR_REPLACEMENT:
        useReplacementVarMap = true;
        useReplacementVarMapNodeSpecific = false;
        replacementVarMap = task.getReplacementVarMap();
        break;
    case TARGET_HOST_SPECIFIC_VAR_REPLACEMENT:
        useReplacementVarMap = false;
        useReplacementVarMapNodeSpecific = true;
        replacementVarMapNodeSpecific = task
                .getReplacementVarMapNodeSpecific();
        break;
    case NO_REPLACEMENT:
        useReplacementVarMap = false;
        useReplacementVarMapNodeSpecific = false;
        break;
    default:
        logger.error("error request replacement type. default as no replacement");
    }// end switch
    // generate content in nodedata
    InternalDataProvider dp = InternalDataProvider.getInstance();
    dp.genNodeDataMap(task);
    VarReplacementProvider.getInstance()
            .updateRequestWithReplacement(task, useReplacementVarMap,
                    replacementVarMap, useReplacementVarMapNodeSpecific,
                    replacementVarMapNodeSpecific);
    batchResponseFromManager = sendTaskToExecutionManager(task);
    removeTaskFromInProgressMap(task.getTaskId());
    logger.info(
            "Removed task {} from the running inprogress map... "
                    + ". This task should be garbage collected if there are no other pointers.",
            task.getTaskId());
    return batchResponseFromManager;
}
|
java
|
{
"resource": ""
}
|
q3226
|
ParallelTaskManager.sendTaskToExecutionManager
|
train
|
@SuppressWarnings("deprecation")
public ResponseFromManager sendTaskToExecutionManager(ParallelTask task) {
    ResponseFromManager commandResponseFromManager = null;
    ActorRef executionManager = null;
    try {
        // Start new job
        logger.info("!!STARTED sendAgentCommandToManager : "
                + task.getTaskId() + " at "
                + PcDateUtils.getNowDateTimeStr());
        executionManager = ActorConfig.createAndGetActorSystem().actorOf(
                Props.create(ExecutionManager.class, task),
                "ExecutionManager-" + task.getTaskId());
        final FiniteDuration duration = Duration.create(task.getConfig()
                .getTimeoutAskManagerSec(), TimeUnit.SECONDS);
        // Timeout timeout = new
        // Timeout(FiniteDuration.parse("300 seconds"));
        Future<Object> future = Patterns.ask(executionManager,
                new InitialRequestToManager(task), new Timeout(duration));
        // set ref
        task.executionManager = executionManager;
        commandResponseFromManager = (ResponseFromManager) Await.result(
                future, duration);
        logger.info("!!COMPLETED sendTaskToExecutionManager : "
                + task.getTaskId() + " at "
                + PcDateUtils.getNowDateTimeStr()
                + " \t\t GenericResponseMap in future size: "
                + commandResponseFromManager.getResponseCount());
    } catch (Exception ex) {
        logger.error("Exception in sendTaskToExecutionManager {} details {}: ",
                ex, ex);
    } finally {
        // stop the manager
        if (executionManager != null && !executionManager.isTerminated()) {
            ActorConfig.createAndGetActorSystem().stop(executionManager);
        }
        if (task.getConfig().isAutoSaveLogToLocal()) {
            task.saveLogToLocal();
        }
    }
    return commandResponseFromManager;
}
|
java
|
{
"resource": ""
}
|
q3227
|
PcFileNetworkIoUtils.isFileExist
|
train
|
public static boolean isFileExist(String filePath) {
File f = new File(filePath);
return f.exists() && !f.isDirectory();
}
|
java
|
{
"resource": ""
}
|
q3228
|
PcFileNetworkIoUtils.readFileContentToString
|
train
|
public static String readFileContentToString(String filePath)
throws IOException {
String content = "";
content = Files.toString(new File(filePath), Charsets.UTF_8);
return content;
}
|
java
|
{
"resource": ""
}
|
q3229
|
PcFileNetworkIoUtils.readStringFromUrlGeneric
|
train
|
public static String readStringFromUrlGeneric(String url)
throws IOException {
InputStream is = null;
URL urlObj = null;
String responseString = PcConstants.NA;
try {
urlObj = new URL(url);
URLConnection con = urlObj.openConnection();
con.setConnectTimeout(ParallecGlobalConfig.urlConnectionConnectTimeoutMillis);
con.setReadTimeout(ParallecGlobalConfig.urlConnectionReadTimeoutMillis);
is = con.getInputStream();
BufferedReader rd = new BufferedReader(new InputStreamReader(is,
Charset.forName("UTF-8")));
responseString = PcFileNetworkIoUtils.readAll(rd);
} finally {
if (is != null) {
is.close();
}
}
return responseString;
}
|
java
|
{
"resource": ""
}
|
q3230
|
VarReplacementProvider.updateRequestByAddingReplaceVarPair
|
train
|
public void updateRequestByAddingReplaceVarPair(
ParallelTask task, String replaceVarKey, String replaceVarValue) {
Map<String, NodeReqResponse> taskResult = task.getParallelTaskResult();
for (Entry<String, NodeReqResponse> entry : taskResult.entrySet()) {
NodeReqResponse nodeReqResponse = entry.getValue();
nodeReqResponse.getRequestParameters()
.put(PcConstants.NODE_REQUEST_PREFIX_REPLACE_VAR
+ replaceVarKey, replaceVarValue);
nodeReqResponse.getRequestParameters().put(
PcConstants.NODE_REQUEST_WILL_EXECUTE,
Boolean.toString(true));
}// end for loop
}
|
java
|
{
"resource": ""
}
|
q3231
|
ActorConfig.createAndGetActorSystem
|
train
|
public static ActorSystem createAndGetActorSystem() {
if (actorSystem == null || actorSystem.isTerminated()) {
actorSystem = ActorSystem.create(PcConstants.ACTOR_SYSTEM, conf);
}
return actorSystem;
}
|
java
|
{
"resource": ""
}
|
q3232
|
ActorConfig.shutDownActorSystemForce
|
train
|
public static void shutDownActorSystemForce() {
if (!actorSystem.isTerminated()) {
logger.info("shutting down actor system...");
actorSystem.shutdown();
actorSystem.awaitTermination(timeOutDuration);
logger.info("Actor system has been shut down.");
} else {
logger.info("Actor system has been terminated already. NO OP.");
}
}
|
java
|
{
"resource": ""
}
|
q3233
|
TcpUdpSshPingResourceStore.init
|
train
|
public synchronized void init() {
channelFactory = new NioClientSocketChannelFactory(
Executors.newCachedThreadPool(),
Executors.newCachedThreadPool());
datagramChannelFactory = new NioDatagramChannelFactory(
Executors.newCachedThreadPool());
timer = new HashedWheelTimer();
}
|
java
|
{
"resource": ""
}
|
q3234
|
PcErrorMsgUtils.replaceErrorMsg
|
train
|
public static String replaceErrorMsg(String origMsg) {
String replaceMsg = origMsg;
for (ERROR_TYPE errorType : ERROR_TYPE.values()) {
if (origMsg == null) {
replaceMsg = PcConstants.NA;
return replaceMsg;
}
if (origMsg.contains(errorMapOrig.get(errorType))) {
replaceMsg = errorMapReplace.get(errorType);
break;
}
}
return replaceMsg;
}
|
java
|
{
"resource": ""
}
|
q3235
|
AsyncHttpClientFactoryEmbed.disableCertificateVerification
|
train
|
private void disableCertificateVerification()
throws KeyManagementException, NoSuchAlgorithmException {
// Create a trust manager that does not validate certificate chains
final TrustManager[] trustAllCerts = new TrustManager[] { new CustomTrustManager() };
// Install the all-trusting trust manager
final SSLContext sslContext = SSLContext.getInstance("SSL");
sslContext.init(null, trustAllCerts, new SecureRandom());
final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
HttpsURLConnection.setDefaultSSLSocketFactory(sslSocketFactory);
final HostnameVerifier verifier = new HostnameVerifier() {
@Override
public boolean verify(final String hostname,
final SSLSession session) {
return true;
}
};
HttpsURLConnection.setDefaultHostnameVerifier(verifier);
}
|
java
|
{
"resource": ""
}
|
q3236
|
HttpMeta.replaceFullRequestContent
|
train
|
public static String replaceFullRequestContent(
String requestContentTemplate, String replacementString) {
return (requestContentTemplate.replace(
PcConstants.COMMAND_VAR_DEFAULT_REQUEST_CONTENT,
replacementString));
}
|
java
|
{
"resource": ""
}
|
q3237
|
ProblemSummary.addFile
|
train
|
public void addFile(String description, FileModel fileModel)
{
Map<FileModel, ProblemFileSummary> files = addDescription(description);
if (files.containsKey(fileModel))
{
files.get(fileModel).addOccurrence();
} else {
files.put(fileModel, new ProblemFileSummary(fileModel, 1));
}
}
|
java
|
{
"resource": ""
}
|
q3238
|
ClassFilePreDecompilationScan.shouldIgnore
|
train
|
private boolean shouldIgnore(String typeReference)
{
typeReference = typeReference.replace('/', '.').replace('\\', '.');
return JavaClassIgnoreResolver.singletonInstance().matches(typeReference);
}
|
java
|
{
"resource": ""
}
|
q3239
|
Classification.resolvePayload
|
train
|
@Override
public FileModel resolvePayload(GraphRewrite event, EvaluationContext context, WindupVertexFrame payload)
{
checkVariableName(event, context);
if (payload instanceof FileReferenceModel)
{
return ((FileReferenceModel) payload).getFile();
}
if (payload instanceof FileModel)
{
return (FileModel) payload;
}
return null;
}
|
java
|
{
"resource": ""
}
|
q3240
|
FreeMarkerOperation.create
|
train
|
public static FreeMarkerOperation create(Furnace furnace, String templatePath, String outputFilename,
String... varNames)
{
return new FreeMarkerOperation(furnace, templatePath, outputFilename, varNames);
}
|
java
|
{
"resource": ""
}
|
q3241
|
UnzipArchiveToOutputFolder.recurseAndAddFiles
|
train
|
private void recurseAndAddFiles(GraphRewrite event, EvaluationContext context,
            Path tempFolder,
            FileService fileService, ArchiveModel archiveModel,
            FileModel parentFileModel, boolean subArchivesOnly)
{
    checkCancelled(event);
    int numberAdded = 0;
    FileFilter filter = TrueFileFilter.TRUE;
    if (archiveModel instanceof IdentifiedArchiveModel)
    {
        filter = new IdentifiedArchiveFileFilter(archiveModel);
    }
    File fileReference;
    if (parentFileModel instanceof ArchiveModel)
        fileReference = new File(((ArchiveModel) parentFileModel).getUnzippedDirectory());
    else
        fileReference = parentFileModel.asFile();
    WindupJavaConfigurationService windupJavaConfigurationService = new WindupJavaConfigurationService(event.getGraphContext());
    File[] subFiles = fileReference.listFiles();
    if (subFiles == null)
        return;
    for (File subFile : subFiles)
    {
        if (!filter.accept(subFile))
            continue;
        if (subArchivesOnly && !ZipUtil.endsWithZipExtension(subFile.getAbsolutePath()))
            continue;
        FileModel subFileModel = fileService.createByFilePath(parentFileModel, subFile.getAbsolutePath());
        // check if this file should be ignored
        if (windupJavaConfigurationService.checkIfIgnored(event, subFileModel))
            continue;
        numberAdded++;
        if (numberAdded % 250 == 0)
            event.getGraphContext().commit();
        if (subFile.isFile() && ZipUtil.endsWithZipExtension(subFileModel.getFilePath()))
        {
            File newZipFile = subFileModel.asFile();
            ArchiveModel newArchiveModel = GraphService.addTypeToModel(event.getGraphContext(), subFileModel, ArchiveModel.class);
            newArchiveModel.setParentArchive(archiveModel);
            newArchiveModel.setArchiveName(newZipFile.getName());
            /*
             * New archive must be reloaded in case the archive should be ignored
             */
            newArchiveModel = GraphService.refresh(event.getGraphContext(), newArchiveModel);
            ArchiveModel canonicalArchiveModel = null;
            for (FileModel otherMatches : fileService.findAllByProperty(FileModel.SHA1_HASH, newArchiveModel.getSHA1Hash()))
            {
                if (otherMatches instanceof ArchiveModel && !otherMatches.equals(newArchiveModel) && !(otherMatches instanceof DuplicateArchiveModel))
                {
                    canonicalArchiveModel = (ArchiveModel) otherMatches;
                    break;
                }
            }
            if (canonicalArchiveModel != null)
            {
                // handle as duplicate
                DuplicateArchiveModel duplicateArchive = GraphService.addTypeToModel(event.getGraphContext(), newArchiveModel, DuplicateArchiveModel.class);
                duplicateArchive.setCanonicalArchive(canonicalArchiveModel);
                // create dupes for child archives
                unzipToTempDirectory(event, context, tempFolder, newZipFile, duplicateArchive, true);
            } else
            {
                unzipToTempDirectory(event, context, tempFolder, newZipFile, newArchiveModel, false);
            }
        } else if (subFile.isDirectory())
        {
            recurseAndAddFiles(event, context, tempFolder, fileService, archiveModel, subFileModel, false);
        }
    }
}
|
java
|
{
"resource": ""
}
|
q3242
|
RenderLinkDirective.renderAsLI
|
train
|
private void renderAsLI(Writer writer, ProjectModel project, Iterator<Link> links, boolean wrap) throws IOException
{
if (!links.hasNext())
return;
if (wrap)
writer.append("<ul>");
while (links.hasNext())
{
Link link = links.next();
writer.append("<li>");
renderLink(writer, project, link);
writer.append("</li>");
}
if (wrap)
writer.append("</ul>");
}
|
java
|
{
"resource": ""
}
|
q3243
|
CreateApplicationReportIndexRuleProvider.createApplicationReportIndex
|
train
|
private ApplicationReportIndexModel createApplicationReportIndex(GraphContext context,
ProjectModel applicationProjectModel)
{
ApplicationReportIndexService applicationReportIndexService = new ApplicationReportIndexService(context);
ApplicationReportIndexModel index = applicationReportIndexService.create();
addAllProjectModels(index, applicationProjectModel);
return index;
}
|
java
|
{
"resource": ""
}
|
q3244
|
CreateApplicationReportIndexRuleProvider.addAllProjectModels
|
train
|
private void addAllProjectModels(ApplicationReportIndexModel navIdx, ProjectModel projectModel)
{
navIdx.addProjectModel(projectModel);
for (ProjectModel childProject : projectModel.getChildProjects())
{
if (!Iterators.asSet(navIdx.getProjectModels()).contains(childProject))
addAllProjectModels(navIdx, childProject);
}
}
|
java
|
{
"resource": ""
}
|
q3245
|
ProgressEstimate.getTimeRemainingInMillis
|
train
|
public long getTimeRemainingInMillis()
{
long batchTime = System.currentTimeMillis() - startTime;
double timePerIteration = (double) batchTime / (double) worked.get();
return (long) (timePerIteration * (total - worked.get()));
}
|
java
|
{
"resource": ""
}
|
q3246
|
ArchiveService.getChildFile
|
train
|
public FileModel getChildFile(ArchiveModel archiveModel, String filePath)
{
filePath = FilenameUtils.separatorsToUnix(filePath);
StringTokenizer stk = new StringTokenizer(filePath, "/");
FileModel currentFileModel = archiveModel;
while (stk.hasMoreTokens() && currentFileModel != null)
{
String pathElement = stk.nextToken();
currentFileModel = findFileModel(currentFileModel, pathElement);
}
return currentFileModel;
}
|
java
|
{
"resource": ""
}
|
q3247
|
RuleProviderSorter.sort
|
train
|
private void sort()
{
DefaultDirectedWeightedGraph<RuleProvider, DefaultEdge> graph = new DefaultDirectedWeightedGraph<>(
DefaultEdge.class);
for (RuleProvider provider : providers)
{
graph.addVertex(provider);
}
addProviderRelationships(graph);
checkForCycles(graph);
List<RuleProvider> result = new ArrayList<>(this.providers.size());
TopologicalOrderIterator<RuleProvider, DefaultEdge> iterator = new TopologicalOrderIterator<>(graph);
while (iterator.hasNext())
{
RuleProvider provider = iterator.next();
result.add(provider);
}
this.providers = Collections.unmodifiableList(result);
int index = 0;
for (RuleProvider provider : this.providers)
{
if (provider instanceof AbstractRuleProvider)
((AbstractRuleProvider) provider).setExecutionIndex(index++);
}
}
|
java
|
{
"resource": ""
}
|
q3248
|
RuleProviderSorter.checkForCycles
|
train
|
private void checkForCycles(DefaultDirectedWeightedGraph<RuleProvider, DefaultEdge> graph)
{
CycleDetector<RuleProvider, DefaultEdge> cycleDetector = new CycleDetector<>(graph);
if (cycleDetector.detectCycles())
{
// if we have cycles, then try to throw an exception with some usable data
Set<RuleProvider> cycles = cycleDetector.findCycles();
StringBuilder errorSB = new StringBuilder();
for (RuleProvider cycle : cycles)
{
errorSB.append("Found dependency cycle involving: " + cycle.getMetadata().getID()).append(System.lineSeparator());
Set<RuleProvider> subCycleSet = cycleDetector.findCyclesContainingVertex(cycle);
for (RuleProvider subCycle : subCycleSet)
{
errorSB.append("\tSubcycle: " + subCycle.getMetadata().getID()).append(System.lineSeparator());
}
}
throw new RuntimeException("Dependency cycles detected: " + errorSB.toString());
}
}
|
java
|
{
"resource": ""
}
|
q3249
|
RecurseDirectoryAndAddFiles.recurseAndAddFiles
|
train
|
private void recurseAndAddFiles(GraphRewrite event, FileService fileService, WindupJavaConfigurationService javaConfigurationService, FileModel file)
{
if (javaConfigurationService.checkIfIgnored(event, file))
return;
String filePath = file.getFilePath();
File fileReference = new File(filePath);
Long directorySize = new Long(0);
if (fileReference.isDirectory())
{
File[] subFiles = fileReference.listFiles();
if (subFiles != null)
{
for (File reference : subFiles)
{
FileModel subFile = fileService.createByFilePath(file, reference.getAbsolutePath());
recurseAndAddFiles(event, fileService, javaConfigurationService, subFile);
if (subFile.isDirectory())
{
directorySize = directorySize + subFile.getDirectorySize();
}
else
{
directorySize = directorySize + subFile.getSize();
}
}
}
file.setDirectorySize(directorySize);
}
}
|
java
|
{
"resource": ""
}
|
q3250
|
Checks.checkFileOrDirectoryToBeRead
|
train
|
public static void checkFileOrDirectoryToBeRead(File fileOrDir, String fileDesc)
{
if (fileOrDir == null)
throw new IllegalArgumentException(fileDesc + " must not be null.");
if (!fileOrDir.exists())
throw new IllegalArgumentException(fileDesc + " does not exist: " + fileOrDir.getAbsolutePath());
if (!(fileOrDir.isDirectory() || fileOrDir.isFile()))
throw new IllegalArgumentException(fileDesc + " must be a file or a directory: " + fileOrDir.getPath());
if (fileOrDir.isDirectory())
{
if (fileOrDir.list().length == 0)
throw new IllegalArgumentException(fileDesc + " is an empty directory: " + fileOrDir.getPath());
}
}
|
java
|
{
"resource": ""
}
|
q3251
|
FurnaceClasspathScanner.scan
|
train
|
public List<URL> scan(Predicate<String> filter)
{
List<URL> discoveredURLs = new ArrayList<>(128);
// For each Forge addon...
for (Addon addon : furnace.getAddonRegistry().getAddons(AddonFilters.allStarted()))
{
List<String> filteredResourcePaths = filterAddonResources(addon, filter);
for (String filePath : filteredResourcePaths)
{
URL ruleFile = addon.getClassLoader().getResource(filePath);
if (ruleFile != null)
discoveredURLs.add(ruleFile);
}
}
return discoveredURLs;
}
|
java
|
{
"resource": ""
}
|
q3252
|
FurnaceClasspathScanner.scanClasses
|
train
|
public List<Class<?>> scanClasses(Predicate<String> filter)
{
List<Class<?>> discoveredClasses = new ArrayList<>(128);
// For each Forge addon...
for (Addon addon : furnace.getAddonRegistry().getAddons(AddonFilters.allStarted()))
{
List<String> discoveredFileNames = filterAddonResources(addon, filter);
// Then try to load the classes.
for (String discoveredFilename : discoveredFileNames)
{
String clsName = PathUtil.classFilePathToClassname(discoveredFilename);
try
{
Class<?> clazz = addon.getClassLoader().loadClass(clsName);
discoveredClasses.add(clazz);
}
catch (ClassNotFoundException ex)
{
LOG.log(Level.WARNING, "Failed to load class for name '" + clsName + "':\n" + ex.getMessage(), ex);
}
}
}
return discoveredClasses;
}
|
java
|
{
"resource": ""
}
|
q3253
|
FurnaceClasspathScanner.filterAddonResources
|
train
|
public List<String> filterAddonResources(Addon addon, Predicate<String> filter)
{
List<String> discoveredFileNames = new ArrayList<>();
List<File> addonResources = addon.getRepository().getAddonResources(addon.getId());
for (File addonFile : addonResources)
{
if (addonFile.isDirectory())
handleDirectory(filter, addonFile, discoveredFileNames);
else
handleArchiveByFile(filter, addonFile, discoveredFileNames);
}
return discoveredFileNames;
}
|
java
|
{
"resource": ""
}
|
q3254
|
FurnaceClasspathScanner.handleArchiveByFile
|
train
|
private void handleArchiveByFile(Predicate<String> filter, File archive, List<String> discoveredFiles)
{
try
{
try (ZipFile zip = new ZipFile(archive))
{
Enumeration<? extends ZipEntry> entries = zip.entries();
while (entries.hasMoreElements())
{
ZipEntry entry = entries.nextElement();
String name = entry.getName();
if (filter.accept(name))
discoveredFiles.add(name);
}
}
}
catch (IOException e)
{
throw new RuntimeException("Error handling file " + archive, e);
}
}
|
java
|
{
"resource": ""
}
|
q3255
|
FurnaceClasspathScanner.handleDirectory
|
train
|
private void handleDirectory(final Predicate<String> filter, final File rootDir, final List<String> discoveredFiles)
{
    try
    {
        new DirectoryWalker<String>()
        {
            private Path startDir;

            public void walk() throws IOException
            {
                this.startDir = rootDir.toPath();
                this.walk(rootDir, discoveredFiles);
            }

            @Override
            protected void handleFile(File file, int depth, Collection<String> discoveredFiles) throws IOException
            {
                String newPath = startDir.relativize(file.toPath()).toString();
                if (filter.accept(newPath))
                    discoveredFiles.add(newPath);
            }
        }.walk();
    }
    catch (IOException ex)
    {
        LOG.log(Level.SEVERE, "Error reading Furnace addon directory", ex);
    }
}
|
java
|
{
"resource": ""
}
|
q3256
|
Project.dependsOnArtifact
|
train
|
public static Project dependsOnArtifact(Artifact artifact)
{
Project project = new Project();
project.artifact = artifact;
return project;
}
|
java
|
{
"resource": ""
}
|
q3257
|
Util.getSingle
|
train
|
public static final <T> T getSingle( Iterable<T> it ) {
if( ! it.iterator().hasNext() )
return null;
final Iterator<T> iterator = it.iterator();
T o = iterator.next();
if(iterator.hasNext())
throw new IllegalStateException("Found multiple items in iterator over " + o.getClass().getName() );
return o;
}
|
java
|
{
"resource": ""
}
|
q3258
|
XSLTTransformation.perform
|
train
|
@Override
public void perform(GraphRewrite event, EvaluationContext context)
{
checkVariableName(event, context);
WindupVertexFrame payload = resolveVariable(event, getVariableName());
if (payload instanceof FileReferenceModel)
{
FileModel file = ((FileReferenceModel) payload).getFile();
perform(event, context, (XmlFileModel) file);
}
else
{
super.perform(event, context);
}
}
|
java
|
{
"resource": ""
}
|
q3259
|
ProjectFrom.dependsOnArtifact
|
train
|
public Project dependsOnArtifact(Artifact artifact)
{
Project project = new Project();
project.setArtifact(artifact);
project.setInputVariablesName(inputVarName);
return project;
}
|
java
|
{
"resource": ""
}
|
q3260
|
JmsDestinationService.getTypeFromClass
|
train
|
public static JmsDestinationType getTypeFromClass(String aClass)
{
if (StringUtils.equals(aClass, "javax.jms.Queue") || StringUtils.equals(aClass, "javax.jms.QueueConnectionFactory"))
{
return JmsDestinationType.QUEUE;
}
else if (StringUtils.equals(aClass, "javax.jms.Topic") || StringUtils.equals(aClass, "javax.jms.TopicConnectionFactory"))
{
return JmsDestinationType.TOPIC;
}
else
{
return null;
}
}
|
java
|
{
"resource": ""
}
|
q3261
|
AbstractIterationOperation.checkVariableName
|
train
|
protected void checkVariableName(GraphRewrite event, EvaluationContext context)
{
if (variableName == null)
{
setVariableName(Iteration.getPayloadVariableName(event, context));
}
}
|
java
|
{
"resource": ""
}
|
q3262
|
ASTProcessor.analyze
|
train
|
public static List<ClassReference> analyze(WildcardImportResolver importResolver, Set<String> libraryPaths, Set<String> sourcePaths,
Path sourceFile)
{
ASTParser parser = ASTParser.newParser(AST.JLS11);
parser.setEnvironment(libraryPaths.toArray(new String[libraryPaths.size()]), sourcePaths.toArray(new String[sourcePaths.size()]), null, true);
parser.setBindingsRecovery(false);
parser.setResolveBindings(true);
Map options = JavaCore.getOptions();
JavaCore.setComplianceOptions(JavaCore.VERSION_1_8, options);
parser.setCompilerOptions(options);
String fileName = sourceFile.getFileName().toString();
parser.setUnitName(fileName);
try
{
parser.setSource(FileUtils.readFileToString(sourceFile.toFile()).toCharArray());
}
catch (IOException e)
{
throw new ASTException("Failed to get source for file: " + sourceFile.toString() + " due to: " + e.getMessage(), e);
}
parser.setKind(ASTParser.K_COMPILATION_UNIT);
CompilationUnit cu = (CompilationUnit) parser.createAST(null);
ReferenceResolvingVisitor visitor = new ReferenceResolvingVisitor(importResolver, cu, sourceFile.toString());
cu.accept(visitor);
return visitor.getJavaClassReferences();
}
|
java
|
{
"resource": ""
}
|
q3263
|
GraphUtil.vertexAsString
|
train
|
public static final String vertexAsString(Vertex vertex, int depth, String withEdgesOfLabel)
{
StringBuilder sb = new StringBuilder();
vertexAsString(vertex, depth, withEdgesOfLabel, sb, 0, new HashSet<>());
return sb.toString();
}
|
java
|
{
"resource": ""
}
|
q3264
|
MavenizationService.normalizeDirName
|
train
|
private static String normalizeDirName(String name)
{
if(name == null)
return null;
return name.toLowerCase().replaceAll("[^a-zA-Z0-9]", "-");
}
|
java
|
{
"resource": ""
}
|
q3265
|
MavenizationService.guessPackaging
|
train
|
private static String guessPackaging(ProjectModel projectModel)
{
String projectType = projectModel.getProjectType();
if (projectType != null)
return projectType;
LOG.warning("WINDUP-983 getProjectType() returned null for: " + projectModel.getRootFileModel().getPrettyPath());
String suffix = StringUtils.substringAfterLast(projectModel.getRootFileModel().getFileName(), ".");
if ("jar war ear sar har ".contains(suffix+" ")){
projectModel.setProjectType(suffix); // FIXME: Remove when WINDUP-983 is fixed.
return suffix;
}
// Should we try something more? Used APIs? What if it's a source?
return "unknown";
}
|
java
|
{
"resource": ""
}
|
q3266
|
XmlUtil.xpathExists
|
train
|
public static boolean xpathExists(Node document, String xpathExpression, Map<String, String> namespaceMapping) throws XPathException,
MarshallingException
{
Boolean result = (Boolean) executeXPath(document, xpathExpression, namespaceMapping, XPathConstants.BOOLEAN);
return result != null && result;
}
|
java
|
{
"resource": ""
}
|
q3267
|
XmlUtil.executeXPath
|
train
|
public static Object executeXPath(Node document, String xpathExpression, Map<String, String> namespaceMapping, QName result)
throws XPathException, MarshallingException
{
NamespaceMapContext mapContext = new NamespaceMapContext(namespaceMapping);
try
{
XPathFactory xPathfactory = XPathFactory.newInstance();
XPath xpath = xPathfactory.newXPath();
xpath.setNamespaceContext(mapContext);
XPathExpression expr = xpath.compile(xpathExpression);
return executeXPath(document, expr, result);
}
catch (XPathExpressionException e)
{
throw new XPathException("Xpath(" + xpathExpression + ") cannot be compiled", e);
}
catch (Exception e)
{
throw new MarshallingException("Exception unmarshalling XML.", e);
}
}
|
java
|
{
"resource": ""
}
|
q3268
|
ModuleAnalysisHelper.deriveGroupIdFromPackages
|
train
|
String deriveGroupIdFromPackages(ProjectModel projectModel)
{
Map<Object, Long> pkgsMap = new HashMap<>();
Set<String> pkgs = new HashSet<>(1000);
GraphTraversal<Vertex, Vertex> pipeline = new GraphTraversalSource(graphContext.getGraph()).V(projectModel);
pkgsMap = pipeline.out(ProjectModel.PROJECT_MODEL_TO_FILE)
.has(WindupVertexFrame.TYPE_PROP, new P(new BiPredicate<String, String>() {
@Override
public boolean test(String o, String o2) {
return o.contains(o2);
}
},
GraphTypeManager.getTypeValue(JavaClassFileModel.class)))
.hasKey(JavaClassFileModel.PROPERTY_PACKAGE_NAME)
.groupCount()
.by(v -> upToThirdDot(graphContext, (Vertex)v)).toList().get(0);
Map.Entry<Object, Long> biggest = null;
for (Map.Entry<Object, Long> entry : pkgsMap.entrySet())
{
if (biggest == null || biggest.getValue() < entry.getValue())
biggest = entry;
}
// More than a half is of this package.
if (biggest != null && biggest.getValue() > pkgsMap.size() / 2)
return biggest.getKey().toString();
return null;
}
|
java
|
{
"resource": ""
}
|
q3269
|
JavaClass.at
|
train
|
@Override
public JavaClassBuilderAt at(TypeReferenceLocation... locations)
{
if (locations != null)
this.locations = Arrays.asList(locations);
return this;
}
|
java
|
{
"resource": ""
}
|
q3270
|
JavaClass.as
|
train
|
@Override
public ConditionBuilder as(String variable)
{
Assert.notNull(variable, "Variable name must not be null.");
this.setOutputVariablesName(variable);
return this;
}
|
java
|
{
"resource": ""
}
|
q3271
|
GraphCondition.setResults
|
train
|
protected void setResults(GraphRewrite event, String variable, Iterable<? extends WindupVertexFrame> results)
{
Variables variables = Variables.instance(event);
Iterable<? extends WindupVertexFrame> existingVariables = variables.findVariable(variable, 1);
if (existingVariables != null)
{
variables.setVariable(variable, Iterables.concat(existingVariables, results));
}
else
{
variables.setVariable(variable, results);
}
}
|
java
|
{
"resource": ""
}
|
q3272
|
RulePhaseFinder.loadPhases
|
train
|
private Map<String, Class<? extends RulePhase>> loadPhases()
{
Map<String, Class<? extends RulePhase>> phases;
phases = new HashMap<>();
Furnace furnace = FurnaceHolder.getFurnace();
for (RulePhase phase : furnace.getAddonRegistry().getServices(RulePhase.class))
{
@SuppressWarnings("unchecked")
Class<? extends RulePhase> unwrappedClass = (Class<? extends RulePhase>) Proxies.unwrap(phase).getClass();
String simpleName = unwrappedClass.getSimpleName();
phases.put(classNameToMapKey(simpleName), unwrappedClass);
}
return Collections.unmodifiableMap(phases);
}
|
java
|
{
"resource": ""
}
|
q3273
|
FileContent.allInput
|
train
|
private void allInput(List<FileModel> vertices, GraphRewrite event, ParameterStore store)
{
if (StringUtils.isBlank(getInputVariablesName()) && this.filenamePattern == null)
{
FileService fileModelService = new FileService(event.getGraphContext());
for (FileModel fileModel : fileModelService.findAll())
{
vertices.add(fileModel);
}
}
}
|
java
|
{
"resource": ""
}
|
q3274
|
GraphService.getById
|
train
|
@Override
public T getById(Object id)
{
return context.getFramed().getFramedVertex(this.type, id);
}
|
java
|
{
"resource": ""
}
|
q3275
|
GraphService.addTypeToModel
|
train
|
public static <T extends WindupVertexFrame> T addTypeToModel(GraphContext graphContext, WindupVertexFrame frame, Class<T> type)
{
Vertex vertex = frame.getElement();
graphContext.getGraphTypeManager().addTypeToElement(type, vertex);
return graphContext.getFramed().frameElement(vertex, type);
}
|
java
|
{
"resource": ""
}
|
q3276
|
GraphService.removeTypeFromModel
|
train
|
public static <T extends WindupVertexFrame> WindupVertexFrame removeTypeFromModel(GraphContext graphContext, WindupVertexFrame frame, Class<T> type)
{
Vertex vertex = frame.getElement();
graphContext.getGraphTypeManager().removeTypeFromElement(type, vertex);
return graphContext.getFramed().frameElement(vertex, WindupVertexFrame.class);
}
|
java
|
{
"resource": ""
}
|
q3277
|
ClassificationServiceCache.getCache
|
train
|
@SuppressWarnings("unchecked")
private static synchronized Map<String, Boolean> getCache(GraphRewrite event)
{
Map<String, Boolean> result = (Map<String, Boolean>)event.getRewriteContext().get(ClassificationServiceCache.class);
if (result == null)
{
result = Collections.synchronizedMap(new LRUMap(30000));
event.getRewriteContext().put(ClassificationServiceCache.class, result);
}
return result;
}
|
java
|
{
"resource": ""
}
|
q3278
|
EffortReportService.getEffortLevelDescription
|
train
|
public static String getEffortLevelDescription(Verbosity verbosity, int points)
{
EffortLevel level = EffortLevel.forPoints(points);
switch (verbosity)
{
case ID:
return level.name();
case VERBOSE:
return level.getVerboseDescription();
case SHORT:
default:
return level.getShortDescription();
}
}
|
java
|
{
"resource": ""
}
|
q3279
|
WindupConfiguration.setOptionValue
|
train
|
public WindupConfiguration setOptionValue(String name, Object value)
{
configurationOptions.put(name, value);
return this;
}
|
java
|
{
"resource": ""
}
|
q3280
|
WindupConfiguration.getOptionValue
|
train
|
@SuppressWarnings("unchecked")
public <T> T getOptionValue(String name)
{
return (T) configurationOptions.get(name);
}
|
java
|
{
"resource": ""
}
|
q3281
|
RuleSubset.logTimeTakenByRuleProvider
|
train
|
private void logTimeTakenByRuleProvider(GraphContext graphContext, Context context, int ruleIndex, int timeTaken)
{
AbstractRuleProvider ruleProvider = (AbstractRuleProvider) context.get(RuleMetadataType.RULE_PROVIDER);
if (ruleProvider == null)
return;
if (!timeTakenByProvider.containsKey(ruleProvider))
{
RuleProviderExecutionStatisticsModel model = new RuleProviderExecutionStatisticsService(graphContext)
.create();
model.setRuleIndex(ruleIndex);
model.setRuleProviderID(ruleProvider.getMetadata().getID());
model.setTimeTaken(timeTaken);
timeTakenByProvider.put(ruleProvider, model.getElement().id());
}
else
{
RuleProviderExecutionStatisticsService service = new RuleProviderExecutionStatisticsService(graphContext);
RuleProviderExecutionStatisticsModel model = service.getById(timeTakenByProvider.get(ruleProvider));
int prevTimeTaken = model.getTimeTaken();
model.setTimeTaken(prevTimeTaken + timeTaken);
}
logTimeTakenByPhase(graphContext, ruleProvider.getMetadata().getPhase(), timeTaken);
}
|
java
|
{
"resource": ""
}
|
q3282
|
RuleSubset.logTimeTakenByPhase
|
train
|
private void logTimeTakenByPhase(GraphContext graphContext, Class<? extends RulePhase> phase, int timeTaken)
{
if (!timeTakenByPhase.containsKey(phase))
{
RulePhaseExecutionStatisticsModel model = new GraphService<>(graphContext,
RulePhaseExecutionStatisticsModel.class).create();
model.setRulePhase(phase.toString());
model.setTimeTaken(timeTaken);
model.setOrderExecuted(timeTakenByPhase.size());
timeTakenByPhase.put(phase, model.getElement().id());
}
else
{
GraphService<RulePhaseExecutionStatisticsModel> service = new GraphService<>(graphContext,
RulePhaseExecutionStatisticsModel.class);
RulePhaseExecutionStatisticsModel model = service.getById(timeTakenByPhase.get(phase));
int prevTimeTaken = model.getTimeTaken();
model.setTimeTaken(prevTimeTaken + timeTaken);
}
}
|
java
|
{
"resource": ""
}
|
q3283
|
XmlFileXPathTransformer.transformXPath
|
train
|
public static String transformXPath(String originalXPath)
{
    // use a list to maintain the multiple joined xqueries (if there are multiple queries joined with the "|" operator)
    List<StringBuilder> compiledXPaths = new ArrayList<>(1);
    int frameIdx = -1;
    boolean inQuote = false;
    int conditionLevel = 0;
    char startQuoteChar = 0;
    StringBuilder currentXPath = new StringBuilder();
    compiledXPaths.add(currentXPath);
    for (int i = 0; i < originalXPath.length(); i++)
    {
        char curChar = originalXPath.charAt(i);
        if (!inQuote && curChar == '[')
        {
            frameIdx++;
            conditionLevel++;
            currentXPath.append("[windup:startFrame(").append(frameIdx).append(") and windup:evaluate(").append(frameIdx).append(", ");
        }
        else if (!inQuote && curChar == ']')
        {
            conditionLevel--;
            currentXPath.append(")]");
        }
        else if (!inQuote && conditionLevel == 0 && curChar == '|')
        {
            // joining multiple xqueries
            currentXPath = new StringBuilder();
            compiledXPaths.add(currentXPath);
        }
        else
        {
            if (inQuote && curChar == startQuoteChar)
            {
                inQuote = false;
                startQuoteChar = 0;
            }
            else if (curChar == '"' || curChar == '\'')
            {
                inQuote = true;
                startQuoteChar = curChar;
            }
            if (!inQuote && originalXPath.startsWith(WINDUP_MATCHES_FUNCTION_PREFIX, i))
            {
                i += (WINDUP_MATCHES_FUNCTION_PREFIX.length() - 1);
                currentXPath.append("windup:matches(").append(frameIdx).append(", ");
            }
            else
            {
                currentXPath.append(curChar);
            }
        }
    }
    Pattern leadingAndTrailingWhitespace = Pattern.compile("(\\s*)(.*?)(\\s*)");
    StringBuilder finalResult = new StringBuilder();
    for (StringBuilder compiledXPath : compiledXPaths)
    {
        if (StringUtils.isNotBlank(compiledXPath))
        {
            Matcher whitespaceMatcher = leadingAndTrailingWhitespace.matcher(compiledXPath);
            if (!whitespaceMatcher.matches())
                continue;
            compiledXPath = new StringBuilder();
            compiledXPath.append(whitespaceMatcher.group(1));
            compiledXPath.append(whitespaceMatcher.group(2));
            compiledXPath.append("/self::node()[windup:persist(").append(frameIdx).append(", ").append(".)]");
            compiledXPath.append(whitespaceMatcher.group(3));
            if (StringUtils.isNotBlank(finalResult))
                finalResult.append("|");
            finalResult.append(compiledXPath);
        }
    }
    return finalResult.toString();
}
|
java
|
{
"resource": ""
}
|
q3284
|
Compiler.accept
|
train
|
@Override
public void accept(IBinaryType binaryType, PackageBinding packageBinding, AccessRestriction accessRestriction) {
if (this.options.verbose) {
this.out.println(
Messages.bind(Messages.compilation_loadBinary, new String(binaryType.getName())));
// new Exception("TRACE BINARY").printStackTrace(System.out);
// System.out.println();
}
LookupEnvironment env = packageBinding.environment;
env.createBinaryTypeFrom(binaryType, packageBinding, accessRestriction);
}
|
java
|
{
"resource": ""
}
|
q3285
|
Compiler.accept
|
train
|
@Override
public void accept(ICompilationUnit sourceUnit, AccessRestriction accessRestriction) {
// Switch the current policy and compilation result for this unit to the requested one.
CompilationResult unitResult =
new CompilationResult(sourceUnit, this.totalUnits, this.totalUnits, this.options.maxProblemsPerUnit);
unitResult.checkSecondaryTypes = true;
try {
if (this.options.verbose) {
String count = String.valueOf(this.totalUnits + 1);
this.out.println(
Messages.bind(Messages.compilation_request,
new String[] {
count,
count,
new String(sourceUnit.getFileName())
}));
}
// diet parsing for large collection of unit
CompilationUnitDeclaration parsedUnit;
if (this.totalUnits < this.parseThreshold) {
parsedUnit = this.parser.parse(sourceUnit, unitResult);
} else {
parsedUnit = this.parser.dietParse(sourceUnit, unitResult);
}
// initial type binding creation
this.lookupEnvironment.buildTypeBindings(parsedUnit, accessRestriction);
addCompilationUnit(sourceUnit, parsedUnit);
// binding resolution
this.lookupEnvironment.completeTypeBindings(parsedUnit);
} catch (AbortCompilationUnit e) {
// at this point, currentCompilationUnitResult may not be sourceUnit, but some other
// one requested further along to resolve sourceUnit.
if (unitResult.compilationUnit == sourceUnit) { // only report once
this.requestor.acceptResult(unitResult.tagAsAccepted());
} else {
throw e; // want to abort enclosing request to compile
}
}
}
|
java
|
{
"resource": ""
}
|
q3286
|
Compiler.accept
|
train
|
@Override
public void accept(ISourceType[] sourceTypes, PackageBinding packageBinding, AccessRestriction accessRestriction) {
this.problemReporter.abortDueToInternalError(
Messages.bind(Messages.abort_againstSourceModel, new String[] { String.valueOf(sourceTypes[0].getName()), String.valueOf(sourceTypes[0].getFileName()) }));
}
|
java
|
{
"resource": ""
}
|
q3287
|
Compiler.reportProgress
|
train
|
protected void reportProgress(String taskDecription) {
if (this.progress != null) {
if (this.progress.isCanceled()) {
// Only AbortCompilation can stop the compiler cleanly.
// We check cancellation again following the call to compile.
throw new AbortCompilation(true, null);
}
this.progress.setTaskName(taskDecription);
}
}
|
java
|
{
"resource": ""
}
|
q3288
|
Compiler.reportWorked
|
train
|
protected void reportWorked(int workIncrement, int currentUnitIndex) {
if (this.progress != null) {
if (this.progress.isCanceled()) {
// Only AbortCompilation can stop the compiler cleanly.
// We check cancellation again following the call to compile.
throw new AbortCompilation(true, null);
}
this.progress.worked(workIncrement, (this.totalUnits* this.remainingIterations) - currentUnitIndex - 1);
}
}
|
java
|
{
"resource": ""
}
|
q3289
|
Compiler.compile
|
train
|
private void compile(ICompilationUnit[] sourceUnits, boolean lastRound) {
    this.stats.startTime = System.currentTimeMillis();
    try {
        // build and record parsed units
        reportProgress(Messages.compilation_beginningToCompile);
        if (this.options.complianceLevel >= ClassFileConstants.JDK9) {
            // in Java 9 the compiler must never ask the oracle for a module that is contained in the input units:
            sortModuleDeclarationsFirst(sourceUnits);
        }
        if (this.annotationProcessorManager == null) {
            beginToCompile(sourceUnits);
        } else {
            ICompilationUnit[] originalUnits = sourceUnits.clone(); // remember source units in case a source type collision occurs
            try {
                beginToCompile(sourceUnits);
                if (!lastRound) {
                    processAnnotations();
                }
                if (!this.options.generateClassFiles) {
                    // -proc:only was set on the command line
                    return;
                }
            } catch (SourceTypeCollisionException e) {
                backupAptProblems();
                reset();
                // a generated type was referenced before it was created
                // the compiler either created a MissingType or found a BinaryType for it
                // so add the processor's generated files & start over,
                // but remember to only pass the generated files to the annotation processor
                int originalLength = originalUnits.length;
                int newProcessedLength = e.newAnnotationProcessorUnits.length;
                ICompilationUnit[] combinedUnits = new ICompilationUnit[originalLength + newProcessedLength];
                System.arraycopy(originalUnits, 0, combinedUnits, 0, originalLength);
                System.arraycopy(e.newAnnotationProcessorUnits, 0, combinedUnits, originalLength, newProcessedLength);
                this.annotationProcessorStartIndex = originalLength;
                compile(combinedUnits, e.isLastRound);
                return;
            }
        }
        // Restore the problems before the results are processed and cleaned up.
        restoreAptProblems();
        processCompiledUnits(0, lastRound);
    } catch (AbortCompilation e) {
        this.handleInternalException(e, null);
    }
    if (this.options.verbose) {
        if (this.totalUnits > 1) {
            this.out.println(
                Messages.bind(Messages.compilation_units, String.valueOf(this.totalUnits)));
        } else {
            this.out.println(
                Messages.bind(Messages.compilation_unit, String.valueOf(this.totalUnits)));
        }
    }
}
|
java
|
{
"resource": ""
}
|
q3290
|
Compiler.process
|
train
|
public void process(CompilationUnitDeclaration unit, int i) {
this.lookupEnvironment.unitBeingCompleted = unit;
long parseStart = System.currentTimeMillis();
this.parser.getMethodBodies(unit);
long resolveStart = System.currentTimeMillis();
this.stats.parseTime += resolveStart - parseStart;
// fault in fields & methods
if (unit.scope != null)
unit.scope.faultInTypes();
// verify inherited methods
if (unit.scope != null)
unit.scope.verifyMethods(this.lookupEnvironment.methodVerifier());
// type checking
unit.resolve();
long analyzeStart = System.currentTimeMillis();
this.stats.resolveTime += analyzeStart - resolveStart;
//No need of analysis or generation of code if statements are not required
if (!this.options.ignoreMethodBodies) unit.analyseCode(); // flow analysis
long generateStart = System.currentTimeMillis();
this.stats.analyzeTime += generateStart - analyzeStart;
if (!this.options.ignoreMethodBodies) unit.generateCode(); // code generation
// reference info
if (this.options.produceReferenceInfo && unit.scope != null)
unit.scope.storeDependencyInfo();
// finalize problems (suppressWarnings)
unit.finalizeProblems();
this.stats.generateTime += System.currentTimeMillis() - generateStart;
// refresh the total number of units known at this stage
unit.compilationResult.totalUnitsKnown = this.totalUnits;
this.lookupEnvironment.unitBeingCompleted = null;
}
|
java
|
{
"resource": ""
}
|
q3291
|
TagServiceHolder.loadTagDefinitions
|
train
|
@PostConstruct
public void loadTagDefinitions()
{
Map<Addon, List<URL>> addonToResourcesMap = scanner.scanForAddonMap(new FileExtensionFilter("tags.xml"));
for (Map.Entry<Addon, List<URL>> entry : addonToResourcesMap.entrySet())
{
for (URL resource : entry.getValue())
{
log.info("Reading tags definitions from: " + resource.toString() + " from addon " + entry.getKey().getId());
try(InputStream is = resource.openStream())
{
tagService.readTags(is);
}
catch( Exception ex )
{
throw new WindupException("Failed reading tags definition: " + resource.toString() + " from addon " + entry.getKey().getId() + ":\n" + ex.getMessage(), ex);
}
}
}
}
|
java
|
{
"resource": ""
}
|
q3292
|
BatchASTProcessor.analyze
|
train
|
public static BatchASTFuture analyze(final BatchASTListener listener, final WildcardImportResolver importResolver,
final Set<String> libraryPaths,
final Set<String> sourcePaths, Set<Path> sourceFiles)
{
final String[] encodings = null;
final String[] bindingKeys = new String[0];
final ExecutorService executor = WindupExecutors.newFixedThreadPool(WindupExecutors.getDefaultThreadCount());
final FileASTRequestor requestor = new FileASTRequestor()
{
@Override
public void acceptAST(String sourcePath, CompilationUnit ast)
{
try
{
/*
* This super() call doesn't do anything, but we call it just to be nice, in case that ever changes.
*/
super.acceptAST(sourcePath, ast);
ReferenceResolvingVisitor visitor = new ReferenceResolvingVisitor(importResolver, ast, sourcePath);
ast.accept(visitor);
listener.processed(Paths.get(sourcePath), visitor.getJavaClassReferences());
}
catch (WindupStopException ex)
{
throw ex;
}
catch (Throwable t)
{
listener.failed(Paths.get(sourcePath), t);
}
}
};
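// Split the source files into batches and hand each batch to its own parser task on the thread pool.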
List<List<String>> batches = createBatches(sourceFiles);
for (final List<String> batch : batches)
{
executor.submit(new Callable<Void>()
{
@Override
public Void call() throws Exception
{
ASTParser parser = ASTParser.newParser(AST.JLS8);
parser.setBindingsRecovery(false);
parser.setResolveBindings(true);
Map<String, String> options = JavaCore.getOptions();
JavaCore.setComplianceOptions(JavaCore.VERSION_1_8, options);
// these options seem to slightly reduce the number of times that JDT aborts on compilation errors
options.put(JavaCore.CORE_INCOMPLETE_CLASSPATH, "warning");
options.put(JavaCore.COMPILER_PB_ENUM_IDENTIFIER, "warning");
options.put(JavaCore.COMPILER_PB_FORBIDDEN_REFERENCE, "warning");
options.put(JavaCore.CORE_CIRCULAR_CLASSPATH, "warning");
options.put(JavaCore.COMPILER_PB_ASSERT_IDENTIFIER, "warning");
options.put(JavaCore.COMPILER_PB_NULL_SPECIFICATION_VIOLATION, "warning");
options.put(JavaCore.CORE_JAVA_BUILD_INVALID_CLASSPATH, "ignore");
options.put(JavaCore.COMPILER_PB_NULL_ANNOTATION_INFERENCE_CONFLICT, "warning");
options.put(JavaCore.CORE_OUTPUT_LOCATION_OVERLAPPING_ANOTHER_SOURCE, "warning");
options.put(JavaCore.CORE_JAVA_BUILD_DUPLICATE_RESOURCE, "warning");
parser.setCompilerOptions(options);
parser.setEnvironment(libraryPaths.toArray(new String[libraryPaths.size()]),
sourcePaths.toArray(new String[sourcePaths.size()]),
null,
true);
parser.createASTs(batch.toArray(new String[batch.size()]), encodings, bindingKeys, requestor, null);
return null;
}
});
}
executor.shutdown();
return new BatchASTFuture()
{
@Override
public boolean isDone()
{
return executor.isTerminated();
}
};
}
|
java
|
{
"resource": ""
}
|
q3293
|
AnnotationTypeCondition.addCondition
|
train
|
public AnnotationTypeCondition addCondition(String element, AnnotationCondition condition)
{
this.conditions.put(element, condition);
return this;
}
|
java
|
{
"resource": ""
}
|
q3294
|
ProcyonDecompiler.decompileClassFile
|
train
|
@Override
public DecompilationResult decompileClassFile(Path rootDir, Path classFilePath, Path outputDir)
throws DecompilationException
{
Checks.checkDirectoryToBeRead(rootDir.toFile(), "Classes root dir");
File classFile = classFilePath.toFile();
Checks.checkFileToBeRead(classFile, "Class file");
Checks.checkDirectoryToBeFilled(outputDir.toFile(), "Output directory");
log.info("Decompiling .class '" + classFilePath + "' to '" + outputDir + "' from: '" + rootDir + "'");
String name = classFilePath.normalize().toAbsolutePath().toString().substring(rootDir.toAbsolutePath().toString().length() + 1);
final String typeName = StringUtils.removeEnd(name, ".class");// .replace('/', '.');
DecompilationResult result = new DecompilationResult();
try
{
DecompilerSettings settings = getDefaultSettings(outputDir.toFile());
this.procyonConf.setDecompilerSettings(settings); // TODO: This is a horrible mess.
final ITypeLoader typeLoader = new CompositeTypeLoader(new WindupClasspathTypeLoader(rootDir.toString()), new ClasspathTypeLoader());
WindupMetadataSystem metadataSystem = new WindupMetadataSystem(typeLoader);
File outputFile = this.decompileType(settings, metadataSystem, typeName);
result.addDecompiled(Collections.singletonList(classFilePath.toString()), outputFile.getAbsolutePath());
}
catch (Throwable e)
{
DecompilationFailure failure = new DecompilationFailure("Error during decompilation of "
+ classFilePath.toString() + ":\n " + e.getMessage(), Collections.singletonList(name), e);
log.severe(failure.getMessage());
result.addFailure(failure);
}
return result;
}
|
java
|
{
"resource": ""
}
|
q3295
|
ProcyonDecompiler.refreshMetadataCache
|
train
|
private void refreshMetadataCache(final Queue<WindupMetadataSystem> metadataSystemCache, final DecompilerSettings settings)
{
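// Empty the cache and repopulate it with one fresh metadata system per decompilation thread.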
metadataSystemCache.clear();
for (int i = 0; i < this.getNumberOfThreads(); i++)
{
metadataSystemCache.add(new NoRetryMetadataSystem(settings.getTypeLoader()));
}
}
|
java
|
{
"resource": ""
}
|
q3296
|
ProcyonDecompiler.decompileType
|
train
|
private File decompileType(final DecompilerSettings settings, final WindupMetadataSystem metadataSystem, final String typeName) throws IOException
{
log.fine("Decompiling " + typeName);
final TypeReference type;
// Hack to get around classes whose descriptors clash with primitive types.
if (typeName.length() == 1)
{
final MetadataParser parser = new MetadataParser(IMetadataResolver.EMPTY);
final TypeReference reference = parser.parseTypeDescriptor(typeName);
type = metadataSystem.resolve(reference);
}
else
type = metadataSystem.lookupType(typeName);
if (type == null)
{
log.severe("Failed to load class: " + typeName);
return null;
}
final TypeDefinition resolvedType = type.resolve();
if (resolvedType == null)
{
log.severe("Failed to resolve type: " + typeName);
return null;
}
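// Skip nested, anonymous, and synthetic types unless the configuration asks to include them.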
boolean nested = resolvedType.isNested() || resolvedType.isAnonymous() || resolvedType.isSynthetic();
if (!this.procyonConf.isIncludeNested() && nested)
return null;
settings.setJavaFormattingOptions(new JavaFormattingOptions());
final FileOutputWriter writer = createFileWriter(resolvedType, settings);
final PlainTextOutput output;
output = new PlainTextOutput(writer);
output.setUnicodeOutputEnabled(settings.isUnicodeOutputEnabled());
if (settings.getLanguage() instanceof BytecodeLanguage)
output.setIndentToken(" ");
DecompilationOptions options = new DecompilationOptions();
options.setSettings(settings); // Not clear why these two classes are split.
// --------- DECOMPILE ---------
final TypeDecompilationResults results = settings.getLanguage().decompileType(resolvedType, output, options);
writer.flush();
writer.close();
// If we're writing to a file and we were asked to include line numbers in any way,
// then reformat the file to include that line number information.
final List<LineNumberPosition> lineNumberPositions = results.getLineNumberPositions();
if (!this.procyonConf.getLineNumberOptions().isEmpty())
{
final LineNumberFormatter lineFormatter = new LineNumberFormatter(writer.getFile(), lineNumberPositions,
this.procyonConf.getLineNumberOptions());
lineFormatter.reformatFile();
}
return writer.getFile();
}
|
java
|
{
"resource": ""
}
|
q3297
|
ProcyonDecompiler.getDefaultSettings
|
train
|
private DecompilerSettings getDefaultSettings(File outputDir)
{
DecompilerSettings settings = new DecompilerSettings();
procyonConf.setDecompilerSettings(settings);
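// Write decompiled sources under the given output directory, hide synthetic members, and force explicit imports for readability.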
settings.setOutputDirectory(outputDir.getPath());
settings.setShowSyntheticMembers(false);
settings.setForceExplicitImports(true);
if (settings.getTypeLoader() == null)
settings.setTypeLoader(new ClasspathTypeLoader());
return settings;
}
|
java
|
{
"resource": ""
}
|
q3298
|
ProcyonDecompiler.loadJar
|
train
|
private JarFile loadJar(File archive) throws DecompilationException
{
try
{
return new JarFile(archive);
}
catch (IOException ex)
{
throw new DecompilationException("Can't load .jar: " + archive.getPath(), ex);
}
}
|
java
|
{
"resource": ""
}
|
q3299
|
ProcyonDecompiler.createFileWriter
|
train
|
private static synchronized FileOutputWriter createFileWriter(final TypeDefinition type, final DecompilerSettings settings)
throws IOException
{
final String outputDirectory = settings.getOutputDirectory();
final String fileName = type.getName() + settings.getLanguage().getFileExtension();
final String packageName = type.getPackageName();
// foo.Bar -> foo/Bar.java
final String subDir = StringUtils.defaultIfEmpty(packageName, "").replace('.', File.separatorChar);
final String outputPath = PathHelper.combine(outputDirectory, subDir, fileName);
final File outputFile = new File(outputPath);
final File parentDir = outputFile.getParentFile();
if (parentDir != null && !parentDir.exists() && !parentDir.mkdirs())
{
throw new IllegalStateException("Could not create directory:" + parentDir);
}
if (!outputFile.exists() && !outputFile.createNewFile())
{
throw new IllegalStateException("Could not create output file: " + outputPath);
}
return new FileOutputWriter(outputFile, settings);
}
|
java
|
{
"resource": ""
}
|