lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | fee7f579d062f21ac1ab55fe3c7b40a098934f4e | 0 | ribasco/async-gamequery-lib,ribasco/async-gamequery-lib | /*
* MIT License
*
* Copyright (c) 2016 Asynchronous Game Query Library
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.ibasco.agql.protocols.valve.source.query.handlers;
import com.ibasco.agql.protocols.valve.source.query.SourceRconResponsePacket;
import com.ibasco.agql.protocols.valve.source.query.packets.response.SourceRconAuthResponsePacket;
import com.ibasco.agql.protocols.valve.source.query.packets.response.SourceRconCmdResponsePacket;
import com.ibasco.agql.protocols.valve.source.query.packets.response.SourceRconTermResponsePacket;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.LinkedList;
import java.util.List;
/**
* Class responsible for re-assembling split packet instances into one complete packet.
*
* @see <a href="https://developer.valvesoftware.com/wiki/Source_RCON_Protocol#Multiple-packet_Responses">Multiple-packet_Responses</a>
*/
public class SourceRconPacketAssembler extends MessageToMessageDecoder<SourceRconResponsePacket> {

    private static final Logger log = LoggerFactory.getLogger(SourceRconPacketAssembler.class);

    //Accumulates partial command responses until the terminator packet arrives
    private final LinkedList<SourceRconResponsePacket> packetContainer = new LinkedList<>();

    /**
     * Collects split RCON response packets and emits a single re-assembled packet once the
     * terminator packet is received. Authentication responses bypass assembly and are
     * forwarded immediately; blank-bodied responses are dropped.
     *
     * @param ctx the channel handler context
     * @param msg the decoded RCON response packet
     * @param out receives the packets forwarded to the next handler
     * @throws Exception if decoding fails
     */
    @Override
    protected void decode(ChannelHandlerContext ctx, SourceRconResponsePacket msg, List<Object> out) throws Exception {
        if (msg instanceof SourceRconAuthResponsePacket) {
            //automatically forward all auth responses to the next handler
            log.debug("Forwarding authentication response to next handler");
            out.add(msg);
        } else {
            //Should be a type of response (either a terminator or a valid response packet)
            if (msg instanceof SourceRconTermResponsePacket) {
                SourceRconResponsePacket reassembledPacket = null;
                if (packetContainer.size() == 1) {
                    //Single packet response: no merging required
                    reassembledPacket = packetContainer.poll();
                } else if (packetContainer.size() > 1) {
                    reassembledPacket = reassemblePackets();
                }
                if (reassembledPacket != null) {
                    log.debug("Re-assembly Complete! Sending to the next handler");
                    log.debug(" # Size: {}", reassembledPacket.getSize());
                    log.debug(" # Request Id: {}", reassembledPacket.getId());
                    log.debug(" # Type: {}", reassembledPacket.getType());
                    log.debug(" # Body Size: {}", reassembledPacket.getBody().length());
                    //Send to the next handler
                    out.add(reassembledPacket);
                }
            } else {
                //Ignore empty responses
                if (StringUtils.isBlank(msg.getBody())) {
                    log.debug("Ignoring empty response packet : {}", msg);
                    return;
                }
                log.debug("Adding response packet to the queue");
                packetContainer.add(msg);
            }
        }
    }

    /**
     * Merges all queued response packets into one {@link SourceRconCmdResponsePacket},
     * concatenating their bodies in arrival order. The id and type are copied from the
     * first queued packet.
     *
     * @return the merged response packet
     */
    private SourceRconResponsePacket reassemblePackets() {
        //We have reached the end...lets start assembling the packet
        log.debug("Received a terminator packet! Re-assembling packets. Size = {}", packetContainer.size());
        //Snapshot the size up-front: the loop drains the queue via poll(), so testing
        //packetContainer.size() in the loop condition would mutate the bound mid-iteration
        int totalPackets = packetContainer.size();
        SourceRconCmdResponsePacket reassembledPacket = new SourceRconCmdResponsePacket();
        StringBuilder responseBody = new StringBuilder();
        int bodySize = 0;
        int id = -1;
        int type = -1;
        for (int i = 0; i < totalPackets; i++) {
            SourceRconResponsePacket responsePacket = packetContainer.poll();
            if (responsePacket == null) {
                continue;
            }
            //Initialize Variables
            if (id == -1) {
                id = responsePacket.getId();
            }
            if (type == -1) {
                type = responsePacket.getType();
            }
            //Compute total body size
            bodySize += responsePacket.getBody().length();
            log.debug(" ({}) Re-assembling Packet: {}", i + 1, responsePacket);
            responseBody.append(responsePacket.getBody());
        }
        //Merge the details
        reassembledPacket.setSize(8 + bodySize + 2); //id(4) + type(4) + body + body terminator (1) + packet terminator (1)
        reassembledPacket.setId(id);
        reassembledPacket.setType(type);
        reassembledPacket.setBody(responseBody.toString());
        return reassembledPacket;
    }
}
| protocols/valve/source/query/src/main/java/com/ibasco/agql/protocols/valve/source/query/handlers/SourceRconPacketAssembler.java | /*
* MIT License
*
* Copyright (c) 2016 Asynchronous Game Query Library
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.ibasco.agql.protocols.valve.source.query.handlers;
import com.ibasco.agql.protocols.valve.source.query.SourceRconResponsePacket;
import com.ibasco.agql.protocols.valve.source.query.packets.response.SourceRconAuthResponsePacket;
import com.ibasco.agql.protocols.valve.source.query.packets.response.SourceRconCmdResponsePacket;
import com.ibasco.agql.protocols.valve.source.query.packets.response.SourceRconTermResponsePacket;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.LinkedList;
import java.util.List;
/**
* Class responsible for re-assembling split packet instances into one complete packet.
*
* @see <a href="https://developer.valvesoftware.com/wiki/Source_RCON_Protocol#Multiple-packet_Responses">Multiple-packet_Responses</a>
*/
public class SourceRconPacketAssembler extends MessageToMessageDecoder<SourceRconResponsePacket> {

    private static final Logger log = LoggerFactory.getLogger(SourceRconPacketAssembler.class);

    //Buffers the partial command responses until the terminator packet shows up
    private LinkedList<SourceRconResponsePacket> packetContainer = new LinkedList<>();

    @Override
    protected void decode(ChannelHandlerContext ctx, SourceRconResponsePacket msg, List<Object> out) throws Exception {
        //Auth responses never take part in re-assembly; hand them over right away
        if (msg instanceof SourceRconAuthResponsePacket) {
            log.debug("Forwarding authentication response to next handler");
            out.add(msg);
            return;
        }
        //Anything that is not a terminator is a (possibly partial) command response
        if (!(msg instanceof SourceRconTermResponsePacket)) {
            //Blank bodies carry no data, so they are silently dropped
            if (StringUtils.isBlank(msg.getBody())) {
                log.debug("Ignoring empty response packet : {}", msg);
                return;
            }
            log.debug("Adding response packet to the queue");
            packetContainer.add(msg);
            return;
        }
        //Terminator received: flush whatever has been buffered so far
        int buffered = packetContainer.size();
        SourceRconResponsePacket complete = null;
        if (buffered == 1) {
            complete = packetContainer.poll();
        } else if (buffered > 1) {
            complete = reassemblePackets();
        }
        if (complete == null) {
            return;
        }
        log.debug("Re-assembly Complete! Sending to the next handler");
        log.debug(" # Size: {}", complete.getSize());
        log.debug(" # Request Id: {}", complete.getId());
        log.debug(" # Type: {}", complete.getType());
        log.debug(" # Body Size: {}", complete.getBody().length());
        out.add(complete);
    }

    /**
     * Drains the buffer and concatenates the bodies of all buffered packets into a single
     * command response, copying the id and type of the first buffered packet.
     *
     * @return the merged response packet
     */
    private SourceRconResponsePacket reassemblePackets() {
        log.debug("Received a terminator packet! Re-assembling packets. Size = {}", packetContainer.size());
        //Fix the bound before draining the queue below
        int totalPackets = packetContainer.size();
        SourceRconCmdResponsePacket merged = new SourceRconCmdResponsePacket();
        StringBuilder fullBody = new StringBuilder();
        int bodySize = 0;
        int requestId = -1;
        int packetType = -1;
        int processed = 0;
        while (processed < totalPackets) {
            SourceRconResponsePacket part = packetContainer.poll();
            processed++;
            if (part == null) {
                continue;
            }
            //id and type come from the first non-null packet in the buffer
            if (requestId == -1) {
                requestId = part.getId();
            }
            if (packetType == -1) {
                packetType = part.getType();
            }
            bodySize += part.getBody().length();
            log.debug(" ({}) Re-assembling Packet: {}", processed, part);
            fullBody.append(part.getBody());
        }
        merged.setSize(8 + bodySize + 2); //id(4) + type(4) + body + body terminator (1) + packet terminator (1)
        merged.setId(requestId);
        merged.setType(packetType);
        merged.setBody(fullBody.toString());
        return merged;
    }
}
| Loop based on list size
| protocols/valve/source/query/src/main/java/com/ibasco/agql/protocols/valve/source/query/handlers/SourceRconPacketAssembler.java | Loop based on list size |
|
Java | apache-2.0 | d56de931d9104dc7f69d988c01e509f422ee92d0 | 0 | dadoonet/fsriver,dadoonet/fsriver,dadoonet/fsriver,dadoonet/fscrawler,dadoonet/fscrawler,dadoonet/fscrawler | /*
* Licensed to David Pilato (the "Author") under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Author licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package fr.pilato.elasticsearch.crawler.fs.client.v6;
import fr.pilato.elasticsearch.crawler.fs.client.ESBoolQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESDocumentField;
import fr.pilato.elasticsearch.crawler.fs.client.ESHighlightField;
import fr.pilato.elasticsearch.crawler.fs.client.ESMatchQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESPrefixQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESRangeQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESSearchHit;
import fr.pilato.elasticsearch.crawler.fs.client.ESSearchRequest;
import fr.pilato.elasticsearch.crawler.fs.client.ESSearchResponse;
import fr.pilato.elasticsearch.crawler.fs.client.ESTermQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESTermsAggregation;
import fr.pilato.elasticsearch.crawler.fs.client.ESVersion;
import fr.pilato.elasticsearch.crawler.fs.client.ElasticsearchClient;
import fr.pilato.elasticsearch.crawler.fs.framework.JsonUtil;
import fr.pilato.elasticsearch.crawler.fs.settings.Elasticsearch;
import fr.pilato.elasticsearch.crawler.fs.settings.FsSettings;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
// TODO replace with import org.elasticsearch.client.indices.CreateIndexRequest;
// When https://github.com/elastic/elasticsearch/issues/40897 is fixed
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.GetPipelineRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.INDEX_SETTINGS_FILE;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.INDEX_SETTINGS_FOLDER_FILE;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.isNullOrEmpty;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.readJsonFile;
import static org.elasticsearch.action.support.IndicesOptions.LENIENT_EXPAND_OPEN;
/**
* Elasticsearch Client for Clusters running v6.
*/
public class ElasticsearchClientV6 implements ElasticsearchClient {
private static final Logger logger = LogManager.getLogger(ElasticsearchClientV6.class);
private final Path config;
private final FsSettings settings;
private RestHighLevelClient client = null;
private BulkProcessor bulkProcessor = null;
/**
* Type name for Elasticsearch versions >= 6.0
* @deprecated Will be removed with Elasticsearch V8
*/
@Deprecated
private static final String INDEX_TYPE_DOC = "_doc";
public ElasticsearchClientV6(Path config, FsSettings settings) {
this.config = config;
this.settings = settings;
}
@Override
public byte compatibleVersion() {
return 6;
}
@Override
public void start() throws IOException {
if (client != null) {
// The client has already been initialized. Let's skip this again
return;
}
try {
// Create an elasticsearch client
client = new RestHighLevelClient(buildRestClient(settings.getElasticsearch()));
checkVersion();
logger.info("Elasticsearch Client for version {}.x connected to a node running version {}", compatibleVersion(), getVersion());
} catch (Exception e) {
logger.warn("failed to create elasticsearch client, disabling crawler...");
throw e;
}
if (settings.getElasticsearch().getPipeline() != null) {
// Check that the pipeline exists
if (!isExistingPipeline(settings.getElasticsearch().getPipeline())) {
throw new RuntimeException("You defined pipeline:" + settings.getElasticsearch().getPipeline() +
", but it does not exist.");
}
}
BiConsumer<BulkRequest, ActionListener<BulkResponse>> bulkConsumer =
(request, bulkListener) -> client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener);
bulkProcessor = BulkProcessor.builder(bulkConsumer, new DebugListener(logger))
.setBulkActions(settings.getElasticsearch().getBulkSize())
.setFlushInterval(TimeValue.timeValueMillis(settings.getElasticsearch().getFlushInterval().millis()))
.setBulkSize(new ByteSizeValue(settings.getElasticsearch().getByteSize().getBytes()))
.build();
}
@Override
public ESVersion getVersion() throws IOException {
Version version = client.info(RequestOptions.DEFAULT).getVersion();
return ESVersion.fromString(version.toString());
}
/**
* For Elasticsearch 6, we need to make sure we are running at least Elasticsearch 6.4
* @throws IOException when something is wrong while asking the version of the node.
*/
@Override
public void checkVersion() throws IOException {
ESVersion esVersion = getVersion();
if (esVersion.major != compatibleVersion()) {
throw new RuntimeException("The Elasticsearch client version [" +
compatibleVersion() + "] is not compatible with the Elasticsearch cluster version [" +
esVersion.toString() + "].");
}
if (esVersion.minor < 4) {
throw new RuntimeException("This version of FSCrawler is not compatible with " +
"Elasticsearch version [" +
esVersion.toString() + "]. Please upgrade Elasticsearch to at least a 6.4.x version.");
}
}
class DebugListener implements BulkProcessor.Listener {
private final Logger logger;
DebugListener(Logger logger) {
this.logger = logger;
}
@Override public void beforeBulk(long executionId, BulkRequest request) {
logger.trace("Sending a bulk request of [{}] requests", request.numberOfActions());
}
@Override public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
logger.trace("Executed bulk request with [{}] requests", request.numberOfActions());
if (response.hasFailures()) {
final int[] failures = {0};
response.iterator().forEachRemaining(bir -> {
if (bir.isFailed()) {
failures[0]++;
logger.debug("Error caught for [{}]/[{}]/[{}]: {}", bir.getIndex(),
bir.getType(), bir.getId(), bir.getFailureMessage());
}
});
logger.warn("Got [{}] failures of [{}] requests", failures[0], request.numberOfActions());
}
}
@Override public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
logger.warn("Got a hard failure when executing the bulk request", failure);
}
}
/**
* Create an index
* @param index index name
* @param ignoreErrors don't fail if the index already exists
* @param indexSettings index settings if any
* @throws IOException In case of error
*/
public void createIndex(String index, boolean ignoreErrors, String indexSettings) throws IOException {
logger.debug("create index [{}]", index);
logger.trace("index settings: [{}]", indexSettings);
CreateIndexRequest cir = new CreateIndexRequest(index);
if (!isNullOrEmpty(indexSettings)) {
cir.source(indexSettings, XContentType.JSON);
}
try {
client.indices().create(cir, RequestOptions.DEFAULT);
} catch (ElasticsearchStatusException e) {
if (e.getMessage().contains("resource_already_exists_exception") && !ignoreErrors) {
throw new RuntimeException("index already exists");
}
if (!e.getMessage().contains("resource_already_exists_exception")) {
throw e;
}
}
waitForHealthyIndex(index);
}
/**
* Check if an index exists
* @param index index name
* @return true if the index exists, false otherwise
* @throws IOException In case of error
*/
public boolean isExistingIndex(String index) throws IOException {
logger.debug("is existing index [{}]", index);
GetIndexRequest gir = new GetIndexRequest();
gir.indices(index);
return client.indices().exists(gir, RequestOptions.DEFAULT);
}
/**
* Check if a pipeline exists
* @param pipelineName pipeline name
* @return true if the pipeline exists, false otherwise
* @throws IOException In case of error
*/
public boolean isExistingPipeline(String pipelineName) throws IOException {
logger.debug("is existing pipeline [{}]", pipelineName);
try {
return client.ingest().getPipeline(new GetPipelineRequest(pipelineName), RequestOptions.DEFAULT).isFound();
} catch (ElasticsearchStatusException e) {
if (e.status().getStatus() == 404) {
return false;
}
throw new IOException(e);
}
}
/**
* Refresh an index
* @param index index name
* @throws IOException In case of error
*/
public void refresh(String index) throws IOException {
logger.debug("refresh index [{}]", index);
RefreshRequest request = new RefreshRequest();
if (!isNullOrEmpty(index)) {
request.indices(index);
}
RefreshResponse refresh = client.indices().refresh(request, RequestOptions.DEFAULT);
logger.trace("refresh response: {}", refresh);
}
/**
* Wait for an index to become at least yellow (all primaries assigned)
* @param index index name
* @throws IOException In case of error
*/
public void waitForHealthyIndex(String index) throws IOException {
logger.debug("wait for yellow health on index [{}]", index);
ClusterHealthResponse health = client.cluster().health(new ClusterHealthRequest(index).waitForYellowStatus(),
RequestOptions.DEFAULT);
logger.trace("health response: {}", health);
}
/**
* Reindex data from one index/type to another index
* @param sourceIndex source index name
* @param sourceType source type name
* @param targetIndex target index name
* @return The number of documents that have been reindexed
* @throws IOException In case of error
*/
public int reindex(String sourceIndex, String sourceType, String targetIndex) throws IOException {
logger.debug("reindex [{}]/[{}] -> [{}]/[doc]", sourceIndex, sourceType, targetIndex);
String reindexQuery = "{ \"source\": {\n" +
" \"index\": \"" + sourceIndex + "\",\n" +
" \"type\": \"" + sourceType + "\"\n" +
" },\n" +
" \"dest\": {\n" +
" \"index\": \"" + targetIndex + "\",\n" +
" \"type\": \"doc\"\n" +
" }\n" +
"}\n";
logger.trace("{}", reindexQuery);
Request request = new Request("POST", "/_reindex");
request.setJsonEntity(reindexQuery);
Response restResponse = client.getLowLevelClient().performRequest(request);
Map<String, Object> response = asMap(restResponse);
logger.debug("reindex response: {}", response);
return (int) response.get("total");
}
/**
* Fully removes a type from an index (removes data)
* @param index index name
* @param type type
* @throws IOException In case of error
*/
public void deleteByQuery(String index, String type) throws IOException {
logger.debug("deleteByQuery [{}]/[{}]", index, type);
String deleteByQuery = "{\n" +
" \"query\": {\n" +
" \"match_all\": {}\n" +
" }\n" +
"}";
Request request = new Request("POST", "/" + index + "/" + type + "/_delete_by_query");
request.setJsonEntity(deleteByQuery);
Response restResponse = client.getLowLevelClient().performRequest(request);
Map<String, Object> response = asMap(restResponse);
logger.debug("reindex response: {}", response);
}
// Utility methods
public boolean isIngestSupported() {
return true;
}
public String getDefaultTypeName() {
return INDEX_TYPE_DOC;
}
@Override
public void index(String index, String id, String json, String pipeline) {
bulkProcessor.add(new IndexRequest(index, getDefaultTypeName(), id).setPipeline(pipeline).source(json, XContentType.JSON));
}
@Override
public void indexSingle(String index, String id, String json) throws IOException {
IndexRequest request = new IndexRequest(index, getDefaultTypeName(), id);
request.source(json, XContentType.JSON);
client.index(request, RequestOptions.DEFAULT);
}
@Override
public void delete(String index, String id) {
bulkProcessor.add(new DeleteRequest(index, getDefaultTypeName(), id));
}
@Override
public void close() throws IOException {
logger.debug("Closing Elasticsearch client manager");
if (bulkProcessor != null) {
try {
bulkProcessor.awaitClose(30, TimeUnit.SECONDS);
} catch (InterruptedException e) {
logger.warn("Did not succeed in closing the bulk processor for documents", e);
throw new IOException(e);
}
}
if (client != null) {
client.close();
}
}
private static RestClientBuilder buildRestClient(Elasticsearch settings) {
List<HttpHost> hosts = new ArrayList<>(settings.getNodes().size());
settings.getNodes().forEach(node -> hosts.add(HttpHost.create(node.decodedUrl())));
RestClientBuilder builder = RestClient.builder(hosts.toArray(new HttpHost[hosts.size()]));
if (settings.getUsername() != null) {
CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(settings.getUsername(), settings.getPassword()));
builder.setHttpClientConfigCallback(httpClientBuilder ->
httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
}
return builder;
}
public void createIndices() throws Exception {
String elasticsearchVersion;
Path jobMappingDir = config.resolve(settings.getName()).resolve("_mappings");
// Let's read the current version of elasticsearch cluster
Version version = client.info(RequestOptions.DEFAULT).getVersion();
logger.debug("FS crawler connected to an elasticsearch [{}] node.", version.toString());
elasticsearchVersion = Byte.toString(version.major);
// If needed, we create the new settings for this files index
if (!settings.getFs().isAddAsInnerObject() || (!settings.getFs().isJsonSupport() && !settings.getFs().isXmlSupport())) {
createIndex(jobMappingDir, elasticsearchVersion, INDEX_SETTINGS_FILE, settings.getElasticsearch().getIndex());
} else {
createIndex(settings.getElasticsearch().getIndex(), true, null);
}
// If needed, we create the new settings for this folder index
if (settings.getFs().isIndexFolders()) {
createIndex(jobMappingDir, elasticsearchVersion, INDEX_SETTINGS_FOLDER_FILE, settings.getElasticsearch().getIndexFolder());
} else {
createIndex(settings.getElasticsearch().getIndexFolder(), true, null);
}
}
@Override
public ESSearchResponse search(ESSearchRequest request) throws IOException {
SearchRequest searchRequest = new SearchRequest();
if (!isNullOrEmpty(request.getIndex())) {
searchRequest.indices(request.getIndex());
}
SearchSourceBuilder ssb = new SearchSourceBuilder();
if (request.getSize() != null) {
ssb.size(request.getSize());
}
if (!request.getFields().isEmpty()) {
ssb.storedFields(request.getFields());
}
if (request.getESQuery() != null) {
ssb.query(toElasticsearchQuery(request.getESQuery()));
}
if (!isNullOrEmpty(request.getSort())) {
ssb.sort(request.getSort());
}
for (String highlighter : request.getHighlighters()) {
ssb.highlighter(new HighlightBuilder().field(highlighter));
}
for (ESTermsAggregation aggregation : request.getAggregations()) {
ssb.aggregation(AggregationBuilders.terms(aggregation.getName()).field(aggregation.getField()));
}
searchRequest.source(ssb);
searchRequest.indicesOptions(LENIENT_EXPAND_OPEN);
SearchResponse response = client.search(searchRequest, RequestOptions.DEFAULT);
ESSearchResponse esSearchResponse = new ESSearchResponse();
if (response.getHits() != null) {
for (SearchHit hit : response.getHits()) {
ESSearchHit esSearchHit = new ESSearchHit();
if (!hit.getFields().isEmpty()) {
Map<String, ESDocumentField> esFields = new HashMap<>();
for (Map.Entry<String, DocumentField> entry : hit.getFields().entrySet()) {
esFields.put(entry.getKey(), new ESDocumentField(entry.getKey(), entry.getValue().getValues()));
}
esSearchHit.setFields(esFields);
}
esSearchHit.setIndex(hit.getIndex());
esSearchHit.setId(hit.getId());
esSearchHit.setSourceAsMap(hit.getSourceAsMap());
esSearchHit.setSourceAsString(hit.getSourceAsString());
hit.getHighlightFields().forEach((key, value) -> {
String[] texts = new String[value.fragments().length];
for (int i = 0; i < value.fragments().length; i++) {
Text fragment = value.fragments()[i];
texts[i] = fragment.string();
}
esSearchHit.addHighlightField(key, new ESHighlightField(key, texts));
});
esSearchResponse.addHit(esSearchHit);
}
esSearchResponse.setTotalHits(response.getHits().getTotalHits());
if (response.getAggregations() != null) {
for (String name : response.getAggregations().asMap().keySet()) {
Terms termsAgg = response.getAggregations().get(name);
ESTermsAggregation aggregation = new ESTermsAggregation(name, null);
for (Terms.Bucket bucket : termsAgg.getBuckets()) {
aggregation.addBucket(new ESTermsAggregation.ESTermsBucket(bucket.getKeyAsString(), bucket.getDocCount()));
}
esSearchResponse.addAggregation(name, aggregation);
}
}
}
return esSearchResponse;
}
private QueryBuilder toElasticsearchQuery(ESQuery query) {
if (query instanceof ESTermQuery) {
ESTermQuery esQuery = (ESTermQuery) query;
return QueryBuilders.termQuery(esQuery.getField(), esQuery.getValue());
}
if (query instanceof ESMatchQuery) {
ESMatchQuery esQuery = (ESMatchQuery) query;
return QueryBuilders.matchQuery(esQuery.getField(), esQuery.getValue());
}
if (query instanceof ESPrefixQuery) {
ESPrefixQuery esQuery = (ESPrefixQuery) query;
return QueryBuilders.prefixQuery(esQuery.getField(), esQuery.getValue());
}
if (query instanceof ESRangeQuery) {
ESRangeQuery esQuery = (ESRangeQuery) query;
RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(esQuery.getField());
if (esQuery.getFrom() != null) {
rangeQuery.from(esQuery.getFrom());
}
if (esQuery.getTo() != null) {
rangeQuery.to(esQuery.getTo());
}
return rangeQuery;
}
if (query instanceof ESBoolQuery) {
ESBoolQuery esQuery = (ESBoolQuery) query;
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
for (ESQuery clause : esQuery.getMustClauses()) {
boolQuery.must(toElasticsearchQuery(clause));
}
return boolQuery;
}
throw new IllegalArgumentException("Query " + query.getClass().getSimpleName() + " not implemented yet");
}
@Override
public void deleteIndex(String index) throws IOException {
client.indices().delete(new DeleteIndexRequest(index), RequestOptions.DEFAULT);
}
@Override
public void flush() {
bulkProcessor.flush();
}
@Override
public void performLowLevelRequest(String method, String endpoint, String jsonEntity) throws IOException {
Request request = new Request(method, endpoint);
if (!isNullOrEmpty(jsonEntity)) {
request.setJsonEntity(jsonEntity);
}
client.getLowLevelClient().performRequest(request);
}
@Override
public ESSearchHit get(String index, String id) throws IOException {
GetRequest request = new GetRequest(index, getDefaultTypeName(), id);
GetResponse response = client.get(request, RequestOptions.DEFAULT);
ESSearchHit hit = new ESSearchHit();
hit.setIndex(response.getIndex());
hit.setId(response.getId());
hit.setVersion(response.getVersion());
hit.setSourceAsMap(response.getSourceAsMap());
return hit;
}
@Override
public boolean exists(String index, String id) throws IOException {
return client.exists(new GetRequest(index, getDefaultTypeName(), id), RequestOptions.DEFAULT);
}
    /**
     * Create an index using the settings file found in the job's mapping directory
     * for the given elasticsearch major version.
     *
     * @param jobMappingDir directory holding the per-job mapping/settings files
     * @param elasticsearchVersion major version of the target cluster, as a string
     * @param indexSettingsFile name of the settings file to read
     * @param indexName name of the index to create
     * @throws Exception when reading the settings or creating the index fails
     */
    private void createIndex(Path jobMappingDir, String elasticsearchVersion, String indexSettingsFile, String indexName) throws Exception {
        try {
            // If needed, we create the new settings for this files index
            String indexSettings = readJsonFile(jobMappingDir, config, elasticsearchVersion, indexSettingsFile);
            createIndex(indexName, true, indexSettings);
        } catch (Exception e) {
            // Log with context, then propagate so the crawler is disabled upstream
            logger.warn("failed to create index [{}], disabling crawler...", indexName);
            throw e;
        }
    }
static Map<String, Object> asMap(Response response) {
try {
if (response.getEntity() == null) {
return null;
}
return JsonUtil.asMap(response.getEntity().getContent());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
| elasticsearch-client/elasticsearch-client-v6/src/main/java/fr/pilato/elasticsearch/crawler/fs/client/v6/ElasticsearchClientV6.java | /*
* Licensed to David Pilato (the "Author") under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Author licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package fr.pilato.elasticsearch.crawler.fs.client.v6;
import fr.pilato.elasticsearch.crawler.fs.client.ESBoolQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESDocumentField;
import fr.pilato.elasticsearch.crawler.fs.client.ESHighlightField;
import fr.pilato.elasticsearch.crawler.fs.client.ESMatchQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESPrefixQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESRangeQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESSearchHit;
import fr.pilato.elasticsearch.crawler.fs.client.ESSearchRequest;
import fr.pilato.elasticsearch.crawler.fs.client.ESSearchResponse;
import fr.pilato.elasticsearch.crawler.fs.client.ESTermQuery;
import fr.pilato.elasticsearch.crawler.fs.client.ESTermsAggregation;
import fr.pilato.elasticsearch.crawler.fs.client.ESVersion;
import fr.pilato.elasticsearch.crawler.fs.client.ElasticsearchClient;
import fr.pilato.elasticsearch.crawler.fs.framework.JsonUtil;
import fr.pilato.elasticsearch.crawler.fs.settings.Elasticsearch;
import fr.pilato.elasticsearch.crawler.fs.settings.FsSettings;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.GetPipelineRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.INDEX_SETTINGS_FILE;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.INDEX_SETTINGS_FOLDER_FILE;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.isNullOrEmpty;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.readJsonFile;
import static org.elasticsearch.action.support.IndicesOptions.LENIENT_EXPAND_OPEN;
/**
* Elasticsearch Client for Clusters running v6.
*/
public class ElasticsearchClientV6 implements ElasticsearchClient {
private static final Logger logger = LogManager.getLogger(ElasticsearchClientV6.class);
private final Path config;
private final FsSettings settings;
private RestHighLevelClient client = null;
private BulkProcessor bulkProcessor = null;
/**
* Type name for Elasticsearch versions >= 6.0
* @deprecated Will be removed with Elasticsearch V8
*/
@Deprecated
private static final String INDEX_TYPE_DOC = "_doc";
public ElasticsearchClientV6(Path config, FsSettings settings) {
this.config = config;
this.settings = settings;
}
@Override
public byte compatibleVersion() {
return 6;
}
    @Override
    public void start() throws IOException {
        // Idempotent: once the client exists, a second call is a no-op.
        if (client != null) {
            // The client has already been initialized. Let's skip this again
            return;
        }
        try {
            // Create an elasticsearch client
            client = new RestHighLevelClient(buildRestClient(settings.getElasticsearch()));
            checkVersion();
            logger.info("Elasticsearch Client for version {}.x connected to a node running version {}", compatibleVersion(), getVersion());
        } catch (Exception e) {
            logger.warn("failed to create elasticsearch client, disabling crawler...");
            throw e;
        }
        if (settings.getElasticsearch().getPipeline() != null) {
            // Check that the pipeline exists
            if (!isExistingPipeline(settings.getElasticsearch().getPipeline())) {
                throw new RuntimeException("You defined pipeline:" + settings.getElasticsearch().getPipeline() +
                        ", but it does not exist.");
            }
        }
        // All indexing goes through a BulkProcessor sized from the settings
        // (max actions, flush interval, max payload size).
        BiConsumer<BulkRequest, ActionListener<BulkResponse>> bulkConsumer =
                (request, bulkListener) -> client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener);
        bulkProcessor = BulkProcessor.builder(bulkConsumer, new DebugListener(logger))
                .setBulkActions(settings.getElasticsearch().getBulkSize())
                .setFlushInterval(TimeValue.timeValueMillis(settings.getElasticsearch().getFlushInterval().millis()))
                .setBulkSize(new ByteSizeValue(settings.getElasticsearch().getByteSize().getBytes()))
                .build();
    }
@Override
public ESVersion getVersion() throws IOException {
Version version = client.info(RequestOptions.DEFAULT).getVersion();
return ESVersion.fromString(version.toString());
}
    /**
     * For Elasticsearch 6, we need to make sure we are running at least Elasticsearch 6.4
     * @throws IOException when something is wrong while asking the version of the node.
     * @throws RuntimeException when the node is not on the 6.x line, or older than 6.4
     */
    @Override
    public void checkVersion() throws IOException {
        ESVersion esVersion = getVersion();
        // Major version must match this client exactly (6)
        if (esVersion.major != compatibleVersion()) {
            throw new RuntimeException("The Elasticsearch client version [" +
                    compatibleVersion() + "] is not compatible with the Elasticsearch cluster version [" +
                    esVersion.toString() + "].");
        }
        // On the 6.x line, 6.4 is the minimum supported minor
        if (esVersion.minor < 4) {
            throw new RuntimeException("This version of FSCrawler is not compatible with " +
                    "Elasticsearch version [" +
                    esVersion.toString() + "]. Please upgrade Elasticsearch to at least a 6.4.x version.");
        }
    }
    /**
     * BulkProcessor listener that traces bulk request/response activity, logs each
     * failed item at debug level, and emits one aggregate warning per failed bulk.
     */
    class DebugListener implements BulkProcessor.Listener {
        private final Logger logger;
        DebugListener(Logger logger) {
            this.logger = logger;
        }
        @Override public void beforeBulk(long executionId, BulkRequest request) {
            logger.trace("Sending a bulk request of [{}] requests", request.numberOfActions());
        }
        @Override public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
            logger.trace("Executed bulk request with [{}] requests", request.numberOfActions());
            if (response.hasFailures()) {
                // single-element array so the lambda below can mutate the counter
                final int[] failures = {0};
                response.iterator().forEachRemaining(bir -> {
                    if (bir.isFailed()) {
                        failures[0]++;
                        logger.debug("Error caught for [{}]/[{}]/[{}]: {}", bir.getIndex(),
                                bir.getType(), bir.getId(), bir.getFailureMessage());
                    }
                });
                logger.warn("Got [{}] failures of [{}] requests", failures[0], request.numberOfActions());
            }
        }
        @Override public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
            // The whole bulk request failed (e.g. transport problem), not individual items
            logger.warn("Got a hard failure when executing the bulk request", failure);
        }
    }
    /**
     * Create an index
     * @param index index name
     * @param ignoreErrors don't fail if the index already exists
     * @param indexSettings index settings if any
     * @throws IOException In case of error
     */
    public void createIndex(String index, boolean ignoreErrors, String indexSettings) throws IOException {
        logger.debug("create index [{}]", index);
        logger.trace("index settings: [{}]", indexSettings);
        CreateIndexRequest cir = new CreateIndexRequest(index);
        if (!isNullOrEmpty(indexSettings)) {
            cir.source(indexSettings, XContentType.JSON);
        }
        try {
            client.indices().create(cir, RequestOptions.DEFAULT);
        } catch (ElasticsearchStatusException e) {
            // "already exists" only fails the call when the caller asked for strict behaviour
            if (e.getMessage().contains("resource_already_exists_exception") && !ignoreErrors) {
                throw new RuntimeException("index already exists");
            }
            // Any other status error is propagated unchanged
            if (!e.getMessage().contains("resource_already_exists_exception")) {
                throw e;
            }
        }
        // Block until the (new or pre-existing) index reaches at least yellow health
        waitForHealthyIndex(index);
    }
/**
* Check if an index exists
* @param index index name
* @return true if the index exists, false otherwise
* @throws IOException In case of error
*/
public boolean isExistingIndex(String index) throws IOException {
logger.debug("is existing index [{}]", index);
GetIndexRequest gir = new GetIndexRequest();
gir.indices(index);
return client.indices().exists(gir, RequestOptions.DEFAULT);
}
/**
* Check if a pipeline exists
* @param pipelineName pipeline name
* @return true if the pipeline exists, false otherwise
* @throws IOException In case of error
*/
public boolean isExistingPipeline(String pipelineName) throws IOException {
logger.debug("is existing pipeline [{}]", pipelineName);
try {
return client.ingest().getPipeline(new GetPipelineRequest(pipelineName), RequestOptions.DEFAULT).isFound();
} catch (ElasticsearchStatusException e) {
if (e.status().getStatus() == 404) {
return false;
}
throw new IOException(e);
}
}
/**
* Refresh an index
* @param index index name
* @throws IOException In case of error
*/
public void refresh(String index) throws IOException {
logger.debug("refresh index [{}]", index);
RefreshRequest request = new RefreshRequest();
if (!isNullOrEmpty(index)) {
request.indices(index);
}
RefreshResponse refresh = client.indices().refresh(request, RequestOptions.DEFAULT);
logger.trace("refresh response: {}", refresh);
}
/**
* Wait for an index to become at least yellow (all primaries assigned)
* @param index index name
* @throws IOException In case of error
*/
public void waitForHealthyIndex(String index) throws IOException {
logger.debug("wait for yellow health on index [{}]", index);
ClusterHealthResponse health = client.cluster().health(new ClusterHealthRequest(index).waitForYellowStatus(),
RequestOptions.DEFAULT);
logger.trace("health response: {}", health);
}
/**
* Reindex data from one index/type to another index
* @param sourceIndex source index name
* @param sourceType source type name
* @param targetIndex target index name
* @return The number of documents that have been reindexed
* @throws IOException In case of error
*/
public int reindex(String sourceIndex, String sourceType, String targetIndex) throws IOException {
logger.debug("reindex [{}]/[{}] -> [{}]/[doc]", sourceIndex, sourceType, targetIndex);
String reindexQuery = "{ \"source\": {\n" +
" \"index\": \"" + sourceIndex + "\",\n" +
" \"type\": \"" + sourceType + "\"\n" +
" },\n" +
" \"dest\": {\n" +
" \"index\": \"" + targetIndex + "\",\n" +
" \"type\": \"doc\"\n" +
" }\n" +
"}\n";
logger.trace("{}", reindexQuery);
Request request = new Request("POST", "/_reindex");
request.setJsonEntity(reindexQuery);
Response restResponse = client.getLowLevelClient().performRequest(request);
Map<String, Object> response = asMap(restResponse);
logger.debug("reindex response: {}", response);
return (int) response.get("total");
}
/**
* Fully removes a type from an index (removes data)
* @param index index name
* @param type type
* @throws IOException In case of error
*/
public void deleteByQuery(String index, String type) throws IOException {
logger.debug("deleteByQuery [{}]/[{}]", index, type);
String deleteByQuery = "{\n" +
" \"query\": {\n" +
" \"match_all\": {}\n" +
" }\n" +
"}";
Request request = new Request("POST", "/" + index + "/" + type + "/_delete_by_query");
request.setJsonEntity(deleteByQuery);
Response restResponse = client.getLowLevelClient().performRequest(request);
Map<String, Object> response = asMap(restResponse);
logger.debug("reindex response: {}", response);
}
// Utility methods
public boolean isIngestSupported() {
return true;
}
public String getDefaultTypeName() {
return INDEX_TYPE_DOC;
}
@Override
public void index(String index, String id, String json, String pipeline) {
bulkProcessor.add(new IndexRequest(index, getDefaultTypeName(), id).setPipeline(pipeline).source(json, XContentType.JSON));
}
@Override
public void indexSingle(String index, String id, String json) throws IOException {
IndexRequest request = new IndexRequest(index, getDefaultTypeName(), id);
request.source(json, XContentType.JSON);
client.index(request, RequestOptions.DEFAULT);
}
@Override
public void delete(String index, String id) {
bulkProcessor.add(new DeleteRequest(index, getDefaultTypeName(), id));
}
@Override
public void close() throws IOException {
logger.debug("Closing Elasticsearch client manager");
if (bulkProcessor != null) {
try {
bulkProcessor.awaitClose(30, TimeUnit.SECONDS);
} catch (InterruptedException e) {
logger.warn("Did not succeed in closing the bulk processor for documents", e);
throw new IOException(e);
}
}
if (client != null) {
client.close();
}
}
private static RestClientBuilder buildRestClient(Elasticsearch settings) {
List<HttpHost> hosts = new ArrayList<>(settings.getNodes().size());
settings.getNodes().forEach(node -> hosts.add(HttpHost.create(node.decodedUrl())));
RestClientBuilder builder = RestClient.builder(hosts.toArray(new HttpHost[hosts.size()]));
if (settings.getUsername() != null) {
CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(settings.getUsername(), settings.getPassword()));
builder.setHttpClientConfigCallback(httpClientBuilder ->
httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
}
return builder;
}
public void createIndices() throws Exception {
String elasticsearchVersion;
Path jobMappingDir = config.resolve(settings.getName()).resolve("_mappings");
// Let's read the current version of elasticsearch cluster
Version version = client.info(RequestOptions.DEFAULT).getVersion();
logger.debug("FS crawler connected to an elasticsearch [{}] node.", version.toString());
elasticsearchVersion = Byte.toString(version.major);
// If needed, we create the new settings for this files index
if (!settings.getFs().isAddAsInnerObject() || (!settings.getFs().isJsonSupport() && !settings.getFs().isXmlSupport())) {
createIndex(jobMappingDir, elasticsearchVersion, INDEX_SETTINGS_FILE, settings.getElasticsearch().getIndex());
} else {
createIndex(settings.getElasticsearch().getIndex(), true, null);
}
// If needed, we create the new settings for this folder index
if (settings.getFs().isIndexFolders()) {
createIndex(jobMappingDir, elasticsearchVersion, INDEX_SETTINGS_FOLDER_FILE, settings.getElasticsearch().getIndexFolder());
} else {
createIndex(settings.getElasticsearch().getIndexFolder(), true, null);
}
}
@Override
public ESSearchResponse search(ESSearchRequest request) throws IOException {
SearchRequest searchRequest = new SearchRequest();
if (!isNullOrEmpty(request.getIndex())) {
searchRequest.indices(request.getIndex());
}
SearchSourceBuilder ssb = new SearchSourceBuilder();
if (request.getSize() != null) {
ssb.size(request.getSize());
}
if (!request.getFields().isEmpty()) {
ssb.storedFields(request.getFields());
}
if (request.getESQuery() != null) {
ssb.query(toElasticsearchQuery(request.getESQuery()));
}
if (!isNullOrEmpty(request.getSort())) {
ssb.sort(request.getSort());
}
for (String highlighter : request.getHighlighters()) {
ssb.highlighter(new HighlightBuilder().field(highlighter));
}
for (ESTermsAggregation aggregation : request.getAggregations()) {
ssb.aggregation(AggregationBuilders.terms(aggregation.getName()).field(aggregation.getField()));
}
searchRequest.source(ssb);
searchRequest.indicesOptions(LENIENT_EXPAND_OPEN);
SearchResponse response = client.search(searchRequest, RequestOptions.DEFAULT);
ESSearchResponse esSearchResponse = new ESSearchResponse();
if (response.getHits() != null) {
for (SearchHit hit : response.getHits()) {
ESSearchHit esSearchHit = new ESSearchHit();
if (!hit.getFields().isEmpty()) {
Map<String, ESDocumentField> esFields = new HashMap<>();
for (Map.Entry<String, DocumentField> entry : hit.getFields().entrySet()) {
esFields.put(entry.getKey(), new ESDocumentField(entry.getKey(), entry.getValue().getValues()));
}
esSearchHit.setFields(esFields);
}
esSearchHit.setIndex(hit.getIndex());
esSearchHit.setId(hit.getId());
esSearchHit.setSourceAsMap(hit.getSourceAsMap());
esSearchHit.setSourceAsString(hit.getSourceAsString());
hit.getHighlightFields().forEach((key, value) -> {
String[] texts = new String[value.fragments().length];
for (int i = 0; i < value.fragments().length; i++) {
Text fragment = value.fragments()[i];
texts[i] = fragment.string();
}
esSearchHit.addHighlightField(key, new ESHighlightField(key, texts));
});
esSearchResponse.addHit(esSearchHit);
}
esSearchResponse.setTotalHits(response.getHits().getTotalHits());
if (response.getAggregations() != null) {
for (String name : response.getAggregations().asMap().keySet()) {
Terms termsAgg = response.getAggregations().get(name);
ESTermsAggregation aggregation = new ESTermsAggregation(name, null);
for (Terms.Bucket bucket : termsAgg.getBuckets()) {
aggregation.addBucket(new ESTermsAggregation.ESTermsBucket(bucket.getKeyAsString(), bucket.getDocCount()));
}
esSearchResponse.addAggregation(name, aggregation);
}
}
}
return esSearchResponse;
}
private QueryBuilder toElasticsearchQuery(ESQuery query) {
if (query instanceof ESTermQuery) {
ESTermQuery esQuery = (ESTermQuery) query;
return QueryBuilders.termQuery(esQuery.getField(), esQuery.getValue());
}
if (query instanceof ESMatchQuery) {
ESMatchQuery esQuery = (ESMatchQuery) query;
return QueryBuilders.matchQuery(esQuery.getField(), esQuery.getValue());
}
if (query instanceof ESPrefixQuery) {
ESPrefixQuery esQuery = (ESPrefixQuery) query;
return QueryBuilders.prefixQuery(esQuery.getField(), esQuery.getValue());
}
if (query instanceof ESRangeQuery) {
ESRangeQuery esQuery = (ESRangeQuery) query;
RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(esQuery.getField());
if (esQuery.getFrom() != null) {
rangeQuery.from(esQuery.getFrom());
}
if (esQuery.getTo() != null) {
rangeQuery.to(esQuery.getTo());
}
return rangeQuery;
}
if (query instanceof ESBoolQuery) {
ESBoolQuery esQuery = (ESBoolQuery) query;
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
for (ESQuery clause : esQuery.getMustClauses()) {
boolQuery.must(toElasticsearchQuery(clause));
}
return boolQuery;
}
throw new IllegalArgumentException("Query " + query.getClass().getSimpleName() + " not implemented yet");
}
@Override
public void deleteIndex(String index) throws IOException {
client.indices().delete(new DeleteIndexRequest(index), RequestOptions.DEFAULT);
}
@Override
public void flush() {
bulkProcessor.flush();
}
@Override
public void performLowLevelRequest(String method, String endpoint, String jsonEntity) throws IOException {
Request request = new Request(method, endpoint);
if (!isNullOrEmpty(jsonEntity)) {
request.setJsonEntity(jsonEntity);
}
client.getLowLevelClient().performRequest(request);
}
@Override
public ESSearchHit get(String index, String id) throws IOException {
GetRequest request = new GetRequest(index, getDefaultTypeName(), id);
GetResponse response = client.get(request, RequestOptions.DEFAULT);
ESSearchHit hit = new ESSearchHit();
hit.setIndex(response.getIndex());
hit.setId(response.getId());
hit.setVersion(response.getVersion());
hit.setSourceAsMap(response.getSourceAsMap());
return hit;
}
@Override
public boolean exists(String index, String id) throws IOException {
return client.exists(new GetRequest(index, getDefaultTypeName(), id), RequestOptions.DEFAULT);
}
private void createIndex(Path jobMappingDir, String elasticsearchVersion, String indexSettingsFile, String indexName) throws Exception {
try {
// If needed, we create the new settings for this files index
String indexSettings = readJsonFile(jobMappingDir, config, elasticsearchVersion, indexSettingsFile);
createIndex(indexName, true, indexSettings);
} catch (Exception e) {
logger.warn("failed to create index [{}], disabling crawler...", indexName);
throw e;
}
}
static Map<String, Object> asMap(Response response) {
try {
if (response.getEntity() == null) {
return null;
}
return JsonUtil.asMap(response.getEntity().getContent());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
| Use deprecated method
We are hitting an issue on 6.x as described in https://github.com/elastic/elasticsearch/issues/40897
In the meantime, let's revert to the "old" import.
| elasticsearch-client/elasticsearch-client-v6/src/main/java/fr/pilato/elasticsearch/crawler/fs/client/v6/ElasticsearchClientV6.java | Use deprecated method |
|
Java | apache-2.0 | 699a970e20f198b4a10e68c0a49696b9b54638c9 | 0 | sakaiproject/turnitin,sakaiproject/turnitin | /**********************************************************************************
* $URL:
* $Id:
***********************************************************************************
*
* Copyright (c) 2007 The Sakai Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.contentreview.impl.turnitin;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLEncoder;
import java.net.URLDecoder;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.TimeZone;
import javax.net.ssl.HttpsURLConnection;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xerces.parsers.DOMParser;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.content.api.ContentHostingService;
import org.sakaiproject.content.api.ContentResource;
import org.sakaiproject.contentreview.dao.ContentReviewDao;
import org.sakaiproject.contentreview.exception.QueueException;
import org.sakaiproject.contentreview.exception.ReportException;
import org.sakaiproject.contentreview.exception.SubmissionException;
import org.sakaiproject.contentreview.model.ContentReviewItem;
import org.sakaiproject.contentreview.service.ContentReviewService;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.exception.ServerOverloadException;
import org.sakaiproject.exception.TypeException;
import org.sakaiproject.tool.api.ToolManager;
import org.sakaiproject.user.api.User;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserNotDefinedException;
import org.sakaiproject.entity.api.EntityManager;
import org.sakaiproject.entity.api.EntityProducer;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.assignment.api.Assignment;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.api.common.edu.person.SakaiPersonManager;
import org.sakaiproject.api.common.edu.person.SakaiPerson;
import org.sakaiproject.api.common.type.Type;
import org.sakaiproject.api.common.manager.Persistable;
import org.w3c.dom.CharacterData;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
public class TurnitinReviewServiceImpl implements ContentReviewService {
private static final String SERVICE_NAME="Turnitin";
private String aid = null;
private String said = null;
private String secretKey = null;
private String apiURL = "https://www.turnitin.com/api.asp?";
private String defaultAssignmentName = null;
private String defaultInstructorEmail = null;
private String defaultInstructorFName = null;
private String defaultInstructorLName = null;
private String defaultInstructorPassword = null;
private Long maxRetry = null;
//note that the assignment id actually has to be unique globally so use this as a prefix
// eg. assignid = defaultAssignId + siteId
private String defaultAssignId = null;
private String defaultClassPassword = null;
//private static final String defaultInstructorId = defaultInstructorFName + " " + defaultInstructorLName;
private String defaultInstructorId = null;
private static final Log log = LogFactory
.getLog(TurnitinReviewServiceImpl.class);
private ContentReviewDao dao;
public void setDao(ContentReviewDao dao) {
this.dao = dao;
}
private ToolManager toolManager;
public void setToolManager(ToolManager toolManager) {
this.toolManager = toolManager;
}
private UserDirectoryService userDirectoryService;
public void setUserDirectoryService(
UserDirectoryService userDirectoryService) {
this.userDirectoryService = userDirectoryService;
}
private EntityManager entityManager;
public void setEntityManager(EntityManager en){
this.entityManager = en;
}
private ContentHostingService contentHostingService;
public void setContentHostingService(
ContentHostingService contentHostingService) {
this.contentHostingService = contentHostingService;
}
private ServerConfigurationService serverConfigurationService;
public void setServerConfigurationService (ServerConfigurationService serverConfigurationService) {
this.serverConfigurationService = serverConfigurationService;
}
private SakaiPersonManager sakaiPersonManager;
public void setSakaiPersonManager(SakaiPersonManager s) {
this.sakaiPersonManager = s;
}
//Should the service prefer the system profile email address for users if set?
private boolean preferSystemProfileEmail;
public void setPreferSystemProfileEmail(boolean b) {
preferSystemProfileEmail = b;
}
/**
* Place any code that should run when this class is initialized by spring
* here
*/
public void init() {
log.info("init()");
aid = serverConfigurationService.getString("turnitin.aid");
said = serverConfigurationService.getString("turnitin.said");
secretKey = serverConfigurationService.getString("turnitin.secretKey");
apiURL = serverConfigurationService.getString("turnitin.apiURL","https://www.turnitin.com/api.asp?");
defaultAssignmentName = serverConfigurationService.getString("turnitin.defaultAssignmentName");
defaultInstructorEmail = serverConfigurationService.getString("turnitin.defaultInstructorEmail");
defaultInstructorFName = serverConfigurationService.getString("turnitin.defaultInstructorFName");;
defaultInstructorLName = serverConfigurationService.getString("turnitin.defaultInstructorLName");;
defaultInstructorPassword = serverConfigurationService.getString("turnitin.defaultInstructorPassword");;
//note that the assignment id actually has to be unique globally so use this as a prefix
// assignid = defaultAssignId + siteId
defaultAssignId = serverConfigurationService.getString("turnitin.defaultAssignId");;
defaultClassPassword = serverConfigurationService.getString("turnitin.defaultClassPassword");;
//private static final String defaultInstructorId = defaultInstructorFName + " " + defaultInstructorLName;
defaultInstructorId = serverConfigurationService.getString("turnitin.defaultInstructorId");
maxRetry = new Long(serverConfigurationService.getInt("turnitin.maxRetry",100));
// Set the keystore name and password, which must contain the public certificate of the Turnitin API site
System.setProperty("javax.net.ssl.trustStore", serverConfigurationService.getString("turnitin.keystore_name"));
System.setProperty("javax.net.ssl.trustStorePassword", serverConfigurationService.getString("turnitin.keystore_password"));
}
    /**
     * Add a content item to the review submission queue.
     * <p>
     * Null userId/siteId/taskId fall back to the current user, the current tool
     * placement's site, and a default task id of "siteId + ' ' + defaultAssignmentName".
     * When the content is already queued, the existing entries are removed and the
     * item re-queued if resubmission is allowed; otherwise a QueueException is thrown.
     *
     * @param userId user submitting the content, or null for the current user
     * @param siteId site the content belongs to, or null for the current site
     * @param taskId assignment/task id, or null to generate the default
     * @param contentId id of the content resource to queue
     * @throws QueueException if already queued and resubmission is not allowed
     */
    public void queueContent(String userId, String siteId, String taskId, String contentId)
            throws QueueException {
        log.debug("Method called queueContent(" + userId + "," + siteId + "," + contentId + ")");
        if (userId == null) {
            log.debug("Using current user");
            userId = userDirectoryService.getCurrentUser().getId();
        }
        if (siteId == null) {
            log.debug("Using current site");
            siteId = toolManager.getCurrentPlacement().getContext();
        }
        if (taskId == null) {
            log.debug("Generating default taskId");
            taskId = siteId + " " + defaultAssignmentName;
        }
        log.debug("Adding content: " + contentId + " from site " + siteId
                + " and user: " + userId + " for task: " + taskId + " to submission queue");
        /*
         * first check that this content has not been submitted before this may
         * not be the best way to do this - perhaps use contentId as the primary
         * key for now id is the primary key and so the database won't complain
         * if we put in repeats necessitating the check
         */
        List existingItems = dao
                .findByExample(new ContentReviewItem(contentId));
        if (existingItems.size() > 0) {
            if (this.allowResubmission()) {
                log.debug("Content: " + contentId + " is already queued, assuming resubmission");
                // Drop every previous queue entry before re-queueing below
                for (int i =0; i < existingItems.size(); i++) {
                    dao.delete(existingItems.get(i));
                }
            } else {
                throw new QueueException("Content " + contentId + " is already queued, not re-queued");
            }
        }
        // Freshly queued items start in NOT_SUBMITTED state with the current timestamp
        dao.save(new ContentReviewItem(userId, siteId, taskId, contentId, new Date(),
                ContentReviewItem.NOT_SUBMITTED_CODE));
    }
/**
 * Returns the originality score for previously submitted content.
 *
 * @throws QueueException  if the content was never queued
 * @throws ReportException if the report has not yet become available
 */
public int getReviewScore(String contentId)
        throws QueueException, ReportException, Exception {
    log.debug("Getting review score for content: " + contentId);
    List found = dao.findByExample(new ContentReviewItem(contentId));
    if (found.isEmpty()) {
        log.debug("Content " + contentId + " has not been queued previously");
        throw new QueueException("Content " + contentId + " has not been queued previously");
    }
    if (found.size() > 1) {
        log.debug("More than one matching item - using first item found");
    }
    ContentReviewItem reviewItem = (ContentReviewItem) found.get(0);
    // The score is only meaningful once Turnitin has produced the report.
    if (reviewItem.getStatus().compareTo(ContentReviewItem.SUBMITTED_REPORT_AVAILABLE_CODE) != 0) {
        log.debug("Report not available: " + reviewItem.getStatus());
        throw new ReportException("Report not available: " + reviewItem.getStatus());
    }
    return reviewItem.getReviewScore().intValue();
}
/**
 * Builds the URL used to view the Turnitin originality report for
 * previously submitted content.
 *
 * The URL embeds the Turnitin object id (externalId) recorded when the
 * content was submitted, plus the instructor credentials and an MD5
 * signature over the request parameters.
 *
 * @param contentId the Sakai content id the report was generated for
 * @return the fully-signed report URL
 * @throws QueueException  if the content was never queued
 * @throws ReportException if the report is not yet available or the MD5
 *                         signature cannot be computed
 */
public String getReviewReport(String contentId)
        throws QueueException, ReportException {
    // first retrieve the record from the database to get the externalId of
    // the content
    log.debug("Getting report for content: " + contentId);
    List matchingItems = dao.findByExample(new ContentReviewItem(contentId));
    if (matchingItems.size() == 0) {
        log.debug("Content " + contentId + " has not been queued previously");
        throw new QueueException("Content " + contentId + " has not been queued previously");
    }
    if (matchingItems.size() > 1)
        log.debug("More than one matching item found - using first item found");
    // check that the report is available
    // TODO if the database record does not show report available check with
    // turnitin (maybe)
    ContentReviewItem item = (ContentReviewItem) matchingItems.iterator().next();
    if (item.getStatus().compareTo(ContentReviewItem.SUBMITTED_REPORT_AVAILABLE_CODE) != 0) {
        log.debug("Report not available: " + item.getStatus());
        throw new ReportException("Report not available: " + item.getStatus());
    }
    // report is available - generate the URL to display
    String oid = item.getExternalId();
    String fid = "6";          // Turnitin function 6: view report
    String fcmd = "1";
    String encrypt = "0";
    String diagnostic = "0";
    String uem = defaultInstructorEmail;
    String ufn = defaultInstructorFName;
    String uln = defaultInstructorLName;
    String utp = "2";          // user type 2 = instructor
    // is it worthwhile using this?
    String uid = defaultInstructorId;
    String cid = item.getSiteId();
    String assignid = defaultAssignId + item.getSiteId();
    String gmtime = getGMTime();
    // note that these vars must be ordered alphabetically according to
    // their names with secretKey last
    String md5_str = aid + assignid + cid + diagnostic + encrypt + fcmd + fid + gmtime + oid
            + said + uem + ufn + uid + uln + utp + secretKey;
    String md5;
    try {
        md5 = getMD5(md5_str);
    } catch (Throwable t) {
        throw new ReportException("Cannot create MD5 hash of data for Turnitin API call to retrieve report", t);
    }
    // Assemble the query string with a StringBuilder rather than repeated
    // String concatenation (the original created ~30 intermediate Strings).
    StringBuilder reportURL = new StringBuilder(apiURL);
    reportURL.append("fid=").append(fid);
    reportURL.append("&fcmd=").append(fcmd);
    reportURL.append("&assignid=").append(assignid);
    reportURL.append("&uid=").append(uid);
    reportURL.append("&cid=").append(cid);
    reportURL.append("&encrypt=").append(encrypt);
    reportURL.append("&aid=").append(aid);
    reportURL.append("&said=").append(said);
    reportURL.append("&diagnostic=").append(diagnostic);
    reportURL.append("&oid=").append(oid);
    reportURL.append("&uem=").append(uem);
    reportURL.append("&ufn=").append(ufn);
    reportURL.append("&uln=").append(uln);
    reportURL.append("&utp=").append(utp);
    reportURL.append("&gmtime=").append(gmtime);
    reportURL.append("&md5=").append(md5);
    return reportURL.toString();
}
/**
 * Looks up the current review status code for queued content.
 *
 * @throws QueueException if the content was never queued
 */
public Long getReviewStatus(String contentId)
        throws QueueException {
    log.debug("Returning review status for content: " + contentId);
    List found = dao.findByExample(new ContentReviewItem(contentId));
    if (found.isEmpty()) {
        log.debug("Content " + contentId + " has not been queued previously");
        throw new QueueException("Content " + contentId + " has not been queued previously");
    }
    if (found.size() > 1) {
        log.debug("more than one matching item found - using first item found");
    }
    ContentReviewItem first = (ContentReviewItem) found.get(0);
    return first.getStatus();
}
/**
 * Returns the date the content item was first placed on the queue.
 *
 * @throws QueueException if the content was never queued
 */
public Date getDateQueued(String contentId)
        throws QueueException {
    log.debug("Returning date queued for content: " + contentId);
    List found = dao.findByExample(new ContentReviewItem(contentId));
    if (found.isEmpty()) {
        log.debug("Content " + contentId + " has not been queued previously");
        throw new QueueException("Content " + contentId + " has not been queued previously");
    }
    if (found.size() > 1) {
        log.debug("more than one matching item found - using first item found");
    }
    ContentReviewItem first = (ContentReviewItem) found.get(0);
    return first.getDateQueued();
}
/**
 * Returns the date the content was actually submitted to Turnitin.
 *
 * @throws QueueException      if the content was never queued
 * @throws SubmissionException if it is queued but not yet submitted
 */
public Date getDateSubmitted(String contentId)
        throws QueueException, SubmissionException {
    log.debug("Returning date queued for content: " + contentId);
    List found = dao.findByExample(new ContentReviewItem(contentId));
    if (found.isEmpty()) {
        log.debug("Content " + contentId + " has not been queued previously");
        throw new QueueException("Content " + contentId + " has not been queued previously");
    }
    if (found.size() > 1) {
        log.debug("more than one matching item found - using first item found");
    }
    ContentReviewItem first = (ContentReviewItem) found.get(0);
    // A queued item only gains a submission date after a successful upload.
    if (first.getDateSubmitted() == null) {
        log.debug("Content not yet submitted: " + first.getStatus());
        throw new SubmissionException("Content not yet submitted: " + first.getStatus());
    }
    return first.getDateSubmitted();
}
/**
 * Encodes one name/value pair as a single part of a multipart/form-data
 * request body, delimited by the supplied boundary string.
 */
private String encodeParam(String name, String value, String boundary) {
    StringBuilder part = new StringBuilder();
    part.append("--").append(boundary);
    part.append("\r\nContent-Disposition: form-data; name=\"");
    part.append(name);
    part.append("\"\r\n\r\n");
    part.append(value);
    part.append("\r\n");
    return part.toString();
}
/**
 * Creates a Turnitin "class" mirroring the given Sakai site, using the
 * Turnitin API (fid=2, fcmd=2) under the default instructor account.
 *
 * Response codes 20 and 21 are both treated as success — presumably
 * "created" vs "already exists"; TODO confirm against the Turnitin API spec.
 *
 * @param siteId Sakai site id, used as both the class id (cid) and title (ctl)
 * @throws SubmissionException if signing, the HTTPS call, or response
 *         parsing fails, or Turnitin returns a non-success code
 */
private void createClass(String siteId) throws SubmissionException {
    log.debug("Creating class for site: " + siteId);
    // Parameters for the API call; instructor credentials come from the
    // service's configured defaults.
    String cpw = defaultClassPassword;
    String ctl = siteId;
    String diagnostic = "0";
    String encrypt = "0";
    String fcmd = "2";
    String fid = "2";
    String uem = defaultInstructorEmail;
    String ufn = defaultInstructorFName;
    String uln = defaultInstructorLName;
    String utp = "2"; //user type 2 = instructor
    String upw = defaultInstructorPassword;
    String cid = siteId;
    String uid = defaultInstructorId;
    String gmtime = this.getGMTime();
    // MD5 of function 2 - Create a class under a given account (instructor only)
    // Values are concatenated alphabetically by parameter name with the
    // shared secret appended last, per the Turnitin signing scheme.
    String md5_str = aid + cid + cpw + ctl + diagnostic + encrypt + fcmd + fid +
    gmtime + said + uem + ufn + uid + uln + upw + utp + secretKey;
    String md5;
    try{
    md5 = this.getMD5(md5_str);
    } catch (Throwable t) {
    log.warn("MD5 error creating class on turnitin");
    throw new SubmissionException("Cannot generate MD5 hash for Turnitin API call", t);
    }
    HttpsURLConnection connection;
    try {
    URL hostURL = new URL(apiURL);
    connection = (HttpsURLConnection) hostURL.openConnection();
    // NOTE(review): setDoOutput(true) plus writing a request body causes
    // HttpURLConnection to send a POST despite setRequestMethod("GET") —
    // presumably this matches what the Turnitin endpoint expects; verify.
    connection.setRequestMethod("GET");
    connection.setDoOutput(true);
    connection.setDoInput(true);
    log.info("HTTPS Connection made to Turnitin");
    // Write the parameters as a URL-encoded-style body.
    OutputStream outStream = connection.getOutputStream();
    outStream.write("uid=".getBytes("UTF-8"));
    outStream.write(uid.getBytes("UTF-8"));
    outStream.write("&cid=".getBytes("UTF-8"));
    outStream.write(cid.getBytes("UTF-8"));
    outStream.write("&aid=".getBytes("UTF-8"));
    outStream.write(aid.getBytes("UTF-8"));
    outStream.write("&cpw=".getBytes("UTF-8"));
    outStream.write(cpw.getBytes("UTF-8"));
    outStream.write("&ctl=".getBytes("UTF-8"));
    outStream.write(ctl.getBytes("UTF-8"));
    outStream.write("&diagnostic=".getBytes("UTF-8"));
    outStream.write(diagnostic.getBytes("UTF-8"));
    outStream.write("&encrypt=".getBytes("UTF-8"));
    outStream.write(encrypt.getBytes("UTF-8"));
    outStream.write("&fcmd=".getBytes("UTF-8"));
    outStream.write(fcmd.getBytes("UTF-8"));
    outStream.write("&fid=".getBytes("UTF-8"));
    outStream.write(fid.getBytes("UTF-8"));
    outStream.write("&gmtime=".getBytes("UTF-8"));
    outStream.write(gmtime.getBytes("UTF-8"));
    outStream.write("&said=".getBytes("UTF-8"));
    outStream.write(said.getBytes("UTF-8"));
    outStream.write("&uem=".getBytes("UTF-8"));
    outStream.write(uem.getBytes("UTF-8"));
    outStream.write("&ufn=".getBytes("UTF-8"));
    outStream.write(ufn.getBytes("UTF-8"));
    outStream.write("&uln=".getBytes("UTF-8"));
    outStream.write(uln.getBytes("UTF-8"));
    outStream.write("&upw=".getBytes("UTF-8"));
    outStream.write(upw.getBytes("UTF-8"));
    outStream.write("&utp=".getBytes("UTF-8"));
    outStream.write(utp.getBytes("UTF-8"));
    outStream.write("&md5=".getBytes("UTF-8"));
    outStream.write(md5.getBytes("UTF-8"));
    outStream.close();
    }
    catch (Throwable t) {
    throw new SubmissionException("Class creation call to Turnitin API failed", t);
    }
    // Read and parse the XML response.
    BufferedReader in;
    try {
    in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
    } catch (Throwable t) {
    throw new SubmissionException ("Cannot get Turnitin response. Assuming call was unsuccessful", t);
    }
    DOMParser parser = new DOMParser();
    try {
    parser.parse(new org.xml.sax.InputSource(in));
    } catch (Throwable t) {
    throw new SubmissionException ("Cannot parse Turnitin response. Assuming call was unsuccessful", t);
    }
    Document document = parser.getDocument();
    Element root = document.getDocumentElement();
    // rcode 20/21 = success; anything else surfaces Turnitin's rmessage.
    if (((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("20") == 0 ||
    ((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("21") == 0 ) {
    log.debug("Create Class successful");
    } else {
    throw new SubmissionException("Create Class not successful. Message: " + ((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim() + ". Code: " + ((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim());
    }
}
/**
 * Resolves the display title for a task reference.
 *
 * If the taskId is an entity reference that resolves to an Assignment, the
 * assignment's (URL-decoded) title is returned; on any failure, or when the
 * entity is not an Assignment, the raw taskId is returned unchanged.
 *
 * @param taskId entity reference (or plain task id) to resolve
 * @return the assignment title, or taskId as a fallback
 */
private String getAssignmentTitle(String taskId) {
    try {
        Reference ref = entityManager.newReference(taskId);
        log.debug("got ref " + ref + " of type: " + ref.getType());
        EntityProducer ep = ref.getEntityProducer();
        Entity ent = ep.getEntity(ref);
        log.debug("got entity " + ent);
        if (ent instanceof Assignment) {
            Assignment as = (Assignment) ent;
            log.debug("Got assignemment with title " + as.getTitle());
            return URLDecoder.decode(as.getTitle(), "UTF-8");
        }
    } catch (Exception e) {
        // Log with the cause instead of printStackTrace(); resolution
        // failure is non-fatal and falls through to the taskId fallback.
        log.warn("Could not resolve assignment title for task: " + taskId, e);
    }
    return taskId;
}
/**
 * Creates a Turnitin assignment for the given site/task via the Turnitin
 * API (fid=4, fcmd=2), signed with the account's shared secret.
 *
 * The assignment's due date is set 5 months out because Turnitin closes
 * classes 6 months after creation and an assignment must end on or before
 * its class. rcode 1-99 and 419 are treated as success (419 presumably
 * means "assignment already exists" — TODO confirm against the API spec).
 *
 * @param siteId Sakai site id (used as Turnitin class id/title)
 * @param taskId task reference, used as the Turnitin assignment id
 * @throws SubmissionException if signing, the HTTPS call, response parsing
 *         fails, or Turnitin reports an error code
 */
private void createAssignment(String siteId, String taskId) throws SubmissionException {
    //get the assignment reference
    String taskTitle = getAssignmentTitle(taskId);
    log.debug("Creating assignment for site: " + siteId + ", task: " + taskId + " tasktitle: " + taskTitle);
    String diagnostic = "0"; //0 = off; 1 = on
    SimpleDateFormat dform = ((SimpleDateFormat) DateFormat.getDateInstance());
    dform.applyPattern("yyyyMMdd");
    Calendar cal = Calendar.getInstance();
    String dtstart = dform.format(cal.getTime());
    //set the due dates for the assignments to be in 5 month's time
    //turnitin automatically sets each class end date to 6 months after it is created
    //the assignment end date must be on or before the class end date
    //TODO use the 'secret' function to change this to longer
    cal.add(Calendar.MONTH, 5);
    String dtdue = dform.format(cal.getTime());
    String encrypt = "0"; //encryption flag
    String fcmd = "2"; //new assignment
    String fid = "4"; //function id
    String uem = defaultInstructorEmail;
    String ufn = defaultInstructorFName;
    String uln = defaultInstructorLName;
    String utp = "2"; //user type 2 = instructor
    String upw = defaultInstructorPassword;
    String cid = siteId;
    String uid = defaultInstructorId;
    String assignid = taskId;
    String assign = taskTitle;
    String ctl = siteId;
    String gmtime = getGMTime();
    String assignEnc = assign;
    try {
        // '&' in the title would corrupt the urlencoded body, so replace it.
        if (assign.contains("&")) {
            assign = assign.replace('&', 'n');
        }
        assignEnc = assign;
        log.debug("Assign title is " + assignEnc);
    }
    catch (Exception e) {
        log.warn("Error encoding assignment title", e);
    }
    // Values concatenated alphabetically by parameter name, secret last,
    // per the Turnitin MD5 signing scheme.
    String md5_str = aid + assignEnc + assignid + cid + ctl + diagnostic + dtdue + dtstart + encrypt +
            fcmd + fid + gmtime + said + uem + ufn + uid + uln + upw + utp + secretKey;
    String md5;
    try {
        md5 = this.getMD5(md5_str);
    } catch (Throwable t) {
        log.warn("MD5 error creating assignment on turnitin");
        // Preserve the cause so the failure is diagnosable upstream.
        throw new SubmissionException("Could not generate MD5 hash for \"Create Assignment\" Turnitin API call", t);
    }
    HttpsURLConnection connection;
    try {
        URL hostURL = new URL(apiURL);
        connection = (HttpsURLConnection) hostURL.openConnection();
        connection.setRequestMethod("GET");
        connection.setDoOutput(true);
        connection.setDoInput(true);
        log.info("HTTPS connection made to Turnitin");
        OutputStream outStream = connection.getOutputStream();
        outStream.write("aid=".getBytes("UTF-8"));
        outStream.write(aid.getBytes("UTF-8"));
        outStream.write("&assign=".getBytes("UTF-8"));
        outStream.write(assignEnc.getBytes("UTF-8"));
        outStream.write("&assignid=".getBytes("UTF-8"));
        outStream.write(assignid.getBytes("UTF-8"));
        outStream.write("&cid=".getBytes("UTF-8"));
        outStream.write(cid.getBytes("UTF-8"));
        outStream.write("&uid=".getBytes("UTF-8"));
        outStream.write(uid.getBytes("UTF-8"));
        outStream.write("&ctl=".getBytes("UTF-8"));
        outStream.write(ctl.getBytes("UTF-8"));
        outStream.write("&diagnostic=".getBytes("UTF-8"));
        outStream.write(diagnostic.getBytes("UTF-8"));
        outStream.write("&dtdue=".getBytes("UTF-8"));
        outStream.write(dtdue.getBytes("UTF-8"));
        outStream.write("&dtstart=".getBytes("UTF-8"));
        outStream.write(dtstart.getBytes("UTF-8"));
        outStream.write("&encrypt=".getBytes("UTF-8"));
        outStream.write(encrypt.getBytes("UTF-8"));
        outStream.write("&fcmd=".getBytes("UTF-8"));
        outStream.write(fcmd.getBytes("UTF-8"));
        outStream.write("&fid=".getBytes("UTF-8"));
        outStream.write(fid.getBytes("UTF-8"));
        outStream.write("&gmtime=".getBytes("UTF-8"));
        outStream.write(gmtime.getBytes("UTF-8"));
        outStream.write("&said=".getBytes("UTF-8"));
        outStream.write(said.getBytes("UTF-8"));
        outStream.write("&uem=".getBytes("UTF-8"));
        outStream.write(uem.getBytes("UTF-8"));
        outStream.write("&ufn=".getBytes("UTF-8"));
        outStream.write(ufn.getBytes("UTF-8"));
        outStream.write("&uln=".getBytes("UTF-8"));
        outStream.write(uln.getBytes("UTF-8"));
        outStream.write("&upw=".getBytes("UTF-8"));
        outStream.write(upw.getBytes("UTF-8"));
        outStream.write("&utp=".getBytes("UTF-8"));
        outStream.write(utp.getBytes("UTF-8"));
        outStream.write("&md5=".getBytes("UTF-8"));
        outStream.write(md5.getBytes("UTF-8"));
        outStream.close();
    }
    catch (Throwable t) {
        throw new SubmissionException("Assignment creation call to Turnitin API failed", t);
    }
    BufferedReader in;
    try {
        in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
    } catch (Throwable t) {
        throw new SubmissionException ("Cannot get Turnitin response. Assuming call was unsuccessful", t);
    }
    DOMParser parser = new DOMParser();
    try {
        parser.parse(new org.xml.sax.InputSource(in));
    } catch (Throwable t) {
        // Fixed: the original dropped the cause here.
        throw new SubmissionException ("Cannot parse Turnitin response. Assuming call was unsuccessful", t);
    }
    Document document = parser.getDocument();
    Element root = document.getDocumentElement();
    // parseInt instead of boxing with new Integer(...).intValue().
    int rcode = Integer.parseInt(((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim());
    if ((rcode > 0 && rcode < 100) || rcode == 419) {
        log.debug("Create Assignment successful");
    } else {
        log.debug("Assignment creation failed with message: " + ((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim() + ". Code: " + rcode);
        throw new SubmissionException("Create Assignment not successful. Message: " + ((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim() + ". Code: " + rcode);
    }
}
/**
 * Enrolls a Sakai user as a student in the Turnitin class for the given
 * site, via the Turnitin API (fid=3, fcmd=2).
 *
 * rcode 30/31 are both treated as success — presumably "enrolled" vs
 * "already enrolled"; TODO confirm against the Turnitin API spec.
 *
 * @param userId Sakai user id to enroll
 * @param uem    the email address to register the user under (passed in
 *               because it may come from the user's profile, not the
 *               directory entry)
 * @param siteId Sakai site id identifying the Turnitin class
 * @throws SubmissionException if user lookup, signing, the HTTPS call, or
 *         response parsing fails, or Turnitin reports an error code
 */
private void enrollInClass(String userId, String uem, String siteId) throws SubmissionException {
    String ctl = siteId; //class title
    String fid = "3";
    String fcmd = "2";
    String encrypt = "0";
    String diagnostic = "0";
    String tem = defaultInstructorEmail;
    User user;
    try {
        user = userDirectoryService.getUser(userId);
    } catch (Throwable t) {
        throw new SubmissionException ("Cannot get user information", t);
    }
    log.debug("Enrolling user " + user.getEid() + "(" + userId + ") in class " + siteId);
    String ufn = user.getFirstName();
    if (ufn == null) {
        throw new SubmissionException ("User has no first name");
    }
    String uln = user.getLastName();
    if (uln == null) {
        throw new SubmissionException ("User has no last name");
    }
    String utp = "1"; //user type 1 = student
    String uid = userId;
    String cid = siteId;
    String gmtime = this.getGMTime();
    // Values concatenated alphabetically by parameter name, secret last.
    String md5_str = aid + cid + ctl + diagnostic + encrypt + fcmd + fid + gmtime + said + tem + uem +
            ufn + uid + uln + utp + secretKey;
    String md5;
    try {
        md5 = this.getMD5(md5_str);
    } catch (Throwable t) {
        log.warn("MD5 error enrolling student on turnitin");
        throw new SubmissionException("Cannot generate MD5 hash for Class Enrollment Turnitin API call", t);
    }
    HttpsURLConnection connection;
    try {
        URL hostURL = new URL(apiURL);
        connection = (HttpsURLConnection) hostURL.openConnection();
        connection.setRequestMethod("GET");
        connection.setDoOutput(true);
        connection.setDoInput(true);
        log.info("Connection made to Turnitin");
        OutputStream outStream = connection.getOutputStream();
        outStream.write("fid=".getBytes("UTF-8"));
        outStream.write(fid.getBytes("UTF-8"));
        outStream.write("&fcmd=".getBytes("UTF-8"));
        outStream.write(fcmd.getBytes("UTF-8"));
        outStream.write("&cid=".getBytes("UTF-8"));
        outStream.write(cid.getBytes("UTF-8"));
        // Fixed: these three previously used the platform default charset
        // (bare getBytes()); all writes now consistently use UTF-8.
        outStream.write("&tem=".getBytes("UTF-8"));
        outStream.write(tem.getBytes("UTF-8"));
        outStream.write("&ctl=".getBytes("UTF-8"));
        outStream.write(ctl.getBytes("UTF-8"));
        outStream.write("&encrypt=".getBytes("UTF-8"));
        outStream.write(encrypt.getBytes("UTF-8"));
        outStream.write("&aid=".getBytes("UTF-8"));
        outStream.write(aid.getBytes("UTF-8"));
        outStream.write("&said=".getBytes("UTF-8"));
        outStream.write(said.getBytes("UTF-8"));
        outStream.write("&diagnostic=".getBytes("UTF-8"));
        outStream.write(diagnostic.getBytes("UTF-8"));
        outStream.write("&uem=".getBytes("UTF-8"));
        outStream.write(URLEncoder.encode(uem, "UTF-8").getBytes("UTF-8"));
        outStream.write("&ufn=".getBytes("UTF-8"));
        outStream.write(ufn.getBytes("UTF-8"));
        outStream.write("&uln=".getBytes("UTF-8"));
        outStream.write(uln.getBytes("UTF-8"));
        outStream.write("&utp=".getBytes("UTF-8"));
        outStream.write(utp.getBytes("UTF-8"));
        outStream.write("&gmtime=".getBytes("UTF-8"));
        outStream.write(URLEncoder.encode(gmtime, "UTF-8").getBytes("UTF-8"));
        outStream.write("&md5=".getBytes("UTF-8"));
        outStream.write(md5.getBytes("UTF-8"));
        outStream.write("&uid=".getBytes("UTF-8"));
        outStream.write(uid.getBytes("UTF-8"));
        outStream.close();
    }
    catch (Throwable t) {
        throw new SubmissionException("Student Enrollment call to Turnitin failed", t);
    }
    BufferedReader in;
    try {
        in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
    } catch (Throwable t) {
        throw new SubmissionException ("Cannot get Turnitin response. Assuming call was unsuccessful", t);
    }
    DOMParser parser = new DOMParser();
    try {
        parser.parse(new org.xml.sax.InputSource(in));
    } catch (Throwable t) {
        throw new SubmissionException ("Cannot parse Turnitin response. Assuming call was unsuccessful", t);
    }
    Document document = parser.getDocument();
    Element root = document.getDocumentElement();
    if (((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("30") == 0 ||
            ((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("31") == 0 ) {
        log.debug("Enrollment in Class successful");
    } else {
        throw new SubmissionException("Enrollment in Class not successful. Message: " + ((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim() + ". Code: " + ((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim());
    }
}
/**
 * Drains the submission queue: for every item with status NOT_SUBMITTED or
 * SUBMISSION_ERROR_RETRY, ensures the Turnitin class/enrollment/assignment
 * exist, then uploads the content as a multipart/form-data POST (fid=5,
 * fcmd=2). Each item's status and lastError are updated as it progresses;
 * failures on one item never stop the rest of the queue.
 *
 * Fixed: the original called user.getFirstName().trim() before the null
 * check, so a user with a null first/last name threw an NPE that aborted
 * the entire queue run. The null check now precedes the trim.
 */
public void processQueue() {
    log.debug("Processing submission queue");
    ContentReviewItem searchItem = new ContentReviewItem();
    searchItem.setContentId(null);
    searchItem.setStatus(ContentReviewItem.NOT_SUBMITTED_CODE);
    List notSubmittedItems = dao.findByExample(searchItem);
    searchItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
    notSubmittedItems.addAll(dao.findByExample(searchItem));
    log.debug("Total list is now " + notSubmittedItems.size());
    Iterator notSubmittedIterator = notSubmittedItems.iterator();
    ContentReviewItem currentItem;
    while (notSubmittedIterator.hasNext()) {
        currentItem = (ContentReviewItem) notSubmittedIterator.next();
        log.debug("Attempting to submit content: " + currentItem.getContentId() + " for user: " + currentItem.getUserId() + " and site: " + currentItem.getSiteId());
        // NOTE(review): the retry count is initialised here but not visibly
        // incremented in this method — confirm it is bumped elsewhere.
        if (currentItem.getRetryCount() == null ) {
            currentItem.setRetryCount(new Long(0));
        } else if (currentItem.getRetryCount().intValue() > maxRetry) {
            currentItem.setStatus(ContentReviewItem.SUMBISSION_ERROR_RETRY_EXCEEDED);
            dao.update(currentItem);
            continue;
        }
        // Validate the submitting user's details before touching the network.
        User user;
        try {
            user = userDirectoryService.getUser(currentItem.getUserId());
        } catch (UserNotDefinedException e1) {
            log.debug("Submission attempt unsuccessful - User not found: " + e1.getMessage());
            currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_NO_RETRY_CODE);
            dao.update(currentItem);
            continue;
        }
        String uem = getEmail(user);
        if (uem == null ){
            log.debug("User: " + user.getEid() + " has no valid email");
            currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_USER_DETAILS_CODE);
            currentItem.setLastError("no valid email");
            dao.update(currentItem);
            continue;
        }
        String ufn = user.getFirstName();
        if (ufn == null || ufn.trim().equals("")) {
            log.debug("Submission attempt unsuccessful - User has no first name");
            currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_USER_DETAILS_CODE);
            currentItem.setLastError("has no first name");
            dao.update(currentItem);
            continue;
        }
        ufn = ufn.trim();
        String uln = user.getLastName();
        if (uln == null || uln.trim().equals("")) {
            log.debug("Submission attempt unsuccessful - User has no last name");
            currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_USER_DETAILS_CODE);
            currentItem.setLastError("has no last name");
            dao.update(currentItem);
            continue;
        }
        uln = uln.trim();
        // Ensure the Turnitin class, enrollment and assignment exist.
        try {
            createClass(currentItem.getSiteId());
        } catch (Throwable t) {
            log.debug ("Submission attempt unsuccessful: Could not create class", t);
            if (t.getClass() == IOException.class) {
                currentItem.setLastError("Class creation error: " + t.getMessage());
                currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
            } else {
                currentItem.setLastError("Class creation error: " + t.getMessage());
                if (t.getMessage().equals("Class creation call to Turnitin API failed"))
                    currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
                else
                    currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_NO_RETRY_CODE);
            }
            dao.update(currentItem);
            continue;
        }
        try {
            enrollInClass(currentItem.getUserId(), uem, currentItem.getSiteId());
        } catch (Throwable t) {
            log.debug ("Submission attempt unsuccessful: Could not enroll user in class", t);
            if (t.getClass() == IOException.class) {
                currentItem.setLastError("Enrolment error: " + t.getMessage());
                currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
            } else {
                currentItem.setLastError("Enrolment error: " + t.getMessage());
                currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
            }
            dao.update(currentItem);
            continue;
        }
        try {
            createAssignment(currentItem.getSiteId(), currentItem.getTaskId());
        } catch (Throwable t) {
            log.debug ("Submission attempt unsuccessful: Could not create assignment");
            if (t.getClass() == IOException.class) {
                currentItem.setLastError("Assign creation error: " + t.getMessage());
                currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
            } else {
                //this is a to be expected error
                currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_NO_RETRY_CODE);
                currentItem.setLastError("createAssignment: " + t.getMessage());
            }
            dao.update(currentItem);
            continue;
        }
        //get all the info for the api call
        //we do this before connecting so that if there is a problem we can jump out - saves time
        //these errors should probably be caught when a student is enrolled in a class
        //but we check again here to be sure
        String diagnostic = "0";
        String encrypt = "0";
        String fcmd = "2";
        String fid = "5";
        //to get the name of the initial submited file we need the title
        ContentResource resource = null;
        ResourceProperties resourceProperties = null;
        String fileName = null;
        try {
            resource = contentHostingService.getResource(currentItem.getContentId());
            resourceProperties = resource.getProperties();
            fileName = resourceProperties.getProperty(resourceProperties.getNamePropDisplayName());
            log.debug("origional filename is: " + fileName);
            if (fileName == null) {
                //use the id
                fileName = currentItem.getContentId();
            } else if (fileName.length() > 199) {
                // Turnitin rejects over-long titles, so truncate.
                fileName = fileName.substring(0, 199);
            }
        }
        catch (PermissionException e2) {
            log.debug("Submission failed due to permission error: " + e2.getMessage());
            currentItem.setStatus(ContentReviewItem.REPORT_ERROR_NO_RETRY_CODE);
            currentItem.setLastError(e2.getMessage());
            dao.update(currentItem);
            continue;
        }
        catch (IdUnusedException e4) {
            log.debug("Submission failed due to content ID error: " + e4.getMessage());
            currentItem.setStatus(ContentReviewItem.REPORT_ERROR_NO_RETRY_CODE);
            currentItem.setLastError(e4.getMessage());
            dao.update(currentItem);
            continue;
        }
        catch (TypeException e) {
            log.debug("Submission failed due to content Type error: " + e.getMessage());
            currentItem.setStatus(ContentReviewItem.REPORT_ERROR_NO_RETRY_CODE);
            currentItem.setLastError(e.getMessage());
            dao.update(currentItem);
            continue;
        }
        String ptl = currentItem.getUserId() + ":" + fileName;
        String ptype = "2";
        String tem = defaultInstructorEmail;
        String utp = "1";
        String uid = currentItem.getUserId();
        String cid = currentItem.getSiteId();
        String assignid = currentItem.getTaskId();
        String assign = getAssignmentTitle(currentItem.getTaskId());
        String ctl = currentItem.getSiteId();
        String gmtime = this.getGMTime();
        // Values concatenated alphabetically by parameter name, secret last.
        String md5_str = aid + assign + assignid + cid + ctl
                + diagnostic + encrypt + fcmd + fid + gmtime + ptl
                + ptype + said + tem + uem + ufn + uid + uln + utp
                + secretKey;
        String md5;
        try {
            md5 = this.getMD5(md5_str);
        } catch (NoSuchAlgorithmException e) {
            log.debug("Submission attempt failed due to MD5 generation error");
            currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
            currentItem.setLastError("MD5 error");
            dao.update(currentItem);
            continue;
        }
        String boundary = "";
        OutputStream outStream = null;
        HttpsURLConnection connection;
        try {
            URL hostURL = new URL(apiURL);
            connection = (HttpsURLConnection) hostURL.openConnection();
            connection.setRequestMethod("POST");
            connection.setDoOutput(true);
            connection.setDoInput(true);
            Random rand = new Random();
            //make up a boundary that should be unique
            boundary = Long.toString(rand.nextLong(), 26)
                    + Long.toString(rand.nextLong(), 26)
                    + Long.toString(rand.nextLong(), 26);
            // set up the connection to use multipart/form-data
            connection.setRequestProperty("Content-Type","multipart/form-data; boundary=" + boundary);
            log.info("HTTPS connection made to Turnitin");
            outStream = connection.getOutputStream();
            outStream.write(encodeParam("assignid", assignid, boundary).getBytes());
            outStream.write(encodeParam("uid", uid, boundary).getBytes());
            outStream.write(encodeParam("cid", cid, boundary).getBytes());
            outStream.write(encodeParam("aid", aid, boundary).getBytes());
            outStream.write(encodeParam("assign", assign, boundary).getBytes());
            outStream.write(encodeParam("ctl", ctl, boundary).getBytes());
            outStream.write(encodeParam("diagnostic", diagnostic, boundary).getBytes());
            outStream.write(encodeParam("encrypt", encrypt, boundary).getBytes());
            outStream.write(encodeParam("fcmd", fcmd, boundary).getBytes());
            outStream.write(encodeParam("fid", fid, boundary).getBytes());
            outStream.write(encodeParam("gmtime", gmtime, boundary).getBytes());
            outStream.write(encodeParam("ptype", ptype, boundary).getBytes());
            outStream.write(encodeParam("ptl", ptl, boundary).getBytes());
            outStream.write(encodeParam("said", said, boundary).getBytes());
            outStream.write(encodeParam("tem", tem, boundary).getBytes());
            outStream.write(encodeParam("uem", uem, boundary).getBytes());
            outStream.write(encodeParam("ufn", ufn, boundary).getBytes());
            outStream.write(encodeParam("uln", uln, boundary).getBytes());
            outStream.write(encodeParam("utp", utp, boundary).getBytes());
            outStream.write(encodeParam("md5", md5, boundary).getBytes());
            // put in the actual file
            outStream.write(("--" + boundary
                    + "\r\nContent-Disposition: form-data; name=\"pdata\"; filename=\""
                    + currentItem.getContentId() + "\"\r\n"
                    + "Content-Type: " + resource.getContentType()
                    + "\r\ncontent-transfer-encoding: binary" + "\r\n\r\n")
                    .getBytes());
            outStream.write(resource.getContent());
            outStream.write("\r\n".getBytes("UTF-8"));
            outStream.write(("--" + boundary + "--").getBytes());
            outStream.close();
        } catch (IOException e1) {
            log.debug("Submission failed due to IO error: " + e1.getMessage());
            currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
            currentItem.setLastError(e1.getMessage());
            dao.update(currentItem);
            continue;
        }
        catch (ServerOverloadException e3) {
            log.debug("Submission failed due to server error: " + e3.getMessage());
            currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
            currentItem.setLastError(e3.getMessage());
            dao.update(currentItem);
            continue;
        }
        BufferedReader in;
        try {
            in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        } catch (IOException e1) {
            log.debug("Unable to determine Submission status due to response IO error: " + e1.getMessage() + ". Assume unsuccessful");
            currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
            dao.update(currentItem);
            continue;
        }
        DOMParser parser = new DOMParser();
        try {
            parser.parse(new org.xml.sax.InputSource(in));
        } catch (SAXException e) {
            log.error("Unable to determine Submission status due to response parsing error: " + e.getMessage() + ". Assume unsuccessful");
            currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
            dao.update(currentItem);
            continue;
        } catch (IOException e) {
            log.warn("Unable to determine Submission status due to response IO error: " + e.getMessage() + ". Assume unsuccessful");
            currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
            dao.update(currentItem);
            continue;
        }
        Document document = parser.getDocument();
        Element root = document.getDocumentElement();
        // rcode 51 = submission accepted; objectID is the Turnitin handle
        // used later to retrieve the report.
        if (((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("51") == 0) {
            log.debug("Submission successful");
            currentItem.setExternalId(((CharacterData) (root.getElementsByTagName("objectID").item(0).getFirstChild())).getData().trim());
            currentItem.setStatus(ContentReviewItem.SUBMITTED_AWAITING_REPORT_CODE);
            currentItem.setDateSubmitted(new Date());
            dao.update(currentItem);
        } else {
            log.debug("Submission not successful: " + ((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim());
            if (((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim().equals("User password does not match user email")) {
                currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
            } else {
                currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_NO_RETRY_CODE);
            }
            currentItem.setLastError("Submission Error: " +((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim());
            dao.update(currentItem);
        }
    }
    log.debug("Submission queue processed");
}
/**
 * Returns the current GMT time in the Turnitin API format:
 * "yyyyMMddHH" followed by the tens digit of the minute
 * (i.e. the time truncated to a 10-minute resolution).
 */
private String getGMTime() {
    TimeZone gmt = TimeZone.getTimeZone("GMT");
    SimpleDateFormat formatter = (SimpleDateFormat) DateFormat.getDateInstance();
    formatter.applyPattern("yyyyMMddHH");
    formatter.setTimeZone(gmt);
    Calendar now = Calendar.getInstance(gmt);
    // Integer division gives the tens digit of the current minute.
    int minuteTens = now.get(Calendar.MINUTE) / 10;
    return formatter.format(now.getTime()) + Integer.toString(minuteTens);
}
/**
 * Hex-encoded MD5 digest of the given string, used to sign Turnitin API calls.
 * <p>
 * Improvements over the original: accumulates into a StringBuilder instead of
 * O(n^2) String concatenation, and extracts the two nibbles with plain shifts
 * and masks instead of mutating a byte in place.
 *
 * @param md5_string the concatenated request parameters (secret key last)
 * @return 32-character lowercase hex MD5 digest
 * @throws NoSuchAlgorithmException if the JVM has no MD5 provider
 */
private String getMD5(String md5_string) throws NoSuchAlgorithmException {
    MessageDigest md = MessageDigest.getInstance("MD5");
    // NOTE(review): uses the platform default charset, as the original did.
    // The input is expected to be ASCII config values; confirm before changing.
    md.update(md5_string.getBytes());
    StringBuilder hex = new StringBuilder(32);
    for (byte b : md.digest()) {
        hex.append(Integer.toHexString((b >> 4) & 0x0f)); // high nibble
        hex.append(Integer.toHexString(b & 0x0f));        // low nibble
    }
    return hex.toString();
}
public void checkForReports() {
    // Polls the Turnitin API for originality reports (fid=10, fcmd=2: list
    // reports for an assignment) for every submission we are still waiting on,
    // and records the overlap score and report date as reports become available.
    // get the list of all items that are waiting for reports
    List awaitingReport = dao.findByProperties(ContentReviewItem.class,
            new String[] { "status" },
            new Object[] { ContentReviewItem.SUBMITTED_AWAITING_REPORT_CODE});
    awaitingReport.addAll(dao.findByProperties(ContentReviewItem.class,
            new String[] { "status" },
            new Object[] { ContentReviewItem.REPORT_ERROR_RETRY_CODE}));
    Iterator listIterator = awaitingReport.iterator();
    // Cache of Turnitin objectID -> Integer overlap score (-1 = report not
    // yet generated). Each site/task report list is fetched only once; later
    // items whose externalId is already in the table skip the network call.
    HashMap reportTable = new HashMap();
    log.debug("There are " + awaitingReport.size() + " submissions awaiting reports");
    ContentReviewItem currentItem;
    while (listIterator.hasNext()) {
        currentItem = (ContentReviewItem) listIterator.next();
        if (!reportTable.containsKey(currentItem.getExternalId())) {
            // get the list from turnitin and see if the review is available
            log.debug("Attempting to update hashtable with reports for site " + currentItem.getSiteId());
            // Turnitin API request parameters.
            String diagnostic = "0";
            String encrypt = "0";
            String fcmd = "2";
            String fid = "10";
            String tem = defaultInstructorEmail;
            String uem = defaultInstructorEmail;
            String ufn = defaultInstructorFName;
            String uln = defaultInstructorLName;
            String utp = "2";
            String uid = defaultInstructorId;
            String cid = currentItem.getSiteId();
            String assignid = currentItem.getTaskId();
            String assign = currentItem.getTaskId();
            String ctl = currentItem.getSiteId();
            String gmtime = this.getGMTime();
            // Request signature: parameter values concatenated in alphabetical
            // order of parameter name, with the shared secret key last.
            String md5_str = aid + assign + assignid + cid + ctl
                    + diagnostic + encrypt + fcmd + fid + gmtime + said
                    + tem + uem + ufn + uid + uln + utp + secretKey;
            String md5;
            try{
                md5 = this.getMD5(md5_str);
            } catch (NoSuchAlgorithmException e) {
                // Cannot sign any request at all: mark this item non-retryable
                // and abandon the whole polling run.
                log.debug("Update failed due to MD5 generation error");
                currentItem.setStatus(ContentReviewItem.REPORT_ERROR_NO_RETRY_CODE);
                currentItem.setLastError("MD5 generation error");
                dao.update(currentItem);
                listIterator.remove();
                break;
            }
            HttpsURLConnection connection;
            try {
                URL hostURL = new URL(apiURL);
                connection = (HttpsURLConnection) hostURL.openConnection();
                connection.setRequestMethod("GET");
                connection.setDoOutput(true);
                connection.setDoInput(true);
                log.info("HTTPS connection made to Turnitin");
                OutputStream out = connection.getOutputStream();
                out.write("fid=".getBytes("UTF-8"));
                out.write(fid.getBytes("UTF-8"));
                out.write("&fcmd=".getBytes("UTF-8"));
                out.write(fcmd.getBytes("UTF-8"));
                out.write("&uid=".getBytes("UTF-8"));
                out.write(uid.getBytes("UTF-8"));
                out.write("&tem=".getBytes("UTF-8"));
                out.write(tem.getBytes("UTF-8"));
                out.write("&assign=".getBytes("UTF-8"));
                out.write(assign.getBytes("UTF-8"));
                out.write("&assignid=".getBytes("UTF-8"));
                out.write(assignid.getBytes("UTF-8"));
                out.write("&cid=".getBytes("UTF-8"));
                out.write(cid.getBytes("UTF-8"));
                out.write("&ctl=".getBytes("UTF-8"));
                out.write(ctl.getBytes("UTF-8"));
                // NOTE(review): this is the only write without an explicit
                // charset; harmless for "&encrypt=" (ASCII) but inconsistent.
                out.write("&encrypt=".getBytes());
                out.write(encrypt.getBytes("UTF-8"));
                out.write("&aid=".getBytes("UTF-8"));
                out.write(aid.getBytes("UTF-8"));
                out.write("&said=".getBytes("UTF-8"));
                out.write(said.getBytes("UTF-8"));
                out.write("&diagnostic=".getBytes("UTF-8"));
                out.write(diagnostic.getBytes("UTF-8"));
                out.write("&uem=".getBytes("UTF-8"));
                out.write(URLEncoder.encode(uem, "UTF-8").getBytes("UTF-8"));
                out.write("&ufn=".getBytes("UTF-8"));
                out.write(ufn.getBytes("UTF-8"));
                out.write("&uln=".getBytes("UTF-8"));
                out.write(uln.getBytes("UTF-8"));
                out.write("&utp=".getBytes("UTF-8"));
                out.write(utp.getBytes("UTF-8"));
                out.write("&gmtime=".getBytes("UTF-8"));
                out.write(URLEncoder.encode(gmtime, "UTF-8").getBytes("UTF-8"));
                out.write("&md5=".getBytes("UTF-8"));
                out.write(md5.getBytes("UTF-8"));
                out.close();
            } catch (IOException e) {
                // Network failure: flag the item for retry and stop this run.
                log.debug("Update failed due to IO error: " + e.getMessage());
                currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
                currentItem.setLastError(e.getMessage());
                dao.update(currentItem);
                break;
            }
            BufferedReader in;
            try{
                in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
            } catch (IOException e) {
                log.debug("Update failed due to IO error: " + e.getMessage());
                currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
                currentItem.setLastError(e.getMessage());
                dao.update(currentItem);
                break;
            }
            DOMParser parser = new DOMParser();
            try{
                parser.parse(new InputSource(in));
            } catch (SAXException e1) {
                log.error("Update failed due to Parsing error: " + e1.getMessage());
                log.debug(e1.toString());
                currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
                currentItem.setLastError(e1.getMessage());
                dao.update(currentItem);
                //we may as well go on as the document may be in the part of the file that was parsed
                continue;
            } catch (IOException e2) {
                log.warn("Update failed due to IO error: " + e2.getMessage());
                currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
                currentItem.setLastError(e2.getMessage());
                dao.update(currentItem);
                continue;
            }
            Document document = parser.getDocument();
            Element root = document.getDocumentElement();
            // rcode 72 is treated as "report list returned successfully"
            // (presumably per the Turnitin API spec -- confirm against docs).
            if (((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("72") == 0) {
                log.debug("Report list returned successfully");
                NodeList objects = root.getElementsByTagName("object");
                String objectId;
                String similarityScore;
                String overlap = "";
                log.debug(objects.getLength() + " objects in the returned list");
                for (int i=0; i<objects.getLength(); i++) {
                    similarityScore = ((CharacterData) (((Element)(objects.item(i))).getElementsByTagName("similarityScore").item(0).getFirstChild())).getData().trim();
                    objectId = ((CharacterData) (((Element)(objects.item(i))).getElementsByTagName("objectID").item(0).getFirstChild())).getData().trim();
                    // similarityScore == -1 means the report is not generated yet.
                    if (similarityScore.compareTo("-1") != 0) {
                        overlap = ((CharacterData) (((Element)(objects.item(i))).getElementsByTagName("overlap").item(0).getFirstChild())).getData().trim();
                        reportTable.put(objectId, new Integer(overlap));
                    } else {
                        reportTable.put(objectId, new Integer(-1));
                    }
                    log.debug("objectId: " + objectId + " similarity: " + similarityScore + " overlap: " + overlap);
                }
            } else {
                log.debug("Report list request not successful");
                log.debug(document.toString());
            }
        }
        int reportVal;
        // check if the report value is now there (there may have been a
        // failure to get the list above)
        if (reportTable.containsKey(currentItem.getExternalId())) {
            reportVal = ((Integer) (reportTable.get(currentItem
                    .getExternalId()))).intValue();
            log.debug("reportVal for " + currentItem.getExternalId() + ": " + reportVal);
            if (reportVal != -1) {
                currentItem.setReviewScore(reportVal);
                currentItem
                        .setStatus(ContentReviewItem.SUBMITTED_REPORT_AVAILABLE_CODE);
                currentItem.setDateReportReceived(new Date());
                dao.update(currentItem);
                log.debug("new report received: " + currentItem.getExternalId() + " -> " + currentItem.getReviewScore());
            }
        }
    }
}
public List getReportList(String siteId, String taskId) {
    // All items in this site/task whose originality report has arrived.
    log.debug("Returning list of reports for site: " + siteId + ", task: " + taskId);
    ContentReviewItem example = new ContentReviewItem(null, siteId, taskId, null, null,
            ContentReviewItem.SUBMITTED_REPORT_AVAILABLE_CODE);
    return dao.findByExample(example);
}
public List getReportList(String siteId) {
    // All items in this site (any task) whose originality report has arrived.
    log.debug("Returning list of reports for site: " + siteId);
    ContentReviewItem example = new ContentReviewItem(null, siteId, null, null, null,
            ContentReviewItem.SUBMITTED_REPORT_AVAILABLE_CODE);
    return dao.findByExample(example);
}
/** Human-readable name of this review service ("Turnitin"). */
public String getServiceName() {
    // SERVICE_NAME is a constant; do not qualify it with `this`.
    return SERVICE_NAME;
}
public void resetUserDetailsLockedItems(String userId) {
    // Move every item of this user that is stuck in the "bad user details"
    // error state back into the retryable error state.
    ContentReviewItem example = new ContentReviewItem();
    example.setContentId(null);
    example.setStatus(ContentReviewItem.SUBMISSION_ERROR_USER_DETAILS_CODE);
    example.setUserId(userId);
    List lockedItems = dao.findByExample(example);
    for (Iterator it = lockedItems.iterator(); it.hasNext();) {
        ContentReviewItem locked = (ContentReviewItem) it.next();
        locked.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
        dao.update(locked);
    }
}
/**
 * Maps a similarity score to a traffic-light style icon URL:
 * 0 = blue, &lt;25 = green, &lt;50 = yellow, &lt;75 = orange, else red.
 *
 * @param score the review overlap score (must not be null)
 * @return path to the matching score icon
 */
public String getIconUrlforScore(Long score) {
    String urlBase = "/sakai-content-review-tool/images/score_";
    String suffix = ".gif";
    // Compare on the primitive value instead of allocating deprecated
    // `new Long(...)` boxes for every comparison.
    long s = score.longValue();
    if (s == 0) {
        return urlBase + "blue" + suffix;
    } else if (s < 25) {
        return urlBase + "green" + suffix;
    } else if (s < 50) {
        return urlBase + "yellow" + suffix;
    } else if (s < 75) {
        return urlBase + "orange" + suffix;
    } else {
        return urlBase + "red" + suffix;
    }
}
/**
 * Whether this resource may be submitted to the review service.
 * Currently unconditionally true; content-type filtering is not implemented.
 */
public boolean isAcceptableContent(ContentResource resource) {
    //for now we accept all content
    // TODO: Check against content types accepted by Turnitin
    return true;
}
/**
 * Whether the review service should be offered in the given site.
 * Currently unconditionally true; site-type filtering is not implemented.
 */
public boolean isSiteAcceptable(Site s) {
    // TODO: Allow for visibility in course but not project sites
    return true;
}
/**
 * Is this a valid email the service will recognize
 * @param email candidate address (may be null)
 * @return true when the trimmed address matches the accepted pattern
 */
private boolean isValidEmail(String email) {
    // TODO: Use a generic Sakai utility class (when a suitable one exists)
    if (email == null || email.equals("")) {
        return false;
    }
    String candidate = email.trim();
    // Cheap rejects before the full pattern: must contain '@', no embedded spaces.
    if (candidate.indexOf("@") == -1 || candidate.indexOf(" ") > 0) {
        return false;
    }
    return candidate.matches("^[_A-Za-z0-9-]+(\\.[_A-Za-z0-9-]+)*@[A-Za-z0-9-]+(\\.[A-Za-z0-9-]+)*$");
}
/**
 * Resolves the best email address for a user, or null when no valid one exists.
 * <p>
 * Depending on {@code preferSystemProfileEmail}, either the account email or
 * the system-profile (SakaiPerson) email is consulted first, with the other
 * used as a fallback.
 * <p>
 * Fixes over the original: {@code user.getEmail()} / {@code sp.getMail()} may
 * return null, and the original called {@code .trim()} on them unguarded
 * (NullPointerException); a duplicated re-assignment in the prefer-profile
 * branch is also removed.
 *
 * @param user the user to look up (must not be null)
 * @return a validated, trimmed email address, or null
 */
private String getEmail(User user) {
    String uem = null;
    log.debug("Looking for email for " + user.getEid() + " with prefer system profile email set to " + this.preferSystemProfileEmail);
    if (!this.preferSystemProfileEmail) {
        // Account email first, system profile as fallback.
        uem = validEmailOrNull(user.getEmail());
        log.debug("got email of " + uem);
        if (uem == null) {
            SakaiPerson sp = sakaiPersonManager.getSakaiPerson(user.getId(), sakaiPersonManager.getSystemMutableType());
            if (sp != null) {
                uem = validEmailOrNull(sp.getMail());
                log.debug("Got system profile email of " + uem);
            } else {
                log.debug("this user has no systemMutable profile");
            }
        }
    } else {
        // System profile first, account email as fallback.
        log.debug("try system profile email first");
        SakaiPerson sp = sakaiPersonManager.getSakaiPerson(user.getId(), sakaiPersonManager.getSystemMutableType());
        if (sp != null) {
            uem = validEmailOrNull(sp.getMail());
            log.debug("Got system profile email of " + uem);
        } else {
            log.debug("this user has no systemMutable profile");
        }
        if (uem == null) {
            uem = validEmailOrNull(user.getEmail());
        }
    }
    return uem;
}

/** Null-safe trim + validation: returns the trimmed address or null if unusable. */
private String validEmailOrNull(String email) {
    if (email == null) {
        return null;
    }
    String trimmed = email.trim();
    if (trimmed.equals("") || !isValidEmail(trimmed)) {
        return null;
    }
    return trimmed;
}
/**
 * Whether previously queued content may be queued again (see queueContent,
 * which deletes the old queue rows when this returns true).
 */
public boolean allowResubmission() {
    return true;
}
/**
 * Drains a reader into a single string (line terminators are dropped).
 * <p>
 * Bug fix: the original called {@code retval.concat(inputLine)} and discarded
 * the result -- String.concat returns a new string and does not mutate the
 * receiver -- so the method always returned "". Accumulate in a StringBuilder
 * instead. Read errors are logged (rather than dumped via printStackTrace)
 * and whatever was read so far is returned.
 *
 * @param in the reader to drain; not closed by this method
 * @return the concatenated lines, possibly empty
 */
private String readerToString(BufferedReader in) {
    StringBuilder content = new StringBuilder();
    String inputLine;
    try {
        while ((inputLine = in.readLine()) != null) {
            content.append(inputLine);
        }
    } catch (Exception e) {
        log.warn("Error reading response into string: " + e.getMessage());
    }
    return content.toString();
}
}
| contentreview-impl/turnitin/src/java/org/sakaiproject/contentreview/impl/turnitin/TurnitinReviewServiceImpl.java | /**********************************************************************************
* $URL:
* $Id:
***********************************************************************************
*
* Copyright (c) 2007 The Sakai Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.contentreview.impl.turnitin;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLEncoder;
import java.net.URLDecoder;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.TimeZone;
import javax.net.ssl.HttpsURLConnection;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xerces.parsers.DOMParser;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.content.api.ContentHostingService;
import org.sakaiproject.content.api.ContentResource;
import org.sakaiproject.contentreview.dao.ContentReviewDao;
import org.sakaiproject.contentreview.exception.QueueException;
import org.sakaiproject.contentreview.exception.ReportException;
import org.sakaiproject.contentreview.exception.SubmissionException;
import org.sakaiproject.contentreview.model.ContentReviewItem;
import org.sakaiproject.contentreview.service.ContentReviewService;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.exception.ServerOverloadException;
import org.sakaiproject.exception.TypeException;
import org.sakaiproject.tool.api.ToolManager;
import org.sakaiproject.user.api.User;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserNotDefinedException;
import org.sakaiproject.entity.api.EntityManager;
import org.sakaiproject.entity.api.EntityProducer;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.assignment.api.Assignment;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.api.common.edu.person.SakaiPersonManager;
import org.sakaiproject.api.common.edu.person.SakaiPerson;
import org.sakaiproject.api.common.type.Type;
import org.sakaiproject.api.common.manager.Persistable;
import org.w3c.dom.CharacterData;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
public class TurnitinReviewServiceImpl implements ContentReviewService {
    // Name reported by getServiceName().
    private static final String SERVICE_NAME="Turnitin";
    // Turnitin account id ("aid") -- populated from sakai.properties in init().
    private String aid = null;
    // Turnitin sub-account id ("said") -- populated in init().
    private String said = null;
    // Shared secret used to MD5-sign every API request -- populated in init().
    private String secretKey = null;
    // Base URL of the Turnitin API endpoint (overridable via turnitin.apiURL).
    private String apiURL = "https://www.turnitin.com/api.asp?";
    // Fallback assignment name when queueContent() gets a null taskId.
    private String defaultAssignmentName = null;
    // Default instructor identity used for instructor-level (utp=2) API calls.
    private String defaultInstructorEmail = null;
    private String defaultInstructorFName = null;
    private String defaultInstructorLName = null;
    private String defaultInstructorPassword = null;
    // Retry limit read from turnitin.maxRetry -- consumed elsewhere in this class.
    private Long maxRetry = null;
    //note that the assignment id actually has to be unique globally so use this as a prefix
    // eg. assignid = defaultAssignId + siteId
    private String defaultAssignId = null;
    private String defaultClassPassword = null;
    //private static final String defaultInstructorId = defaultInstructorFName + " " + defaultInstructorLName;
    private String defaultInstructorId = null;
    private static final Log log = LogFactory
            .getLog(TurnitinReviewServiceImpl.class);
    // DAO for all ContentReviewItem queue persistence.
    private ContentReviewDao dao;
/** Spring injection: DAO for the content review queue. */
public void setDao(ContentReviewDao dao) {
    this.dao = dao;
}
private ToolManager toolManager;
/** Spring injection: tool manager, used to resolve the current site context. */
public void setToolManager(ToolManager toolManager) {
    this.toolManager = toolManager;
}
private UserDirectoryService userDirectoryService;
/** Spring injection: directory service, used to resolve the current user. */
public void setUserDirectoryService(
        UserDirectoryService userDirectoryService) {
    this.userDirectoryService = userDirectoryService;
}
private EntityManager entityManager;
/** Spring injection: entity manager, used to resolve assignment references. */
public void setEntityManager(EntityManager en){
    this.entityManager = en;
}
private ContentHostingService contentHostingService;
/** Spring injection: content hosting service. */
public void setContentHostingService(
        ContentHostingService contentHostingService) {
    this.contentHostingService = contentHostingService;
}
private ServerConfigurationService serverConfigurationService;
/** Spring injection: server configuration (source of all turnitin.* settings). */
public void setServerConfigurationService (ServerConfigurationService serverConfigurationService) {
    this.serverConfigurationService = serverConfigurationService;
}
private SakaiPersonManager sakaiPersonManager;
/** Spring injection: SakaiPerson manager, used for system-profile emails. */
public void setSakaiPersonManager(SakaiPerson\u0000Manager s) {
    this.sakaiPersonManager = s;
}
//Should the service use a authoratative source for email?
private boolean preferSystemProfileEmail;
/** Spring injection: prefer the system-profile email over the account email. */
public void setPreferSystemProfileEmail(boolean b) {
    preferSystemProfileEmail = b;
}
/**
 * Spring lifecycle hook: reads all Turnitin account, instructor and keystore
 * settings from sakai.properties and installs the HTTPS trust store.
 * <p>
 * Cleanup over the original: stray empty statements (trailing {@code ;;}) and
 * dead commented-out keystore lines removed; no behavior change.
 */
public void init() {
    // TODO check on this function manager - sakai permissions?
    // TODO what does the example init method do exactly? is it important?
    log.info("init");
    aid = serverConfigurationService.getString("turnitin.aid");
    said = serverConfigurationService.getString("turnitin.said");
    secretKey = serverConfigurationService.getString("turnitin.secretKey");
    apiURL = serverConfigurationService.getString("turnitin.apiURL", "https://www.turnitin.com/api.asp?");
    defaultAssignmentName = serverConfigurationService.getString("turnitin.defaultAssignmentName");
    defaultInstructorEmail = serverConfigurationService.getString("turnitin.defaultInstructorEmail");
    defaultInstructorFName = serverConfigurationService.getString("turnitin.defaultInstructorFName");
    defaultInstructorLName = serverConfigurationService.getString("turnitin.defaultInstructorLName");
    defaultInstructorPassword = serverConfigurationService.getString("turnitin.defaultInstructorPassword");
    // note that the assignment id actually has to be unique globally so use this as a prefix
    // assignid = defaultAssignId + siteId
    defaultAssignId = serverConfigurationService.getString("turnitin.defaultAssignId");
    defaultClassPassword = serverConfigurationService.getString("turnitin.defaultClassPassword");
    defaultInstructorId = serverConfigurationService.getString("turnitin.defaultInstructorId");
    maxRetry = new Long(serverConfigurationService.getInt("turnitin.maxRetry", 100));
    // Install the keystore holding Turnitin's certificate chain for HTTPS calls.
    System.setProperty("javax.net.ssl.trustStore", serverConfigurationService.getString("turnitin.keystore_name"));
    System.setProperty("javax.net.ssl.trustStorePassword", serverConfigurationService.getString("turnitin.keystore_password"));
}
/**
 * Adds a content item to the submission queue. Null userId/siteId/taskId are
 * filled in from the current session / a generated default. If the content is
 * already queued it is treated as a resubmission (old rows deleted) when
 * {@link #allowResubmission()} is true, otherwise a QueueException is thrown.
 */
public void queueContent(String userId, String siteId, String taskId, String contentId)
        throws QueueException {
    log.debug("Method called queueContent(" + userId + "," + siteId + "," + contentId + ")");
    if (userId == null) {
        log.debug("Using current user")
;
        userId = userDirectoryService.getCurrentUser().getId();
    }
    if (siteId == null) {
        log.debug("Using current site");
        siteId = toolManager.getCurrentPlacement().getContext();
    }
    if (taskId == null) {
        log.debug("Generating default taskId");
        taskId = siteId + " " + defaultAssignmentName;
    }
    log.debug("Adding content: " + contentId + " from site " + siteId
            + " and user: " + userId + " for task: " + taskId + " to submission queue");
    /*
     * first check that this content has not been submitted before this may
     * not be the best way to do this - perhaps use contentId as the primary
     * key for now id is the primary key and so the database won't complain
     * if we put in repeats necessitating the check
     */
    List existingItems = dao
            .findByExample(new ContentReviewItem(contentId));
    if (existingItems.size() > 0) {
        if (this.allowResubmission()) {
            log.debug("Content: " + contentId + " is already queued, assuming resubmission");
            for (int i =0; i < existingItems.size(); i++) {
                dao.delete(existingItems.get(i));
            }
        } else {
            throw new QueueException("Content " + contentId + " is already queued, not re-queued");
        }
    }
    // New items start in NOT_SUBMITTED state; the submission job picks them up.
    dao.save(new ContentReviewItem(userId, siteId, taskId, contentId, new Date(),
            ContentReviewItem.NOT_SUBMITTED_CODE));
}
public int getReviewScore(String contentId)
        throws QueueException, ReportException, Exception {
    // Look up the queued item for this content and return its similarity score.
    log.debug("Getting review score for content: " + contentId);
    List matches = dao.findByExample(new ContentReviewItem(contentId));
    if (matches.size() == 0) {
        log.debug("Content " + contentId + " has not been queued previously");
        throw new QueueException("Content " + contentId + " has not been queued previously");
    }
    if (matches.size() > 1) {
        log.debug("More than one matching item - using first item found");
    }
    ContentReviewItem match = (ContentReviewItem) matches.iterator().next();
    if (match.getStatus().compareTo(ContentReviewItem.SUBMITTED_REPORT_AVAILABLE_CODE) != 0) {
        log.debug("Report not available: " + match.getStatus());
        throw new ReportException("Report not available: " + match.getStatus());
    }
    return match.getReviewScore().intValue();
}
/**
 * Builds the signed Turnitin URL (fid=6, fcmd=1) at which the originality
 * report for the given content can be viewed. Throws if the content was never
 * queued or its report has not yet arrived. This method does not contact
 * Turnitin; it only assembles and signs the URL.
 */
public String getReviewReport(String contentId)
        throws QueueException, ReportException {
    // first retreive the record from the database to get the externalId of
    // the content
    log.debug("Getting report for content: " + contentId);
    List matchingItems = dao.findByExample(new ContentReviewItem(contentId));
    if (matchingItems.size() == 0) {
        log.debug("Content " + contentId + " has not been queued previously");
        throw new QueueException("Content " + contentId + " has not been queued previously");
    }
    if (matchingItems.size() > 1)
        log.debug("More than one matching item found - using first item found");
    // check that the report is available
    // TODO if the database record does not show report available check with
    // turnitin (maybe)
    ContentReviewItem item = (ContentReviewItem) matchingItems.iterator().next();
    if (item.getStatus().compareTo(ContentReviewItem.SUBMITTED_REPORT_AVAILABLE_CODE) != 0) {
        log.debug("Report not available: " + item.getStatus());
        throw new ReportException("Report not available: " + item.getStatus());
    }
    // report is available - generate the URL to display
    String oid = item.getExternalId();
    String fid = "6";
    String fcmd = "1";
    String encrypt = "0";
    String diagnostic = "0";
    String uem = defaultInstructorEmail;
    String ufn = defaultInstructorFName;
    String uln = defaultInstructorLName;
    String utp = "2";
    // is it worthwhile using this?
    String uid = defaultInstructorId;
    String cid = item.getSiteId();
    String assignid = defaultAssignId + item.getSiteId();
    /*User user = userDirectoryService.getUser(item.getUserId());
    String uem = user.getEmail();
    String ufn = user.getFirstName();
    String uln = user.getLastName();
    String utp = "1";
    // is it worthwhile using this?
    String uid = item.getUserId();
    String cid = item.getSiteId();*/
    String gmtime = getGMTime();
    // note that these vars must be ordered alphabetically according to
    // their names with secretKey last
    String md5_str = aid + assignid + cid + diagnostic + encrypt + fcmd + fid + gmtime + oid
            + said + uem + ufn + uid + uln + utp + secretKey;
    String md5;
    try {
        md5 = getMD5(md5_str);
    } catch (Throwable t) {
        throw new ReportException("Cannont do MD5 hash of data for Turnitin API call to retrieve report", t);
    }
    // Assemble the final query string; md5 must be last.
    String reportURL = apiURL;
    reportURL += "fid=";
    reportURL += fid;
    reportURL += "&fcmd=";
    reportURL += fcmd;
    reportURL += "&assignid=";
    reportURL += assignid;
    reportURL += "&uid=";
    reportURL += uid;
    reportURL += "&cid=";
    reportURL += cid;
    reportURL += "&encrypt=";
    reportURL += encrypt;
    reportURL += "&aid=";
    reportURL += aid;
    reportURL += "&said=";
    reportURL += said;
    reportURL += "&diagnostic=";
    reportURL += diagnostic;
    reportURL += "&oid=";
    reportURL += oid;
    reportURL += "&uem=";
    reportURL += uem;
    reportURL += "&ufn=";
    reportURL += ufn;
    reportURL += "&uln=";
    reportURL += uln;
    reportURL += "&utp=";
    reportURL += utp;
    reportURL += "&gmtime=";
    reportURL += gmtime;
    reportURL += "&md5=";
    reportURL += md5;
    return reportURL;
}
// TODO put this somewhere else
// ///////////////////////////////////IGNORE THIS
// STUFF/////////////////////////////////////////////
// this stuff was used to set up the keystore and test https connection
/*
*
* String retStr = "";
*
* try{
*
* //TODO get this info from somewhere else - the sakai.properties or
* something System.setProperty("javax.net.ssl.trustStore",
* "dave_keystore"); System.setProperty("javax.net.ssl.trustStorePassword",
* "dave_keystore");
*
* URL url = new URL("https://www.turnitin.com/api.asp"); HttpsURLConnection
* con = (HttpsURLConnection) url.openConnection(); con.setDoOutput(true);
* con.getOutputStream().close();
*
* BufferedReader in = new BufferedReader(new
* InputStreamReader(con.getInputStream()));
*
* String inLine = ""; while ((inLine = in.readLine()) != null) { retStr =
* retStr.concat(inLine); log.info(inLine); } } catch (SSLHandshakeException
* e1) { log.error(e1.toString()); //this stuff only for saving the
* necessary certificates to use turnitin CertPath certPath =
* ((CertPathValidatorException)(e1.getCause().getCause())).getCertPath();
* List certs = certPath.getCertificates(); Iterator it = certs.iterator();
* try { KeyStore ks = KeyStore.getInstance("JKS"); //ks.load(new
* FileInputStream(new File("dave_keystore")),
* "dave_keystore".toCharArray()); ks.load(null,
* "dave_keystore".toCharArray()); int i=0; Certificate cert; while
* (it.hasNext()) { cert = (Certificate) it.next();
* ks.setCertificateEntry("Turnitin path " + i++, cert);
* log.info(cert.toString()); } log.info(ks.size() + " certificates added to
* the keystore"); OutputStream outStream = new FileOutputStream(new
* File("dave_keystore")); ks.store(outStream,
* "dave_keystore".toCharArray()); } catch (Exception e) {
* log.error(e.toString()); } } catch (Exception e) {
* log.error(e.toString()); }
*
* return retStr; }
*/// ////////////////////////////////////OK STOP IGNORING
// NOW//////////////////////////////////////////
public Long getReviewStatus(String contentId)
        throws QueueException {
    // Return the raw queue status code of the item for this content.
    log.debug("Returning review status for content: " + contentId);
    List queued = dao.findByExample(new ContentReviewItem(contentId));
    if (queued.size() == 0) {
        log.debug("Content " + contentId + " has not been queued previously");
        throw new QueueException("Content " + contentId + " has not been queued previously");
    }
    if (queued.size() > 1) {
        log.debug("more than one matching item found - using first item found");
    }
    ContentReviewItem first = (ContentReviewItem) queued.iterator().next();
    return first.getStatus();
}
public Date getDateQueued(String contentId)
        throws QueueException {
    // Return when this content was added to the submission queue.
    log.debug("Returning date queued for content: " + contentId);
    List queued = dao.findByExample(new ContentReviewItem(contentId));
    if (queued.size() == 0) {
        log.debug("Content " + contentId + " has not been queued previously");
        throw new QueueException("Content " + contentId + " has not been queued previously");
    }
    if (queued.size() > 1) {
        log.debug("more than one matching item found - using first item found");
    }
    ContentReviewItem first = (ContentReviewItem) queued.iterator().next();
    return first.getDateQueued();
}
public Date getDateSubmitted(String contentId)
        throws QueueException, SubmissionException {
    // Return when this content was actually submitted to the review service.
    log.debug("Returning date queued for content: " + contentId);
    List queued = dao.findByExample(new ContentReviewItem(contentId));
    if (queued.size() == 0) {
        log.debug("Content " + contentId + " has not been queued previously");
        throw new QueueException("Content " + contentId + " has not been queued previously");
    }
    if (queued.size() > 1) {
        log.debug("more than one matching item found - using first item found");
    }
    ContentReviewItem item = (ContentReviewItem) queued.iterator().next();
    Date submitted = item.getDateSubmitted();
    if (submitted == null) {
        log.debug("Content not yet submitted: " + item.getStatus());
        throw new SubmissionException("Content not yet submitted: " + item.getStatus());
    }
    return submitted;
}
private String encodeParam(String name, String value, String boundary) {
    // Render a single multipart/form-data part for the given field.
    StringBuilder part = new StringBuilder();
    part.append("--").append(boundary);
    part.append("\r\nContent-Disposition: form-data; name=\"");
    part.append(name).append("\"\r\n\r\n");
    part.append(value).append("\r\n");
    return part.toString();
}
/**
 * Creates a Turnitin "class" for the given Sakai site (API fid=2, fcmd=2),
 * using the default instructor identity. Throws SubmissionException on any
 * signing, network, parsing or API-level failure.
 */
private void createClass(String siteId) throws SubmissionException{
    log.debug("Creating class for site: " + siteId);
    // Turnitin API parameters; the class id and title are both the siteId.
    String cpw = defaultClassPassword;
    String ctl = siteId;
    String diagnostic = "0";
    String encrypt = "0";
    String fcmd = "2";
    String fid = "2";
    String uem = defaultInstructorEmail;
    String ufn = defaultInstructorFName;
    String uln = defaultInstructorLName;
    String utp = "2"; //user type 2 = instructor
    String upw = defaultInstructorPassword;
    String cid = siteId;
    String uid = defaultInstructorId;
    String gmtime = this.getGMTime();
    // MD5 of function 2 - Create a class under a given account (instructor only)
    String md5_str = aid + cid + cpw + ctl + diagnostic + encrypt + fcmd + fid +
            gmtime + said + uem + ufn + uid + uln + upw + utp + secretKey;
    String md5;
    try{
        md5 = this.getMD5(md5_str);
    } catch (Throwable t) {
        log.warn("MD5 error creating class on turnitin");
        throw new SubmissionException("Cannot generate MD5 hash for Turnitin API call", t);
    }
    HttpsURLConnection connection;
    try {
        URL hostURL = new URL(apiURL);
        connection = (HttpsURLConnection) hostURL.openConnection();
        connection.setRequestMethod("GET");
        connection.setDoOutput(true);
        connection.setDoInput(true);
        log.info("HTTPS Connection made to Turnitin");
        OutputStream outStream = connection.getOutputStream();
        outStream.write("uid=".getBytes("UTF-8"));
        outStream.write(uid.getBytes("UTF-8"));
        outStream.write("&cid=".getBytes("UTF-8"));
        outStream.write(cid.getBytes("UTF-8"));
        outStream.write("&aid=".getBytes("UTF-8"));
        outStream.write(aid.getBytes("UTF-8"));
        outStream.write("&cpw=".getBytes("UTF-8"));
        outStream.write(cpw.getBytes("UTF-8"));
        outStream.write("&ctl=".getBytes("UTF-8"));
        outStream.write(ctl.getBytes("UTF-8"));
        outStream.write("&diagnostic=".getBytes("UTF-8"));
        outStream.write(diagnostic.getBytes("UTF-8"));
        outStream.write("&encrypt=".getBytes("UTF-8"));
        outStream.write(encrypt.getBytes("UTF-8"));
        outStream.write("&fcmd=".getBytes("UTF-8"));
        outStream.write(fcmd.getBytes("UTF-8"));
        outStream.write("&fid=".getBytes("UTF-8"));
        outStream.write(fid.getBytes("UTF-8"));
        outStream.write("&gmtime=".getBytes("UTF-8"));
        outStream.write(gmtime.getBytes("UTF-8"));
        outStream.write("&said=".getBytes("UTF-8"));
        outStream.write(said.getBytes("UTF-8"));
        outStream.write("&uem=".getBytes("UTF-8"));
        outStream.write(uem.getBytes("UTF-8"));
        outStream.write("&ufn=".getBytes("UTF-8"));
        outStream.write(ufn.getBytes("UTF-8"));
        outStream.write("&uln=".getBytes("UTF-8"));
        outStream.write(uln.getBytes("UTF-8"));
        outStream.write("&upw=".getBytes("UTF-8"));
        outStream.write(upw.getBytes("UTF-8"));
        outStream.write("&utp=".getBytes("UTF-8"));
        outStream.write(utp.getBytes("UTF-8"));
        outStream.write("&md5=".getBytes("UTF-8"));
        outStream.write(md5.getBytes("UTF-8"));
        outStream.close();
    }
    catch (Throwable t) {
        throw new SubmissionException("Class creation call to Turnitin API failed", t);
    }
    BufferedReader in;
    try {
        in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
    } catch (Throwable t) {
        throw new SubmissionException ("Cannot get Turnitin response. Assuming call was unsuccessful", t);
    }
    DOMParser parser = new DOMParser();
    try {
        parser.parse(new org.xml.sax.InputSource(in));
    } catch (Throwable t) {
        throw new SubmissionException ("Cannot parse Turnitin response. Assuming call was unsuccessful", t);
    }
    Document document = parser.getDocument();
    Element root = document.getDocumentElement();
    // rcode 20 and 21 are both treated as success (presumably "created" and
    // "already exists" -- confirm against the Turnitin API documentation).
    if (((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("20") == 0 ||
            ((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("21") == 0 ) {
        log.debug("Create Class successful");
    } else {
        throw new SubmissionException("Create Class not successful. Message: " + ((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim() + ". Code: " + ((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim());
    }
}
/**
 * Resolves the human-readable title of the Assignment entity referenced by
 * {@code taskId}.
 *
 * @param taskId a Sakai entity reference expected to point at an Assignment
 * @return the URL-decoded assignment title, or {@code taskId} itself when the
 *         reference cannot be resolved to an Assignment (or any error occurs)
 */
private String getAssignmentTitle(String taskId){
try {
Reference ref = entityManager.newReference(taskId);
log.debug("got ref " + ref + " of type: " + ref.getType());
EntityProducer ep = ref.getEntityProducer();
Entity ent = ep.getEntity(ref);
log.debug("got entity " + ent);
if (ent instanceof Assignment) {
Assignment as = (Assignment)ent;
log.debug("Got assignemment with title " + as.getTitle());
// Titles are stored URL-encoded; decode before handing back.
return URLDecoder.decode(as.getTitle(),"UTF-8");
}
} catch (Exception e) {
// Log with the cause instead of dumping to stderr; fall through to the
// raw taskId as a best-effort title.
log.warn("Could not resolve assignment title for task " + taskId, e);
}
return taskId;
}
/**
 * Creates the Turnitin assignment corresponding to a Sakai task, via
 * Turnitin API function 4 (fid=4, fcmd=2).
 *
 * @param siteId the Sakai site id, used as both Turnitin class id and class title
 * @param taskId the Sakai task reference, used as the Turnitin assignment id
 * @throws SubmissionException if signing, transport, or response parsing fails,
 *         or Turnitin returns a failure code
 */
private void createAssignment(String siteId, String taskId) throws SubmissionException {
//get the assignement reference
String taskTitle = getAssignmentTitle(taskId);
log.debug("Creating assignment for site: " + siteId + ", task: " + taskId +" tasktitle: " + taskTitle);
String diagnostic = "0"; //0 = off; 1 = on
SimpleDateFormat dform = ((SimpleDateFormat) DateFormat.getDateInstance());
dform.applyPattern("yyyyMMdd");
Calendar cal = Calendar.getInstance();
String dtstart = dform.format(cal.getTime());
//set the due dates for the assignments to be in 5 month's time
//turnitin automatically sets each class end date to 6 months after it is created
//the assignment end date must be on or before the class end date
//TODO use the 'secret' function to change this to longer
cal.add(Calendar.MONTH, 5);
String dtdue = dform.format(cal.getTime());
String encrypt = "0"; //encryption flag
String fcmd = "2"; //new assignment
String fid = "4"; //function id
String uem = defaultInstructorEmail;
String ufn = defaultInstructorFName;
String uln = defaultInstructorLName;
String utp = "2"; //user type 2 = instructor
String upw = defaultInstructorPassword;
String cid = siteId;
String uid = defaultInstructorId;
String assignid = taskId;
String assign = taskTitle;
String ctl = siteId;
String gmtime = getGMTime();
String assignEnc = assign;
try {
// A literal '&' would corrupt the url-encoded parameter string, so
// neutralise it in the title before sending.
if (assign.contains("&")) {
//log.debug("replacing & in assingment title");
assign = assign.replace('&', 'n');
}
assignEnc = assign;
log.debug("Assign title is " + assignEnc);
}
catch (Exception e) {
// Non-fatal: fall back to the unmodified title.
log.warn("Error sanitising assignment title", e);
}
// The md5 signature covers every parameter value (alphabetical by name)
// followed by the shared secret.
String md5_str = aid + assignEnc + assignid + cid + ctl + diagnostic + dtdue + dtstart + encrypt +
fcmd + fid + gmtime + said + uem + ufn + uid + uln + upw + utp + secretKey;
String md5;
try{
md5 = this.getMD5(md5_str);
} catch (Throwable t) {
log.warn("MD5 error creating assignment on turnitin");
throw new SubmissionException("Could not generate MD5 hash for \"Create Assignment\" Turnitin API call");
}
HttpsURLConnection connection;
try {
URL hostURL = new URL(apiURL);
connection = (HttpsURLConnection) hostURL.openConnection();
connection.setRequestMethod("GET");
connection.setDoOutput(true);
connection.setDoInput(true);
log.info("HTTPS connection made to Turnitin");
OutputStream outStream = connection.getOutputStream();
outStream.write("aid=".getBytes("UTF-8"));
outStream.write(aid.getBytes("UTF-8"));
outStream.write("&assign=".getBytes("UTF-8"));
outStream.write(assignEnc.getBytes("UTF-8"));
outStream.write("&assignid=".getBytes("UTF-8"));
outStream.write(assignid.getBytes("UTF-8"));
outStream.write("&cid=".getBytes("UTF-8"));
outStream.write(cid.getBytes("UTF-8"));
outStream.write("&uid=".getBytes("UTF-8"));
outStream.write(uid.getBytes("UTF-8"));
outStream.write("&ctl=".getBytes("UTF-8"));
outStream.write(ctl.getBytes("UTF-8"));
outStream.write("&diagnostic=".getBytes("UTF-8"));
outStream.write(diagnostic.getBytes("UTF-8"));
outStream.write("&dtdue=".getBytes("UTF-8"));
outStream.write(dtdue.getBytes("UTF-8"));
outStream.write("&dtstart=".getBytes("UTF-8"));
outStream.write(dtstart.getBytes("UTF-8"));
outStream.write("&encrypt=".getBytes("UTF-8"));
outStream.write(encrypt.getBytes("UTF-8"));
outStream.write("&fcmd=".getBytes("UTF-8"));
outStream.write(fcmd.getBytes("UTF-8"));
outStream.write("&fid=".getBytes("UTF-8"));
outStream.write(fid.getBytes("UTF-8"));
outStream.write("&gmtime=".getBytes("UTF-8"));
outStream.write(gmtime.getBytes("UTF-8"));
outStream.write("&said=".getBytes("UTF-8"));
outStream.write(said.getBytes("UTF-8"));
outStream.write("&uem=".getBytes("UTF-8"));
outStream.write(uem.getBytes("UTF-8"));
outStream.write("&ufn=".getBytes("UTF-8"));
outStream.write(ufn.getBytes("UTF-8"));
outStream.write("&uln=".getBytes("UTF-8"));
outStream.write(uln.getBytes("UTF-8"));
outStream.write("&upw=".getBytes("UTF-8"));
outStream.write(upw.getBytes("UTF-8"));
outStream.write("&utp=".getBytes("UTF-8"));
outStream.write(utp.getBytes("UTF-8"));
outStream.write("&md5=".getBytes("UTF-8"));
outStream.write(md5.getBytes("UTF-8"));
outStream.close();
}
catch (Throwable t) {
throw new SubmissionException("Assignment creation call to Turnitin API failed", t);
}
BufferedReader in;
try {
in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
} catch (Throwable t) {
throw new SubmissionException ("Cannot get Turnitin response. Assuming call was unsuccessful", t);
}
DOMParser parser = new DOMParser();
try {
parser.parse(new org.xml.sax.InputSource(in));
} catch (Throwable t) {
// Preserve the cause (it was previously dropped here, unlike the
// identical handlers elsewhere in this class).
throw new SubmissionException ("Cannot parse Turnitin response. Assuming call was unsuccessful", t);
}
Document document = parser.getDocument();
Element root = document.getDocumentElement();
int rcode = Integer.parseInt(((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim());
// This code treats 1-99 and 419 as success; other codes are failures.
if ((rcode > 0 && rcode < 100) || rcode == 419) {
log.debug("Create Assignment successful");
} else {
log.debug("Assignment creation failed with message: " + ((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim() + ". Code: " + rcode);
throw new SubmissionException("Create Assignment not successful. Message: " + ((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim() + ". Code: " + rcode);
}
}
/**
 * Enrolls a Sakai user as a student (utp=1) in the Turnitin class for a site,
 * via Turnitin API function 3 (fid=3, fcmd=2).
 *
 * @param userId the Sakai user id to enroll
 * @param uem the email address to register the user under (already validated)
 * @param siteId the site whose Turnitin class the user joins
 * @throws SubmissionException on user lookup, signing, transport or API failure
 */
private void enrollInClass(String userId, String uem, String siteId) throws SubmissionException {
String ctl = siteId; //class title
String fid = "3";
String fcmd = "2";
String encrypt = "0";
String diagnostic = "0";
String tem = defaultInstructorEmail;
User user;
try {
user = userDirectoryService.getUser(userId);
} catch (Throwable t) {
throw new SubmissionException ("Cannot get user information", t);
}
log.debug("Enrolling user " + user.getEid() + "(" + userId + ") in class " + siteId);
/* not using this as we may be getting email from profile
String uem = user.getEmail();
if (uem == null) {
throw new SubmissionException ("User has no email address");
}
*/
String ufn = user.getFirstName();
if (ufn == null) {
throw new SubmissionException ("User has no first name");
}
String uln = user.getLastName();
if (uln == null) {
throw new SubmissionException ("User has no last name");
}
String utp = "1";
String uid = userId;
String cid = siteId;
String gmtime = this.getGMTime();
// md5 signature over all parameter values plus the shared secret.
String md5_str = aid + cid + ctl + diagnostic + encrypt + fcmd + fid + gmtime + said + tem + uem +
ufn + uid + uln + utp + secretKey;
String md5;
try{
md5 = this.getMD5(md5_str);
} catch (Throwable t) {
log.warn("MD5 error enrolling student on turnitin");
throw new SubmissionException("Cannot generate MD5 hash for Class Enrollment Turnitin API call", t);
}
HttpsURLConnection connection;
try {
URL hostURL = new URL(apiURL);
connection = (HttpsURLConnection) hostURL.openConnection();
connection.setRequestMethod("GET");
connection.setDoOutput(true);
connection.setDoInput(true);
log.info("Connection made to Turnitin");
OutputStream outStream = connection.getOutputStream();
outStream.write("fid=".getBytes("UTF-8"));
outStream.write(fid.getBytes("UTF-8"));
outStream.write("&fcmd=".getBytes("UTF-8"));
outStream.write(fcmd.getBytes("UTF-8"));
outStream.write("&cid=".getBytes("UTF-8"));
outStream.write(cid.getBytes("UTF-8"));
// Charset is now given explicitly on every call; a few of these used the
// platform-default getBytes(), which is environment-dependent.
outStream.write("&tem=".getBytes("UTF-8"));
outStream.write(tem.getBytes("UTF-8"));
outStream.write("&ctl=".getBytes("UTF-8"));
outStream.write(ctl.getBytes("UTF-8"));
outStream.write("&encrypt=".getBytes("UTF-8"));
outStream.write(encrypt.getBytes("UTF-8"));
outStream.write("&aid=".getBytes("UTF-8"));
outStream.write(aid.getBytes("UTF-8"));
outStream.write("&said=".getBytes("UTF-8"));
outStream.write(said.getBytes("UTF-8"));
outStream.write("&diagnostic=".getBytes("UTF-8"));
outStream.write(diagnostic.getBytes("UTF-8"));
outStream.write("&uem=".getBytes("UTF-8"));
outStream.write(URLEncoder.encode(uem, "UTF-8").getBytes("UTF-8"));
outStream.write("&ufn=".getBytes("UTF-8"));
outStream.write(ufn.getBytes("UTF-8"));
outStream.write("&uln=".getBytes("UTF-8"));
outStream.write(uln.getBytes("UTF-8"));
outStream.write("&utp=".getBytes("UTF-8"));
outStream.write(utp.getBytes("UTF-8"));
outStream.write("&gmtime=".getBytes("UTF-8"));
outStream.write(URLEncoder.encode(gmtime, "UTF-8").getBytes("UTF-8"));
outStream.write("&md5=".getBytes("UTF-8"));
outStream.write(md5.getBytes("UTF-8"));
outStream.write("&uid=".getBytes("UTF-8"));
outStream.write(uid.getBytes("UTF-8"));
outStream.close();
}
catch (Throwable t) {
throw new SubmissionException("Student Enrollment call to Turnitin failed", t);
}
BufferedReader in;
try {
in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
} catch (Throwable t) {
throw new SubmissionException ("Cannot get Turnitin response. Assuming call was unsuccessful", t);
}
DOMParser parser = new DOMParser();
try {
parser.parse(new org.xml.sax.InputSource(in));
} catch (Throwable t) {
throw new SubmissionException ("Cannot parse Turnitin response. Assuming call was unsuccessful", t);
}
Document document = parser.getDocument();
Element root = document.getDocumentElement();
// This code treats rcode 30 or 31 as a successful enrollment.
if (((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("30") == 0 ||
((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("31") == 0 ) {
log.debug("Enrollment in Class successful");
} else {
throw new SubmissionException("Enrollment in Class not successful. Message: " + ((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim() + ". Code: " + ((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim());
}
}
/**
 * Works through the queue of content items awaiting submission to Turnitin.
 *
 * For each queued item this: checks retry limits, resolves the user and a
 * valid email, ensures the Turnitin class / enrollment / assignment exist,
 * then POSTs the file as multipart/form-data (fid=5, fcmd=2). Every failure
 * path records a status and error on the item and moves on to the next one.
 */
public void processQueue() {
log.debug("Processing submission queue");
// Gather items never submitted plus items flagged for retry.
ContentReviewItem searchItem = new ContentReviewItem();
searchItem.setContentId(null);
searchItem.setStatus(ContentReviewItem.NOT_SUBMITTED_CODE);
List notSubmittedItems = dao.findByExample(searchItem);
searchItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
notSubmittedItems.addAll(dao.findByExample(searchItem));
log.debug("Total list is now " + notSubmittedItems.size());
Iterator notSubmittedIterator = notSubmittedItems.iterator();
ContentReviewItem currentItem;
while (notSubmittedIterator.hasNext()) {
currentItem = (ContentReviewItem) notSubmittedIterator.next();
log.debug("Attempting to submit content: " + currentItem.getContentId() + " for user: " + currentItem.getUserId() + " and site: " + currentItem.getSiteId());
// Park items that have failed too many times.
if (currentItem.getRetryCount() == null ) {
currentItem.setRetryCount(new Long(0));
} else if (currentItem.getRetryCount().intValue() > maxRetry) {
currentItem.setStatus(ContentReviewItem.SUMBISSION_ERROR_RETRY_EXCEEDED);
dao.update(currentItem);
continue;
}
User user;
try {
user = userDirectoryService.getUser(currentItem.getUserId());
} catch (UserNotDefinedException e1) {
log.debug("Submission attempt unsuccessful - User not found: " + e1.getMessage());
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_NO_RETRY_CODE);
dao.update(currentItem);
continue;
}
String uem = getEmail(user);
if (uem == null ){
log.debug("User: " + user.getEid() + " has no valid email");
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_USER_DETAILS_CODE);
currentItem.setLastError("no valid email");
dao.update(currentItem);
continue;
}
// Check for null BEFORE trimming: the previous code called trim() first,
// which threw a NullPointerException (aborting the whole queue run) for
// users with no first/last name -- exactly the case the check guards.
String ufn = user.getFirstName();
if (ufn == null || ufn.trim().equals("")) {
log.debug("Submission attempt unsuccessful - User has no first name");
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_USER_DETAILS_CODE);
currentItem.setLastError("has no first name");
dao.update(currentItem);
continue;
}
ufn = ufn.trim();
String uln = user.getLastName();
if (uln == null || uln.trim().equals("")) {
log.debug("Submission attempt unsuccessful - User has no last name");
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_USER_DETAILS_CODE);
currentItem.setLastError("has no last name");
dao.update(currentItem);
continue;
}
uln = uln.trim();
// Make sure class, enrollment and assignment exist on the Turnitin side.
try {
createClass(currentItem.getSiteId());
} catch (Throwable t) {
log.debug ("Submission attempt unsuccessful: Could not create class", t);
if (t.getClass() == IOException.class) {
currentItem.setLastError("Class creation error: " + t.getMessage());
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
} else {
currentItem.setLastError("Class creation error: " + t.getMessage());
if (t.getMessage().equals("Class creation call to Turnitin API failed"))
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
else
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_NO_RETRY_CODE);
}
dao.update(currentItem);
continue;
}
try {
enrollInClass(currentItem.getUserId(), uem, currentItem.getSiteId());
} catch (Throwable t) {
log.debug ("Submission attempt unsuccessful: Could not enroll user in class", t);
if (t.getClass() == IOException.class) {
currentItem.setLastError("Enrolment error: " + t.getMessage());
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
} else {
currentItem.setLastError("Enrolment error: " + t.getMessage());
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
}
dao.update(currentItem);
continue;
}
try {
createAssignment(currentItem.getSiteId(), currentItem.getTaskId());
} catch (Throwable t) {
// Include the throwable in the log, like the sibling handlers above.
log.debug ("Submission attempt unsuccessful: Could not create assignment", t);
if (t.getClass() == IOException.class) {
currentItem.setLastError("Assign creation error: " + t.getMessage());
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
} else {
//this is a to be expected error
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_NO_RETRY_CODE);
currentItem.setLastError("createAssignment: " + t.getMessage());
}
dao.update(currentItem);
continue;
}
//get all the info for the api call
//we do this before connecting so that if there is a problem we can jump out - saves time
//these errors should probably be caught when a student is enrolled in a class
//but we check again here to be sure
String diagnostic = "0";
String encrypt = "0";
String fcmd = "2";
String fid = "5";
//to get the name of the initial submited file we need the title
ContentResource resource = null;
ResourceProperties resourceProperties = null;
String fileName = null;
try {
resource = contentHostingService.getResource(currentItem.getContentId());
resourceProperties = resource.getProperties();
fileName = resourceProperties.getProperty(resourceProperties.getNamePropDisplayName());
log.debug("origional filename is: " + fileName);
if (fileName == null) {
//use the id
fileName = currentItem.getContentId();
} else if (fileName.length() > 199) {
// Truncate long display names -- presumably to stay within a
// Turnitin title-length limit; TODO confirm the exact limit.
fileName = fileName.substring(0, 199);
}
}
catch (PermissionException e2) {
log.debug("Submission failed due to permission error: " + e2.getMessage());
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_NO_RETRY_CODE);
currentItem.setLastError(e2.getMessage());
dao.update(currentItem);
continue;
}
catch (IdUnusedException e4) {
log.debug("Submission failed due to content ID error: " + e4.getMessage());
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_NO_RETRY_CODE);
currentItem.setLastError(e4.getMessage());
dao.update(currentItem);
continue;
}
catch (TypeException e) {
log.debug("Submission failed due to content Type error: " + e.getMessage());
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_NO_RETRY_CODE);
currentItem.setLastError(e.getMessage());
dao.update(currentItem);
continue;
}
String ptl = currentItem.getUserId() + ":" + fileName;
String ptype = "2";
String tem = defaultInstructorEmail;
String utp = "1";
String uid = currentItem.getUserId();
String cid = currentItem.getSiteId();
String assignid = currentItem.getTaskId();
String assign = getAssignmentTitle(currentItem.getTaskId());
String ctl = currentItem.getSiteId();
String gmtime = this.getGMTime();
String md5_str = aid + assign + assignid + cid + ctl
+ diagnostic + encrypt + fcmd + fid + gmtime + ptl
+ ptype + said + tem + uem + ufn + uid + uln + utp
+ secretKey;
String md5;
try{
md5 = this.getMD5(md5_str);
} catch (NoSuchAlgorithmException e) {
log.debug("Submission attempt failed due to MD5 generation error");
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
currentItem.setLastError("MD5 error");
dao.update(currentItem);
continue;
}
String boundary = "";
OutputStream outStream = null;
HttpsURLConnection connection;
try {
URL hostURL = new URL(apiURL);
connection = (HttpsURLConnection) hostURL.openConnection();
connection.setRequestMethod("POST");
connection.setDoOutput(true);
connection.setDoInput(true);
Random rand = new Random();
//make up a boundary that should be unique
boundary = Long.toString(rand.nextLong(), 26)
+ Long.toString(rand.nextLong(), 26)
+ Long.toString(rand.nextLong(), 26);
// set up the connection to use multipart/form-data
connection.setRequestProperty("Content-Type","multipart/form-data; boundary=" + boundary);
log.info("HTTPS connection made to Turnitin");
outStream = connection.getOutputStream();
outStream.write(encodeParam("assignid", assignid, boundary).getBytes());
outStream.write(encodeParam("uid", uid, boundary).getBytes());
outStream.write(encodeParam("cid", cid, boundary).getBytes());
outStream.write(encodeParam("aid", aid, boundary).getBytes());
outStream.write(encodeParam("assign", assign, boundary).getBytes());
outStream.write(encodeParam("ctl", ctl, boundary).getBytes());
outStream.write(encodeParam("diagnostic", diagnostic, boundary).getBytes());
outStream.write(encodeParam("encrypt", encrypt, boundary).getBytes());
outStream.write(encodeParam("fcmd", fcmd, boundary).getBytes());
outStream.write(encodeParam("fid", fid, boundary).getBytes());
outStream.write(encodeParam("gmtime", gmtime, boundary).getBytes());
outStream.write(encodeParam("ptype", ptype, boundary).getBytes());
outStream.write(encodeParam("ptl", ptl, boundary).getBytes());
outStream.write(encodeParam("said", said, boundary).getBytes());
outStream.write(encodeParam("tem", tem, boundary).getBytes());
outStream.write(encodeParam("uem", uem, boundary).getBytes());
outStream.write(encodeParam("ufn", ufn, boundary).getBytes());
outStream.write(encodeParam("uln", uln, boundary).getBytes());
outStream.write(encodeParam("utp", utp, boundary).getBytes());
outStream.write(encodeParam("md5", md5, boundary).getBytes());
// put in the actual file
outStream.write(("--" + boundary
+ "\r\nContent-Disposition: form-data; name=\"pdata\"; filename=\""
+ currentItem.getContentId() + "\"\r\n"
+ "Content-Type: " + resource.getContentType()
+ "\r\ncontent-transfer-encoding: binary" + "\r\n\r\n")
.getBytes());
outStream.write(resource.getContent());
outStream.write("\r\n".getBytes("UTF-8"));
outStream.write(("--" + boundary + "--").getBytes());
outStream.close();
} catch (IOException e1) {
log.debug("Submission failed due to IO error: " + e1.getMessage());
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
currentItem.setLastError(e1.getMessage());
dao.update(currentItem);
continue;
}
catch (ServerOverloadException e3) {
log.debug("Submission failed due to server error: " + e3.getMessage());
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
currentItem.setLastError(e3.getMessage());
dao.update(currentItem);
continue;
}
BufferedReader in;
try {
in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
} catch (IOException e1) {
log.debug("Unable to determine Submission status due to response IO error: " + e1.getMessage() + ". Assume unsuccessful");
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
dao.update(currentItem);
continue;
}
DOMParser parser = new DOMParser();
try {
parser.parse(new org.xml.sax.InputSource(in));
} catch (SAXException e) {
log.error("Unable to determine Submission status due to response parsing error: " + e.getMessage() + ". Assume unsuccessful");
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
dao.update(currentItem);
continue;
} catch (IOException e) {
log.warn("Unable to determine Submission status due to response IO error: " + e.getMessage() + ". Assume unsuccessful");
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
dao.update(currentItem);
continue;
}
Document document = parser.getDocument();
Element root = document.getDocumentElement();
// rcode 51 is treated as an accepted submission; the returned objectID
// identifies the paper for later report lookups.
if (((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("51") == 0) {
log.debug("Submission successful");
currentItem.setExternalId(((CharacterData) (root.getElementsByTagName("objectID").item(0).getFirstChild())).getData().trim());
currentItem.setStatus(ContentReviewItem.SUBMITTED_AWAITING_REPORT_CODE);
currentItem.setDateSubmitted(new Date());
dao.update(currentItem);
} else {
log.debug("Submission not successful: " + ((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim());
if (((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim().equals("User password does not match user email")) {
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
} else {
currentItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_NO_RETRY_CODE);
}
currentItem.setLastError("Submission Error: " +((CharacterData) (root.getElementsByTagName("rmessage").item(0).getFirstChild())).getData().trim());
dao.update(currentItem);
}
}
log.debug("Submission queue processed");
}
/**
 * Builds the timestamp string Turnitin expects: "yyyyMMddHH" in GMT followed
 * by a single digit giving the tens-of-minutes within the hour.
 *
 * @return an 11-character GMT timestamp, e.g. "20240101153" for 15:3x GMT
 */
private String getGMTime() {
TimeZone gmt = TimeZone.getTimeZone("GMT");
SimpleDateFormat formatter = (SimpleDateFormat) DateFormat.getDateInstance();
formatter.applyPattern("yyyyMMddHH");
formatter.setTimeZone(gmt);
Calendar now = Calendar.getInstance(gmt);
// Integer division truncates toward zero, which for a non-negative minute
// is the same floor(minute / 10) the API requires.
int tensOfMinutes = now.get(Calendar.MINUTE) / 10;
return formatter.format(now.getTime()) + tensOfMinutes;
}
/**
 * Computes the lowercase hex MD5 digest of the given string, used to sign
 * Turnitin API calls.
 *
 * NOTE(review): the digest input uses the platform default charset
 * (getBytes() with no argument). Kept as-is because changing it would change
 * the signatures sent to Turnitin on non-default-charset platforms.
 *
 * @param md5_string the concatenated parameter string to digest
 * @return 32 lowercase hex characters
 * @throws NoSuchAlgorithmException if MD5 is unavailable in this JVM
 */
private String getMD5(String md5_string) throws NoSuchAlgorithmException {
MessageDigest md = MessageDigest.getInstance("MD5");
md.update(md5_string.getBytes());
// Hex-encode the 16-byte digest: two lowercase hex chars per byte.
// (StringBuilder replaces the previous O(n^2) String concatenation.)
StringBuilder hex = new StringBuilder(32);
for (byte b : md.digest()) {
hex.append(Integer.toHexString((b >> 4) & 0x0f)); // high nibble
hex.append(Integer.toHexString(b & 0x0f)); // low nibble
}
return hex.toString();
}
/**
 * Polls Turnitin for originality reports on all items awaiting one.
 *
 * For each awaiting item, the report list for its site/task is fetched once
 * (fid=10, fcmd=2) and cached in reportTable keyed by Turnitin objectID, so
 * several items from the same assignment share a single API call. Items whose
 * report has arrived get their score and SUBMITTED_REPORT_AVAILABLE_CODE.
 *
 * NOTE(review): the error handlers below are deliberately uneven -- some
 * break out of the whole loop (transport errors, likely affecting every
 * item), some continue to the next item (parse errors) -- preserved as-is.
 */
public void checkForReports() {
// get the list of all items that are waiting for reports
List awaitingReport = dao.findByProperties(ContentReviewItem.class,
new String[] { "status" },
new Object[] { ContentReviewItem.SUBMITTED_AWAITING_REPORT_CODE});
awaitingReport.addAll(dao.findByProperties(ContentReviewItem.class,
new String[] { "status" },
new Object[] { ContentReviewItem.REPORT_ERROR_RETRY_CODE}));
Iterator listIterator = awaitingReport.iterator();
// Maps Turnitin objectID -> Integer overlap score (-1 = not ready yet).
HashMap reportTable = new HashMap();
log.debug("There are " + awaitingReport.size() + " submissions awaiting reports");
ContentReviewItem currentItem;
while (listIterator.hasNext()) {
currentItem = (ContentReviewItem) listIterator.next();
// Only hit the API if this item's report list hasn't been fetched yet.
if (!reportTable.containsKey(currentItem.getExternalId())) {
// get the list from turnitin and see if the review is available
log.debug("Attempting to update hashtable with reports for site " + currentItem.getSiteId());
String diagnostic = "0";
String encrypt = "0";
String fcmd = "2";
String fid = "10"; // Turnitin function 10: list reports for an assignment
String tem = defaultInstructorEmail;
String uem = defaultInstructorEmail;
String ufn = defaultInstructorFName;
String uln = defaultInstructorLName;
String utp = "2"; // instructor
String uid = defaultInstructorId;
String cid = currentItem.getSiteId();
String assignid = currentItem.getTaskId();
String assign = currentItem.getTaskId();
String ctl = currentItem.getSiteId();
String gmtime = this.getGMTime();
// md5 signature over all parameter values plus the shared secret.
String md5_str = aid + assign + assignid + cid + ctl
+ diagnostic + encrypt + fcmd + fid + gmtime + said
+ tem + uem + ufn + uid + uln + utp + secretKey;
String md5;
try{
md5 = this.getMD5(md5_str);
} catch (NoSuchAlgorithmException e) {
log.debug("Update failed due to MD5 generation error");
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_NO_RETRY_CODE);
currentItem.setLastError("MD5 generation error");
dao.update(currentItem);
listIterator.remove();
break;
}
HttpsURLConnection connection;
try {
URL hostURL = new URL(apiURL);
connection = (HttpsURLConnection) hostURL.openConnection();
connection.setRequestMethod("GET");
connection.setDoOutput(true);
connection.setDoInput(true);
log.info("HTTPS connection made to Turnitin");
OutputStream out = connection.getOutputStream();
out.write("fid=".getBytes("UTF-8"));
out.write(fid.getBytes("UTF-8"));
out.write("&fcmd=".getBytes("UTF-8"));
out.write(fcmd.getBytes("UTF-8"));
out.write("&uid=".getBytes("UTF-8"));
out.write(uid.getBytes("UTF-8"));
out.write("&tem=".getBytes("UTF-8"));
out.write(tem.getBytes("UTF-8"));
out.write("&assign=".getBytes("UTF-8"));
out.write(assign.getBytes("UTF-8"));
out.write("&assignid=".getBytes("UTF-8"));
out.write(assignid.getBytes("UTF-8"));
out.write("&cid=".getBytes("UTF-8"));
out.write(cid.getBytes("UTF-8"));
out.write("&ctl=".getBytes("UTF-8"));
out.write(ctl.getBytes("UTF-8"));
// NOTE(review): parameterless getBytes() here uses the platform
// default charset, unlike every other call -- harmless for "&encrypt="
// (pure ASCII) but inconsistent.
out.write("&encrypt=".getBytes());
out.write(encrypt.getBytes("UTF-8"));
out.write("&aid=".getBytes("UTF-8"));
out.write(aid.getBytes("UTF-8"));
out.write("&said=".getBytes("UTF-8"));
out.write(said.getBytes("UTF-8"));
out.write("&diagnostic=".getBytes("UTF-8"));
out.write(diagnostic.getBytes("UTF-8"));
out.write("&uem=".getBytes("UTF-8"));
out.write(URLEncoder.encode(uem, "UTF-8").getBytes("UTF-8"));
out.write("&ufn=".getBytes("UTF-8"));
out.write(ufn.getBytes("UTF-8"));
out.write("&uln=".getBytes("UTF-8"));
out.write(uln.getBytes("UTF-8"));
out.write("&utp=".getBytes("UTF-8"));
out.write(utp.getBytes("UTF-8"));
out.write("&gmtime=".getBytes("UTF-8"));
out.write(URLEncoder.encode(gmtime, "UTF-8").getBytes("UTF-8"));
out.write("&md5=".getBytes("UTF-8"));
out.write(md5.getBytes("UTF-8"));
out.close();
} catch (IOException e) {
// Transport failure: mark this item for retry and stop the whole run.
log.debug("Update failed due to IO error: " + e.getMessage());
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
currentItem.setLastError(e.getMessage());
dao.update(currentItem);
break;
}
BufferedReader in;
try{
in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
} catch (IOException e) {
log.debug("Update failed due to IO error: " + e.getMessage());
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
currentItem.setLastError(e.getMessage());
dao.update(currentItem);
break;
}
DOMParser parser = new DOMParser();
try{
parser.parse(new InputSource(in));
} catch (SAXException e1) {
log.error("Update failed due to Parsing error: " + e1.getMessage());
log.debug(e1.toString());
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
currentItem.setLastError(e1.getMessage());
dao.update(currentItem);
//we may as well go on as the document may be in the part of the file that was parsed
continue;
} catch (IOException e2) {
log.warn("Update failed due to IO error: " + e2.getMessage());
currentItem.setStatus(ContentReviewItem.REPORT_ERROR_RETRY_CODE);
currentItem.setLastError(e2.getMessage());
dao.update(currentItem);
continue;
}
Document document = parser.getDocument();
Element root = document.getDocumentElement();
// rcode 72 is treated as a successful report-list response.
if (((CharacterData) (root.getElementsByTagName("rcode").item(0).getFirstChild())).getData().trim().compareTo("72") == 0) {
log.debug("Report list returned successfully");
NodeList objects = root.getElementsByTagName("object");
String objectId;
String similarityScore;
String overlap = "";
log.debug(objects.getLength() + " objects in the returned list");
// Cache every paper's overlap; -1 similarity means "report not ready".
for (int i=0; i<objects.getLength(); i++) {
similarityScore = ((CharacterData) (((Element)(objects.item(i))).getElementsByTagName("similarityScore").item(0).getFirstChild())).getData().trim();
objectId = ((CharacterData) (((Element)(objects.item(i))).getElementsByTagName("objectID").item(0).getFirstChild())).getData().trim();
if (similarityScore.compareTo("-1") != 0) {
overlap = ((CharacterData) (((Element)(objects.item(i))).getElementsByTagName("overlap").item(0).getFirstChild())).getData().trim();
reportTable.put(objectId, new Integer(overlap));
} else {
reportTable.put(objectId, new Integer(-1));
}
log.debug("objectId: " + objectId + " similarity: " + similarityScore + " overlap: " + overlap);
}
} else {
log.debug("Report list request not successful");
log.debug(document.toString());
}
}
int reportVal;
// check if the report value is now there (there may have been a
// failure to get the list above)
if (reportTable.containsKey(currentItem.getExternalId())) {
reportVal = ((Integer) (reportTable.get(currentItem
.getExternalId()))).intValue();
log.debug("reportVal for " + currentItem.getExternalId() + ": " + reportVal);
if (reportVal != -1) {
// Report has arrived: record the score and mark the item done.
currentItem.setReviewScore(reportVal);
currentItem
.setStatus(ContentReviewItem.SUBMITTED_REPORT_AVAILABLE_CODE);
currentItem.setDateReportReceived(new Date());
dao.update(currentItem);
log.debug("new report received: " + currentItem.getExternalId() + " -> " + currentItem.getReviewScore());
}
}
}
}
/**
 * Lists all items in the given site and task whose originality report has
 * already arrived.
 *
 * @param siteId the site to filter by
 * @param taskId the task to filter by
 * @return items with status SUBMITTED_REPORT_AVAILABLE_CODE
 */
public List getReportList(String siteId, String taskId) {
log.debug("Returning list of reports for site: " + siteId + ", task: " + taskId);
ContentReviewItem example = new ContentReviewItem(null, siteId, taskId, null, null,
ContentReviewItem.SUBMITTED_REPORT_AVAILABLE_CODE);
return dao.findByExample(example);
}
/**
 * Lists all items in the given site (any task) whose originality report has
 * already arrived.
 *
 * @param siteId the site to filter by
 * @return items with status SUBMITTED_REPORT_AVAILABLE_CODE
 */
public List getReportList(String siteId) {
log.debug("Returning list of reports for site: " + siteId);
ContentReviewItem example = new ContentReviewItem(null, siteId, null, null, null,
ContentReviewItem.SUBMITTED_REPORT_AVAILABLE_CODE);
return dao.findByExample(example);
}
/** Returns the name under which this review service is registered. */
public String getServiceName() {
return SERVICE_NAME;
}
/**
 * Re-queues every item for this user that was parked with
 * SUBMISSION_ERROR_USER_DETAILS_CODE (e.g. after the user fixed a missing
 * email or name), flipping each back to SUBMISSION_ERROR_RETRY_CODE.
 *
 * @param userId the user whose stalled items should be retried
 */
public void resetUserDetailsLockedItems(String userId) {
ContentReviewItem example = new ContentReviewItem();
example.setContentId(null);
example.setStatus(ContentReviewItem.SUBMISSION_ERROR_USER_DETAILS_CODE);
example.setUserId(userId);
List lockedItems = dao.findByExample(example);
for (Iterator it = lockedItems.iterator(); it.hasNext();) {
ContentReviewItem lockedItem = (ContentReviewItem) it.next();
lockedItem.setStatus(ContentReviewItem.SUBMISSION_ERROR_RETRY_CODE);
dao.update(lockedItem);
}
}
/**
 * Maps an originality score to the URL of the colour-coded indicator icon:
 * 0 = blue, 1-24 = green, 25-49 = yellow, 50-74 = orange, 75+ = red.
 *
 * @param score the overlap percentage; must not be null
 * @return a webapp-relative gif URL
 */
public String getIconUrlforScore(Long score) {
String urlBase = "/sakai-content-review-tool/images/score_";
String suffix = ".gif";
// Compare as a primitive instead of allocating boxed Longs (the previous
// new Long(...) per comparison is wasteful and the constructor deprecated).
long s = score.longValue();
if (s == 0) {
return urlBase + "blue" + suffix;
} else if (s < 25) {
return urlBase + "green" + suffix;
} else if (s < 50) {
return urlBase + "yellow" + suffix;
} else if (s < 75) {
return urlBase + "orange" + suffix;
} else {
return urlBase + "red" + suffix;
}
}
/**
 * Whether this resource may be queued for Turnitin review.
 * Currently unconditionally true -- no mime-type or size filtering is done
 * here, so unsupported formats only fail later at submission time.
 */
public boolean isAcceptableContent(ContentResource resource) {
//for now we accept all content
return true;
}
/** Whether the given site may use this review service; currently always true. */
public boolean isSiteAcceptable(Site s) {
return true;
}
/**
 * Checks whether an address is one the Turnitin service will accept.
 *
 * @param email the candidate address; may be null
 * @return true if the trimmed address matches the expected structure
 */
private boolean isValidEmail(String email) {
if (email == null || email.equals("")) {
return false;
}
String candidate = email.trim();
// Quick rejects: the address must contain '@' and may not contain spaces.
if (candidate.indexOf("@") == -1) {
return false;
}
if (candidate.indexOf(" ") > 0) {
return false;
}
// Full structural check: local-part, '@', dotted domain labels.
return candidate.matches("^[_A-Za-z0-9-]+(\\.[_A-Za-z0-9-]+)*@[A-Za-z0-9-]+(\\.[A-Za-z0-9-]+)*$");
}
// returns null if no valid email exits
/**
 * Resolves a usable email address for a user, consulting the user record and
 * the system profile in the order configured by preferSystemProfileEmail.
 *
 * Null-safe: the previous version called trim() directly on
 * user.getEmail() / sp.getMail(), which threw a NullPointerException for
 * users with no address on record.
 *
 * @param user the user to resolve an address for
 * @return a valid email address, or null if none can be found
 */
private String getEmail(User user) {
String uem = null;
log.debug("Looking for email for " + user.getEid() + " with authorative email set to " + this.preferSystemProfileEmail);
if (!this.preferSystemProfileEmail) {
uem = trimmedOrNull(user.getEmail());
log.debug("got email of " + uem);
// isValidEmail handles null/empty, so a single check suffices.
if (!isValidEmail(uem)) {
//try the systemProfile
SakaiPerson sp = sakaiPersonManager.getSakaiPerson(user.getId(), sakaiPersonManager.getSystemMutableType());
if (sp != null ) {
String uem2 = trimmedOrNull(sp.getMail());
log.debug("Got system email of " + uem2);
uem = isValidEmail(uem2) ? uem2 : null;
} else {
log.debug("this user has no systemMutable profile");
uem = null;
}
}
} else {
//try sakaiperson first
log.debug("try authoratative email first");
SakaiPerson sp = sakaiPersonManager.getSakaiPerson(user.getId(), sakaiPersonManager.getSystemMutableType());
String uem2 = (sp == null) ? null : trimmedOrNull(sp.getMail());
if (sp != null) {
log.debug("Got system email of " + uem2);
}
if (isValidEmail(uem2)) {
uem = uem2;
} else {
// Fall back to the user record's own address.
uem = trimmedOrNull(user.getEmail());
if (!isValidEmail(uem)) {
uem = null;
}
}
}
return uem;
}

/** Null-safe trim: returns the trimmed string, or null for null input. */
private String trimmedOrNull(String s) {
return s == null ? null : s.trim();
}
/**
 * @return always true; this service permits resubmission of content
 */
public boolean allowResubmission() {
return true;
}
/**
 * Drains a reader into a single string.
 * <p>
 * Bug fix: the old implementation called {@code retval.concat(inputLine)}
 * and discarded the result (String is immutable), so it always returned
 * the empty string. Line terminators are still not preserved, since
 * {@code BufferedReader.readLine()} strips them.
 *
 * @param in reader to drain; not closed by this method
 * @return the concatenated lines, or whatever was read before an error occurred
 */
private String readerToString(BufferedReader in) {
    StringBuilder retval = new StringBuilder();
    try {
        String inputLine;
        while ((inputLine = in.readLine()) != null) {
            retval.append(inputLine);
        }
    } catch (Exception e) {
        // Best-effort: keep whatever was read so far, as before.
        log.error("Error reading stream into a string", e);
    }
    return retval.toString();
}
}
| Code cleanup and notes
git-svn-id: 4e8da3c7dac66920e663ced6f23cd9ae43b37078@9079 fdecad78-55fc-0310-b1b2-d7d25cf747c9
| contentreview-impl/turnitin/src/java/org/sakaiproject/contentreview/impl/turnitin/TurnitinReviewServiceImpl.java | Code cleanup and notes |
|
Java | apache-2.0 | 079a61d111b759ccd7333049e5a2fb18fc795794 | 0 | b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl | /*
* Copyright 2018-2021 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.fhir.rest.tests;
import org.junit.ClassRule;
import org.junit.rules.RuleChain;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
import com.b2international.snowowl.fhir.rest.tests.batch.FhirBatchApiRestTest;
import com.b2international.snowowl.fhir.rest.tests.codesystem.FhirCodeSystemApiTest;
import com.b2international.snowowl.fhir.rest.tests.codesystem.FhirCodeSystemLookupOperationTest;
import com.b2international.snowowl.fhir.rest.tests.codesystem.FhirCodeSystemSubsumesOperationTest;
import com.b2international.snowowl.fhir.rest.tests.codesystem.FhirCodeSystemValidateCodeOperationTest;
import com.b2international.snowowl.snomed.core.domain.Rf2ReleaseType;
import com.b2international.snowowl.test.commons.BundleStartRule;
import com.b2international.snowowl.test.commons.Resources;
import com.b2international.snowowl.test.commons.SnomedContentRule;
import com.b2international.snowowl.test.commons.SnowOwlAppRule;
/**
 * FHIR test suite for RESTful operations.
 * @since 6.6
 */
@RunWith(Suite.class)
@SuiteClasses({
// CodeSystem API
FhirCodeSystemApiTest.class,
FhirCodeSystemLookupOperationTest.class,
FhirCodeSystemSubsumesOperationTest.class,
FhirCodeSystemValidateCodeOperationTest.class,
// ValueSet API
// SnomedValueSetRestTest.class,
// ExpandSnomedRestTest.class,
// ConceptMap API
// SnomedConceptMapRestTest.class,
// TranslateSnomedConceptMapRestTest.class,
//Batch
// FhirBatchApiRestTest.class,
})
public class AllFhirRestTests {
// Shared fixture for the whole suite. Rules apply outermost-first: start the
// Snow Owl application, then the Jetty OSGi bundle, then the REST bundle, and
// finally import the minified 20210731 RF2 FULL archive, stopping at the
// 20200131 effective time (importUntil).
@ClassRule
public static final RuleChain APPRULE = RuleChain
.outerRule(SnowOwlAppRule.snowOwl(AllFhirRestTests.class))
.around(new BundleStartRule("org.eclipse.jetty.osgi.boot"))
.around(new BundleStartRule("com.b2international.snowowl.core.rest"))
.around(new SnomedContentRule(SnomedContentRule.SNOMEDCT, Resources.Snomed.MINI_RF2_INT_20210731, Rf2ReleaseType.FULL).importUntil("20200131"));
}
| fhir/com.b2international.snowowl.fhir.rest.tests/src/com/b2international/snowowl/fhir/rest/tests/AllFhirRestTests.java | /*
* Copyright 2018-2021 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.fhir.rest.tests;
import org.junit.ClassRule;
import org.junit.rules.RuleChain;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
import com.b2international.snowowl.fhir.rest.tests.batch.FhirBatchApiRestTest;
import com.b2international.snowowl.fhir.rest.tests.codesystem.FhirCodeSystemApiTest;
import com.b2international.snowowl.fhir.rest.tests.codesystem.FhirCodeSystemLookupOperationTest;
import com.b2international.snowowl.fhir.rest.tests.codesystem.FhirCodeSystemSubsumesOperationTest;
import com.b2international.snowowl.fhir.rest.tests.codesystem.FhirCodeSystemValidateCodeOperationTest;
import com.b2international.snowowl.snomed.core.domain.Rf2ReleaseType;
import com.b2international.snowowl.test.commons.BundleStartRule;
import com.b2international.snowowl.test.commons.Resources;
import com.b2international.snowowl.test.commons.SnomedContentRule;
import com.b2international.snowowl.test.commons.SnowOwlAppRule;
/**
 * FHIR test suite for RESTful operations.
 * @since 6.6
 */
@RunWith(Suite.class)
@SuiteClasses({
// CodeSystem API
FhirCodeSystemApiTest.class,
FhirCodeSystemLookupOperationTest.class,
FhirCodeSystemSubsumesOperationTest.class,
FhirCodeSystemValidateCodeOperationTest.class,
// ValueSet API
// SnomedValueSetRestTest.class,
// ExpandSnomedRestTest.class,
// ConceptMap API
// SnomedConceptMapRestTest.class,
// TranslateSnomedConceptMapRestTest.class,
//Batch
// FhirBatchApiRestTest.class,
})
public class AllFhirRestTests {
// Shared fixture for the whole suite. Rules apply outermost-first: start the
// Snow Owl application, then the Jetty OSGi bundle, then the REST bundle, and
// finally import the minified 20200131 RF2 FULL archive.
@ClassRule
public static final RuleChain APPRULE = RuleChain
.outerRule(SnowOwlAppRule.snowOwl(AllFhirRestTests.class))
.around(new BundleStartRule("org.eclipse.jetty.osgi.boot"))
.around(new BundleStartRule("com.b2international.snowowl.core.rest"))
.around(new SnomedContentRule(SnomedContentRule.SNOMEDCT, Resources.Snomed.MINI_RF2_INT_20200131, Rf2ReleaseType.FULL));
}
| fix(test): use 20210731 minified archive until 20200131 in fhir tests | fhir/com.b2international.snowowl.fhir.rest.tests/src/com/b2international/snowowl/fhir/rest/tests/AllFhirRestTests.java | fix(test): use 20210731 minified archive until 20200131 in fhir tests |
|
Java | apache-2.0 | 39b5b7ae6c8bff1c942b0e735a63fe8f7db4335f | 0 | gradle/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle | /*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.integtests.fixtures.executer;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.common.io.CharSource;
import groovy.lang.Closure;
import groovy.lang.DelegatesTo;
import org.gradle.api.Action;
import org.gradle.api.JavaVersion;
import org.gradle.api.Transformer;
import org.gradle.api.UncheckedIOException;
import org.gradle.api.internal.initialization.DefaultClassLoaderScope;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.logging.configuration.ConsoleOutput;
import org.gradle.api.logging.configuration.WarningMode;
import org.gradle.cache.internal.DefaultGeneratedGradleJarCache;
import org.gradle.integtests.fixtures.RepoScriptBlockUtil;
import org.gradle.integtests.fixtures.daemon.DaemonLogsAnalyzer;
import org.gradle.internal.ImmutableActionSet;
import org.gradle.internal.MutableActionSet;
import org.gradle.internal.UncheckedException;
import org.gradle.internal.featurelifecycle.LoggingDeprecatedFeatureHandler;
import org.gradle.internal.jvm.Jvm;
import org.gradle.internal.jvm.inspection.JvmVersionDetector;
import org.gradle.internal.logging.LoggingManagerInternal;
import org.gradle.internal.logging.services.DefaultLoggingManagerFactory;
import org.gradle.internal.logging.services.LoggingServiceRegistry;
import org.gradle.internal.nativeintegration.console.TestOverrideConsoleDetector;
import org.gradle.internal.nativeintegration.services.NativeServices;
import org.gradle.internal.service.ServiceRegistry;
import org.gradle.internal.service.ServiceRegistryBuilder;
import org.gradle.internal.service.scopes.GlobalScopeServices;
import org.gradle.internal.service.scopes.GradleUserHomeScopeServices;
import org.gradle.launcher.cli.DefaultCommandLineActionFactory;
import org.gradle.launcher.daemon.configuration.DaemonBuildOptions;
import org.gradle.process.internal.streams.SafeStreams;
import org.gradle.test.fixtures.file.TestDirectoryProvider;
import org.gradle.test.fixtures.file.TestFile;
import org.gradle.testfixtures.internal.NativeServicesTestFixture;
import org.gradle.util.ClosureBackedAction;
import org.gradle.util.CollectionUtils;
import org.gradle.util.GradleVersion;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.gradle.api.internal.artifacts.BaseRepositoryFactory.PLUGIN_PORTAL_OVERRIDE_URL_PROPERTY;
import static org.gradle.integtests.fixtures.RepoScriptBlockUtil.gradlePluginRepositoryMirrorUrl;
import static org.gradle.integtests.fixtures.executer.AbstractGradleExecuter.CliDaemonArgument.DAEMON;
import static org.gradle.integtests.fixtures.executer.AbstractGradleExecuter.CliDaemonArgument.FOREGROUND;
import static org.gradle.integtests.fixtures.executer.AbstractGradleExecuter.CliDaemonArgument.NOT_DEFINED;
import static org.gradle.integtests.fixtures.executer.AbstractGradleExecuter.CliDaemonArgument.NO_DAEMON;
import static org.gradle.integtests.fixtures.executer.OutputScrapingExecutionResult.STACK_TRACE_ELEMENT;
import static org.gradle.internal.service.scopes.DefaultGradleUserHomeScopeServiceRegistry.REUSE_USER_HOME_SERVICES;
import static org.gradle.util.CollectionUtils.collect;
import static org.gradle.util.CollectionUtils.join;
public abstract class AbstractGradleExecuter implements GradleExecuter {
// Process-wide service registry shared by every executer instance.
protected static final ServiceRegistry GLOBAL_SERVICES = ServiceRegistryBuilder.builder()
.displayName("Global services")
.parent(newCommandLineProcessLogging())
.parent(NativeServicesTestFixture.getInstance())
.provider(new GlobalScopeServices(true))
.build();
private static final JvmVersionDetector JVM_VERSION_DETECTOR = GLOBAL_SERVICES.get(JvmVersionDetector.class);
// System properties registered here are forwarded to the build under test.
protected final static Set<String> PROPAGATED_SYSTEM_PROPERTIES = Sets.newHashSet();
/**
 * Registers a system property to be propagated to builds run by this executer.
 */
public static void propagateSystemProperty(String name) {
PROPAGATED_SYSTEM_PROPERTIES.add(name);
}
// System property names that toggle debugging/profiling of the test builds.
private static final String DEBUG_SYSPROP = "org.gradle.integtest.debug";
private static final String LAUNCHER_DEBUG_SYSPROP = "org.gradle.integtest.launcher.debug";
private static final String PROFILE_SYSPROP = "org.gradle.integtest.profile";
// JDWP agent arguments used when a debugger should attach (suspends on start, port 5005).
protected static final List<String> DEBUG_ARGS = ImmutableList.of(
"-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"
);
private final Logger logger;
protected final IntegrationTestBuildContext buildContext;
// Daemon base dirs created for this test, cleaned up via cleanupIsolatedDaemons().
private final Set<File> isolatedDaemonBaseDirs = new HashSet<File>();
// Handles of builds started by this executer that are still running.
private final Set<GradleHandle> running = new HashSet<GradleHandle>();
private final List<String> args = new ArrayList<String>();
private final List<String> tasks = new ArrayList<String>();
private boolean allowExtraLogging = true;
protected ConsoleAttachment consoleAttachment = ConsoleAttachment.NOT_ATTACHED;
// --- build layout and input configuration ---
private File workingDir;
private boolean quiet;
private boolean taskList;
private boolean dependencyList;
private Map<String, String> environmentVars = new HashMap<String, String>();
private List<File> initScripts = new ArrayList<File>();
private String executable;
private TestFile gradleUserHomeDir;
private File userHomeDir;
private File javaHome;
private File buildScript;
private File projectDir;
private File settingsFile;
private boolean ignoreMissingSettingsFile;
private PipedOutputStream stdinPipe;
private String defaultCharacterEncoding;
private Locale defaultLocale;
// --- daemon configuration ---
private int daemonIdleTimeoutSecs = 120;
private boolean requireDaemon;
private File daemonBaseDir;
// --- JVM arguments for the build and the launcher ---
private final List<String> buildJvmOpts = new ArrayList<String>();
private final List<String> commandLineJvmOpts = new ArrayList<String>();
private boolean useOnlyRequestedJvmOpts;
private boolean requiresGradleDistribution;
private boolean useOwnUserHomeServices;
// --- console/output and verification configuration ---
private ConsoleOutput consoleType;
protected WarningMode warningMode = WarningMode.All;
private boolean showStacktrace = true;
private boolean renderWelcomeMessage;
private int expectedGenericDeprecationWarnings;
private final List<String> expectedDeprecationWarnings = new ArrayList<>();
private boolean eagerClassLoaderCreationChecksOn = true;
private boolean stackTraceChecksOn = true;
// --- lifecycle hooks and identity ---
private final MutableActionSet<GradleExecuter> beforeExecute = new MutableActionSet<GradleExecuter>();
private ImmutableActionSet<GradleExecuter> afterExecute = ImmutableActionSet.empty();
private final TestDirectoryProvider testDirectoryProvider;
protected final GradleVersion gradleVersion;
private final GradleDistribution distribution;
// Debug/profiling defaults come from system properties so CI/IDE can toggle them.
private boolean debug = Boolean.getBoolean(DEBUG_SYSPROP);
private boolean debugLauncher = Boolean.getBoolean(LAUNCHER_DEBUG_SYSPROP);
private String profiler = System.getProperty(PROFILE_SYSPROP, "");
protected boolean interactive;
private boolean noExplicitTmpDir;
protected boolean noExplicitNativeServicesDir;
private boolean fullDeprecationStackTrace = true;
private boolean checkDeprecations = true;
private TestFile tmpDir;
private DurationMeasurement durationMeasurement;
// Convenience constructor: targets the current Gradle version.
protected AbstractGradleExecuter(GradleDistribution distribution, TestDirectoryProvider testDirectoryProvider) {
this(distribution, testDirectoryProvider, GradleVersion.current());
}
// Convenience constructor: uses the shared integration test build context.
protected AbstractGradleExecuter(GradleDistribution distribution, TestDirectoryProvider testDirectoryProvider, GradleVersion gradleVersion) {
this(distribution, testDirectoryProvider, gradleVersion, IntegrationTestBuildContext.INSTANCE);
}
// Main constructor: Gradle user home and daemon base dir default to the
// shared locations from the build context (i.e. shared daemons).
protected AbstractGradleExecuter(GradleDistribution distribution, TestDirectoryProvider testDirectoryProvider, GradleVersion gradleVersion, IntegrationTestBuildContext buildContext) {
this.distribution = distribution;
this.testDirectoryProvider = testDirectoryProvider;
this.gradleVersion = gradleVersion;
logger = Logging.getLogger(getClass());
this.buildContext = buildContext;
gradleUserHomeDir = buildContext.getGradleUserHomeDir();
daemonBaseDir = buildContext.getDaemonBaseDir();
}
protected Logger getLogger() {
return logger;
}
/**
 * Restores this executer to its post-construction defaults so it can be
 * reused for another invocation. Note: fields carrying defaults derived from
 * system properties (debug, debugLauncher, profiler) are re-read here.
 */
@Override
public GradleExecuter reset() {
args.clear();
tasks.clear();
initScripts.clear();
workingDir = null;
projectDir = null;
buildScript = null;
settingsFile = null;
ignoreMissingSettingsFile = false;
quiet = false;
taskList = false;
dependencyList = false;
executable = null;
javaHome = null;
environmentVars.clear();
stdinPipe = null;
defaultCharacterEncoding = null;
defaultLocale = null;
commandLineJvmOpts.clear();
buildJvmOpts.clear();
useOnlyRequestedJvmOpts = false;
expectedGenericDeprecationWarnings = 0;
expectedDeprecationWarnings.clear();
stackTraceChecksOn = true;
renderWelcomeMessage = false;
debug = Boolean.getBoolean(DEBUG_SYSPROP);
debugLauncher = Boolean.getBoolean(LAUNCHER_DEBUG_SYSPROP);
profiler = System.getProperty(PROFILE_SYSPROP, "");
interactive = false;
checkDeprecations = true;
durationMeasurement = null;
consoleType = null;
warningMode = WarningMode.All;
return this;
}
@Override
public GradleDistribution getDistribution() {
return distribution;
}
@Override
public TestDirectoryProvider getTestDirectoryProvider() {
return testDirectoryProvider;
}
// Registers a hook that runs before each build invocation.
@Override
public void beforeExecute(Action<? super GradleExecuter> action) {
beforeExecute.add(action);
}
// Groovy-closure variant of the before-execute hook.
@Override
public void beforeExecute(@DelegatesTo(GradleExecuter.class) Closure action) {
beforeExecute.add(new ClosureBackedAction<GradleExecuter>(action));
}
// Registers a hook that runs after each build invocation.
@Override
public void afterExecute(Action<? super GradleExecuter> action) {
afterExecute = afterExecute.add(action);
}
// Groovy-closure variant of the after-execute hook.
@Override
public void afterExecute(@DelegatesTo(GradleExecuter.class) Closure action) {
afterExecute(new ClosureBackedAction<GradleExecuter>(action));
}
@Override
public GradleExecuter inDirectory(File directory) {
workingDir = directory;
return this;
}
// Falls back to the test directory when no explicit working dir was set.
public File getWorkingDir() {
return workingDir == null ? getTestDirectoryProvider().getTestDirectory() : workingDir;
}
/**
 * Copies the configuration of this executer onto another executer instance.
 * Only values that were explicitly set (non-null / non-default) are
 * transferred, so the target keeps its own defaults for the rest.
 * Returns the target executer for chaining.
 */
@Override
public GradleExecuter copyTo(GradleExecuter executer) {
executer.withGradleUserHomeDir(gradleUserHomeDir);
executer.withDaemonIdleTimeoutSecs(daemonIdleTimeoutSecs);
executer.withDaemonBaseDir(daemonBaseDir);
if (workingDir != null) {
executer.inDirectory(workingDir);
}
if (projectDir != null) {
executer.usingProjectDirectory(projectDir);
}
if (buildScript != null) {
executer.usingBuildScript(buildScript);
}
if (settingsFile != null) {
executer.usingSettingsFile(settingsFile);
}
if (ignoreMissingSettingsFile) {
executer.ignoreMissingSettingsFile();
}
if (javaHome != null) {
executer.withJavaHome(javaHome);
}
for (File initScript : initScripts) {
executer.usingInitScript(initScript);
}
executer.withTasks(tasks);
executer.withArguments(args);
executer.withEnvironmentVars(environmentVars);
executer.usingExecutable(executable);
if (quiet) {
executer.withQuietLogging();
}
if (taskList) {
executer.withTaskList();
}
if (dependencyList) {
executer.withDependencyList();
}
if (userHomeDir != null) {
executer.withUserHomeDir(userHomeDir);
}
if (stdinPipe != null) {
executer.withStdinPipe(stdinPipe);
}
if (defaultCharacterEncoding != null) {
executer.withDefaultCharacterEncoding(defaultCharacterEncoding);
}
if (noExplicitTmpDir) {
executer.withNoExplicitTmpDir();
}
if (noExplicitNativeServicesDir) {
executer.withNoExplicitNativeServicesDir();
}
if (!fullDeprecationStackTrace) {
executer.withFullDeprecationStackTraceDisabled();
}
if (defaultLocale != null) {
executer.withDefaultLocale(defaultLocale);
}
executer.withCommandLineGradleOpts(commandLineJvmOpts);
executer.withBuildJvmOpts(buildJvmOpts);
if (useOnlyRequestedJvmOpts) {
executer.useOnlyRequestedJvmOpts();
}
executer.noExtraLogging();
if (expectedGenericDeprecationWarnings > 0) {
executer.expectDeprecationWarnings(expectedGenericDeprecationWarnings);
}
expectedDeprecationWarnings.forEach(executer::expectDeprecationWarning);
if (!eagerClassLoaderCreationChecksOn) {
executer.withEagerClassLoaderCreationCheckDisabled();
}
if (!stackTraceChecksOn) {
executer.withStackTraceChecksDisabled();
}
if (requiresGradleDistribution) {
executer.requireGradleDistribution();
}
if (useOwnUserHomeServices) {
executer.withOwnUserHomeServices();
}
if (requireDaemon) {
executer.requireDaemon();
}
executer.startBuildProcessInDebugger(debug);
executer.startLauncherInDebugger(debugLauncher);
executer.withProfiler(profiler);
executer.withForceInteractive(interactive);
if (!checkDeprecations) {
executer.noDeprecationChecks();
}
if (durationMeasurement != null) {
executer.withDurationMeasurement(durationMeasurement);
}
if (consoleType != null) {
executer.withConsole(consoleType);
}
executer.withWarningMode(warningMode);
if (!showStacktrace) {
executer.withStacktraceDisabled();
}
if (renderWelcomeMessage) {
executer.withWelcomeMessageEnabled();
}
executer.withTestConsoleAttached(consoleAttachment);
return executer;
}
@Override
public GradleExecuter usingBuildScript(File buildScript) {
this.buildScript = buildScript;
return this;
}
@Override
public GradleExecuter usingProjectDirectory(File projectDir) {
this.projectDir = projectDir;
return this;
}
@Override
public GradleExecuter usingSettingsFile(File settingsFile) {
this.settingsFile = settingsFile;
return this;
}
// Init scripts accumulate; each registered script is passed via --init-script.
@Override
public GradleExecuter usingInitScript(File initScript) {
initScripts.add(initScript);
return this;
}
@Override
public TestFile getGradleUserHomeDir() {
return gradleUserHomeDir;
}
@Override
public GradleExecuter withGradleUserHomeDir(File userHomeDir) {
this.gradleUserHomeDir = userHomeDir == null ? null : new TestFile(userHomeDir);
return this;
}
// Gives the build its own isolated Gradle user home under the test directory.
@Override
public GradleExecuter requireOwnGradleUserHomeDir() {
return withGradleUserHomeDir(testDirectoryProvider.getTestDirectory().file("user-home"));
}
public File getUserHomeDir() {
return userHomeDir;
}
/**
 * Assembles the full invocation (environment, build JVM args, launcher JVM
 * args and command-line arguments) for the next execution. Subclasses adapt
 * the result via transformInvocation(), which must consume all implicit
 * launcher JVM args — any left over is a programming error.
 */
protected GradleInvocation buildInvocation() {
validateDaemonVisibility();
GradleInvocation gradleInvocation = new GradleInvocation();
gradleInvocation.environmentVars.putAll(environmentVars);
if (!useOnlyRequestedJvmOpts) {
gradleInvocation.buildJvmArgs.addAll(getImplicitBuildJvmArgs());
}
gradleInvocation.buildJvmArgs.addAll(buildJvmOpts);
calculateLauncherJvmArgs(gradleInvocation);
gradleInvocation.args.addAll(getAllArgs());
transformInvocation(gradleInvocation);
if (!gradleInvocation.implicitLauncherJvmArgs.isEmpty()) {
throw new IllegalStateException("Implicit JVM args have not been handled.");
}
return gradleInvocation;
}
// Guards against accidentally starting a daemon in the shared daemon base dir,
// where it would be visible to (and reused by) other tests.
protected void validateDaemonVisibility() {
if (isUseDaemon() && isSharedDaemons()) {
throw new IllegalStateException("Daemon that will be visible to other tests has been requested.");
}
}
/**
 * Adjusts the calculated invocation prior to execution. This method is responsible for handling the implicit launcher JVM args in some way, by mutating the invocation appropriately.
 */
protected void transformInvocation(GradleInvocation gradleInvocation) {
gradleInvocation.launcherJvmArgs.addAll(0, gradleInvocation.implicitLauncherJvmArgs);
gradleInvocation.implicitLauncherJvmArgs.clear();
}
/**
 * Computes the JVM opts that should be used to start the forked launcher JVM,
 * populating {@code launcherJvmArgs} and {@code implicitLauncherJvmArgs} on
 * the invocation. When a daemon is used, the build JVM args are forwarded to
 * it via the {@code org.gradle.jvmargs} system property; otherwise they are
 * applied directly to the launcher JVM.
 */
private void calculateLauncherJvmArgs(GradleInvocation gradleInvocation) {
    // Add JVM args that were explicitly requested
    gradleInvocation.launcherJvmArgs.addAll(commandLineJvmOpts);
    if (isUseDaemon() && !gradleInvocation.buildJvmArgs.isEmpty()) {
        // Pass build JVM args through to daemon via system property on the launcher JVM.
        // Lambda replaces the old anonymous Transformer (file already targets Java 8).
        String quotedArgs = join(" ", collect(gradleInvocation.buildJvmArgs, input -> String.format("'%s'", input)));
        gradleInvocation.implicitLauncherJvmArgs.add("-Dorg.gradle.jvmargs=" + quotedArgs);
    } else {
        // Have to pass build JVM args directly to launcher JVM
        gradleInvocation.launcherJvmArgs.addAll(gradleInvocation.buildJvmArgs);
    }
    // Set the implicit system properties regardless of whether default JVM args are required or not, this should not interfere with tests' intentions
    // These will also be copied across to any daemon used
    for (Map.Entry<String, String> entry : getImplicitJvmSystemProperties().entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        gradleInvocation.implicitLauncherJvmArgs.add(String.format("-D%s=%s", key, value));
    }
    if (isDebugLauncher()) {
        gradleInvocation.implicitLauncherJvmArgs.addAll(DEBUG_ARGS);
    }
    gradleInvocation.implicitLauncherJvmArgs.add("-ea");
}
/**
 * Returns the default JVM args applied to the build JVM (in addition to any
 * explicitly requested ones): assertions, optional debug/profiler agents,
 * heap sizing, metaspace/permgen limits and OOM heap-dump settings.
 */
protected List<String> getImplicitBuildJvmArgs() {
    List<String> buildJvmOpts = new ArrayList<String>();
    buildJvmOpts.add("-ea");
    if (isDebug()) {
        buildJvmOpts.addAll(DEBUG_ARGS);
    }
    if (isProfile()) {
        buildJvmOpts.add(profiler);
    }
    // Shared daemons outlive a single test, so they get a larger max heap.
    buildJvmOpts.add("-Xms256m");
    buildJvmOpts.add(isSharedDaemons() ? "-Xmx1024m" : "-Xmx512m");
    if (JVM_VERSION_DETECTOR.getJavaVersion(Jvm.forHome(getJavaHome())).compareTo(JavaVersion.VERSION_1_8) < 0) {
        // Pre-Java-8 JVMs still have a permanent generation.
        buildJvmOpts.add("-XX:MaxPermSize=320m");
    } else {
        buildJvmOpts.add("-XX:MaxMetaspaceSize=512m");
    }
    buildJvmOpts.add("-XX:+HeapDumpOnOutOfMemoryError");
    buildJvmOpts.add("-XX:HeapDumpPath=" + buildContext.getGradleUserHomeDir());
    return buildJvmOpts;
}
/**
 * Whether an explicit -Xmx option has been requested for the build JVM.
 */
private boolean xmxSpecified() {
    return buildJvmOpts.stream().anyMatch(opt -> opt.startsWith("-Xmx"));
}
@Override
public GradleExecuter withUserHomeDir(File userHomeDir) {
this.userHomeDir = userHomeDir;
return this;
}
// Falls back to the current JVM's home when no explicit Java home was set.
public File getJavaHome() {
return javaHome == null ? Jvm.current().getJavaHome() : javaHome;
}
@Override
public GradleExecuter withJavaHome(File javaHome) {
this.javaHome = javaHome;
return this;
}
@Override
public GradleExecuter usingExecutable(String script) {
this.executable = script;
return this;
}
public String getExecutable() {
return executable;
}
// Creates a fresh pipe the test can write stdin to.
@Override
public GradleExecuter withStdinPipe() {
return withStdinPipe(new PipedOutputStream());
}
@Override
public GradleExecuter withStdinPipe(PipedOutputStream stdInPipe) {
this.stdinPipe = stdInPipe;
return this;
}
// Returns the read end of the stdin pipe, or an empty stream when none was requested.
public InputStream connectStdIn() {
try {
return stdinPipe == null ? SafeStreams.emptyInput() : new PipedInputStream(stdinPipe);
} catch (IOException e) {
throw UncheckedException.throwAsUncheckedException(e);
}
}
public PipedOutputStream getStdinPipe() {
return stdinPipe;
}
@Override
public GradleExecuter withDefaultCharacterEncoding(String defaultCharacterEncoding) {
this.defaultCharacterEncoding = defaultCharacterEncoding;
return this;
}
// Falls back to the platform default charset when none was set explicitly.
public String getDefaultCharacterEncoding() {
return defaultCharacterEncoding == null ? Charset.defaultCharset().name() : defaultCharacterEncoding;
}
@Override
public GradleExecuter withDefaultLocale(Locale defaultLocale) {
this.defaultLocale = defaultLocale;
return this;
}
public Locale getDefaultLocale() {
return defaultLocale;
}
public boolean isQuiet() {
return quiet;
}
@Override
public GradleExecuter withQuietLogging() {
quiet = true;
return this;
}
@Override
public GradleExecuter withTaskList() {
taskList = true;
return this;
}
@Override
public GradleExecuter withDependencyList() {
dependencyList = true;
return this;
}
@Override
public GradleExecuter withArguments(String... args) {
return withArguments(Arrays.asList(args));
}
// Replaces (does not append to) any previously configured arguments.
@Override
public GradleExecuter withArguments(List<String> args) {
this.args.clear();
this.args.addAll(args);
return this;
}
// Appends a single argument to the configured argument list.
@Override
public GradleExecuter withArgument(String arg) {
this.args.add(arg);
return this;
}
// Replaces the environment; values are stringified via toString().
@Override
public GradleExecuter withEnvironmentVars(Map<String, ?> environment) {
environmentVars.clear();
for (Map.Entry<String, ?> entry : environment.entrySet()) {
environmentVars.put(entry.getKey(), entry.getValue().toString());
}
return this;
}
/**
 * Renders JVM args as a single space-separated string, wrapping any arg that
 * itself contains a space in double quotes. Args containing a double quote
 * alongside a space are not representable and trip the assertion.
 */
protected String toJvmArgsString(Iterable<String> jvmArgs) {
    List<String> rendered = new ArrayList<String>();
    for (String jvmArg : jvmArgs) {
        if (jvmArg.contains(" ")) {
            assert !jvmArg.contains("\"") : "jvmArg '" + jvmArg + "' contains '\"'";
            rendered.add('"' + jvmArg + '"');
        } else {
            rendered.add(jvmArg);
        }
    }
    return String.join(" ", rendered);
}
@Override
public GradleExecuter withTasks(String... names) {
return withTasks(Arrays.asList(names));
}
// Replaces (does not append to) any previously configured task names.
@Override
public GradleExecuter withTasks(List<String> names) {
tasks.clear();
tasks.addAll(names);
return this;
}
@Override
public GradleExecuter withDaemonIdleTimeoutSecs(int secs) {
daemonIdleTimeoutSecs = secs;
return this;
}
// When set, only explicitly requested JVM opts are used (no implicit defaults).
@Override
public GradleExecuter useOnlyRequestedJvmOpts() {
useOnlyRequestedJvmOpts = true;
return this;
}
@Override
public GradleExecuter withDaemonBaseDir(File daemonBaseDir) {
this.daemonBaseDir = daemonBaseDir;
return this;
}
// Gives this test its own daemon base dir so daemons are not shared across tests.
@Override
public GradleExecuter requireIsolatedDaemons() {
return withDaemonBaseDir(testDirectoryProvider.getTestDirectory().file("daemon"));
}
@Override
public GradleExecuter withWorkerDaemonsExpirationDisabled() {
return withCommandLineGradleOpts("-Dorg.gradle.workers.internal.disable-daemons-expiration=true");
}
@Override
public boolean usesSharedDaemons() {
return isSharedDaemons();
}
@Override
public File getDaemonBaseDir() {
return daemonBaseDir;
}
@Override
public GradleExecuter requireDaemon() {
this.requireDaemon = true;
return this;
}
// True when the daemon base dir is the one shared by the whole test run.
protected boolean isSharedDaemons() {
return daemonBaseDir.equals(buildContext.getDaemonBaseDir());
}
// Command-line flags win over requireDaemon(): --no-daemon/--foreground disable it.
@Override
public boolean isUseDaemon() {
CliDaemonArgument cliDaemonArgument = resolveCliDaemonArgument();
if (cliDaemonArgument == NO_DAEMON || cliDaemonArgument == FOREGROUND) {
return false;
}
return requireDaemon || cliDaemonArgument == DAEMON;
}
// Requests user-home-scoped services that are not shared with other executers.
@Override
public GradleExecuter withOwnUserHomeServices() {
useOwnUserHomeServices = true;
return this;
}
@Override
public GradleExecuter withWarningMode(WarningMode warningMode) {
this.warningMode = warningMode;
return this;
}
@Override
public GradleExecuter withConsole(ConsoleOutput consoleType) {
this.consoleType = consoleType;
return this;
}
// Drops the --stacktrace flag from the generated command line.
@Override
public GradleExecuter withStacktraceDisabled() {
showStacktrace = false;
return this;
}
@Override
public GradleExecuter withWelcomeMessageEnabled() {
renderWelcomeMessage = true;
return this;
}
/**
 * Routes dependency resolution through the repository mirrors by registering
 * an init script. Done lazily in a before-execute hook so the script is
 * created when the build actually runs. (Lambdas replace the old anonymous
 * Action classes; the file already targets Java 8.)
 */
@Override
public GradleExecuter withRepositoryMirrors() {
    beforeExecute(gradleExecuter -> usingInitScript(RepoScriptBlockUtil.createMirrorInitScript()));
    return this;
}

/**
 * Installs the mirror init script into an isolated Gradle user home's
 * {@code init.d} directory so that every build using that home is mirrored.
 */
@Override
public GradleExecuter withGlobalRepositoryMirrors() {
    beforeExecute(gradleExecuter -> {
        TestFile userHome = testDirectoryProvider.getTestDirectory().file("user-home");
        withGradleUserHomeDir(userHome);
        userHome.file("init.d/mirrors.gradle").write(RepoScriptBlockUtil.mirrorInitScript());
    });
    return this;
}

/**
 * Points the plugin portal at its mirror via the override system property.
 */
@Override
public GradleExecuter withPluginRepositoryMirror() {
    beforeExecute(gradleExecuter -> withArgument("-D" + PLUGIN_PORTAL_OVERRIDE_URL_PROPERTY + "=" + gradlePluginRepositoryMirrorUrl()));
    return this;
}
/**
 * Performs cleanup at completion of the test.
 */
public void cleanup() {
stopRunningBuilds();
cleanupIsolatedDaemons();
}
// Aborts every build handle started by this executer; failures are logged, not rethrown.
private void stopRunningBuilds() {
for (GradleHandle handle : running) {
try {
handle.abort().waitForExit();
} catch (Exception e) {
getLogger().warn("Problem stopping running build", e);
}
}
}
// Kills daemons in every isolated daemon base dir created during the test.
private void cleanupIsolatedDaemons() {
for (File baseDir : isolatedDaemonBaseDirs) {
try {
new DaemonLogsAnalyzer(baseDir, gradleVersion.getVersion()).killAll();
} catch (Exception e) {
getLogger().warn("Problem killing isolated daemons of Gradle version " + gradleVersion + " in " + baseDir, e);
}
}
}
// The daemon-related flag found on the command line, if any.
enum CliDaemonArgument {
NOT_DEFINED,
DAEMON,
NO_DAEMON,
FOREGROUND
}
/**
 * Determines which daemon-related flag, if any, appears on the command line.
 * Scans from the end of the argument list so the last flag supplied wins.
 */
protected CliDaemonArgument resolveCliDaemonArgument() {
    for (int index = args.size() - 1; index >= 0; index--) {
        switch (args.get(index)) {
            case "--daemon":
                return DAEMON;
            case "--no-daemon":
                return NO_DAEMON;
            case "--foreground":
                return FOREGROUND;
            default:
                break;
        }
    }
    return NOT_DEFINED;
}
// True when the command line does not already carry an explicit daemon flag.
private boolean noDaemonArgumentGiven() {
return resolveCliDaemonArgument() == NOT_DEFINED;
}
// Assembles the complete command-line argument list for the invocation from the
// configured state: file locations, logging, daemon mode, then user args and tasks.
protected List<String> getAllArgs() {
List<String> allArgs = new ArrayList<String>();
if (buildScript != null) {
allArgs.add("--build-file");
allArgs.add(buildScript.getAbsolutePath());
}
if (projectDir != null) {
allArgs.add("--project-dir");
allArgs.add(projectDir.getAbsolutePath());
}
for (File initScript : initScripts) {
allArgs.add("--init-script");
allArgs.add(initScript.getAbsolutePath());
}
if (settingsFile != null) {
allArgs.add("--settings-file");
allArgs.add(settingsFile.getAbsolutePath());
}
if (quiet) {
allArgs.add("--quiet");
}
// Only inject a daemon flag when the caller has not supplied one explicitly.
if (noDaemonArgumentGiven()) {
if (isUseDaemon()) {
allArgs.add("--daemon");
} else {
allArgs.add("--no-daemon");
}
}
if (showStacktrace) {
allArgs.add("--stacktrace");
}
if (taskList) {
allArgs.add("tasks");
}
if (dependencyList) {
allArgs.add("dependencies");
}
// Create a settings file if none exists, unless explicitly told to skip the check.
if (settingsFile == null && !ignoreMissingSettingsFile) {
ensureSettingsFileAvailable();
}
// This will cause problems on Windows if the path to the Gradle executable that is used has a space in it (e.g. the user's dir is c:/Users/Luke Daley/)
// This is fundamentally a windows issue: You can't have arguments with spaces in them if the path to the batch script has a space
// We could work around this by setting -Dgradle.user.home but GRADLE-1730 (which affects 1.0-milestone-3) means that that
// is problematic as well. For now, we just don't support running the int tests from a path with a space in it on Windows.
// When we stop testing against M3 we should change to use the system property.
if (getGradleUserHomeDir() != null) {
allArgs.add("--gradle-user-home");
allArgs.add(getGradleUserHomeDir().getAbsolutePath());
}
if (consoleType != null) {
allArgs.add("--console=" + consoleType.toString().toLowerCase());
}
if (warningMode != null) {
allArgs.add("--warning-mode=" + warningMode.toString().toLowerCase(Locale.ENGLISH));
}
// User-supplied arguments precede tasks, mirroring command-line ordering.
allArgs.addAll(args);
allArgs.addAll(tasks);
return allArgs;
}
// Suppresses the automatic creation of a settings.gradle file before execution.
@Override
public GradleExecuter ignoreMissingSettingsFile() {
ignoreMissingSettingsFile = true;
return this;
}
// Walks from the working dir up to the test directory root looking for an existing
// settings file (also checking a "master" sibling layout); if none is found,
// creates an empty settings.gradle in the working dir.
private void ensureSettingsFileAvailable() {
TestFile workingDir = new TestFile(getWorkingDir());
TestFile dir = workingDir;
while (dir != null && getTestDirectoryProvider().getTestDirectory().isSelfOrDescendent(dir)) {
if (hasSettingsFile(dir) || hasSettingsFile(dir.file("master"))) {
return;
}
dir = dir.getParentFile();
}
workingDir.createFile("settings.gradle");
}
/**
 * Returns true when the given directory contains a settings file in either the
 * Groovy DSL ({@code settings.gradle}) or Kotlin DSL ({@code settings.gradle.kts}) form.
 */
private boolean hasSettingsFile(TestFile dir) {
    return dir.isDirectory()
        && (dir.file("settings.gradle").isFile() || dir.file("settings.gradle.kts").isFile());
}
/**
 * Returns the set of system properties that should be set on every JVM used by this executer.
 * Covers user home, daemon configuration, native services, deprecation tracing,
 * tmp dir, encoding/locale, and console/welcome-message toggles.
 */
protected Map<String, String> getImplicitJvmSystemProperties() {
Map<String, String> properties = new LinkedHashMap<String, String>();
if (getUserHomeDir() != null) {
properties.put("user.home", getUserHomeDir().getAbsolutePath());
}
// Daemon idle timeout is configured in seconds but the property takes millis.
properties.put(DaemonBuildOptions.IdleTimeoutOption.GRADLE_PROPERTY, "" + (daemonIdleTimeoutSecs * 1000));
properties.put(DaemonBuildOptions.BaseDirOption.GRADLE_PROPERTY, daemonBaseDir.getAbsolutePath());
if (!noExplicitNativeServicesDir) {
properties.put(NativeServices.NATIVE_DIR_OVERRIDE, buildContext.getNativeServicesDir().getAbsolutePath());
}
properties.put(LoggingDeprecatedFeatureHandler.ORG_GRADLE_DEPRECATION_TRACE_PROPERTY_NAME, Boolean.toString(fullDeprecationStackTrace));
// A custom user home (or private services) must not reuse shared user-home services.
boolean useCustomGradleUserHomeDir = gradleUserHomeDir != null && !gradleUserHomeDir.equals(buildContext.getGradleUserHomeDir());
if (useOwnUserHomeServices || useCustomGradleUserHomeDir) {
properties.put(REUSE_USER_HOME_SERVICES, "false");
}
if (!useCustomGradleUserHomeDir) {
TestFile generatedApiJarCacheDir = buildContext.getGradleGeneratedApiJarCacheDir();
if (generatedApiJarCacheDir != null) {
properties.put(DefaultGeneratedGradleJarCache.BASE_DIR_OVERRIDE_PROPERTY, generatedApiJarCacheDir.getAbsolutePath());
}
}
if (!noExplicitTmpDir) {
if (tmpDir == null) {
tmpDir = getDefaultTmpDir();
}
String tmpDirPath = tmpDir.createDir().getAbsolutePath();
// Paths with spaces are only usable when the distribution and environment support them.
if (!tmpDirPath.contains(" ") || (getDistribution().isSupportsSpacesInGradleAndJavaOpts() && supportsWhiteSpaceInEnvVars())) {
properties.put("java.io.tmpdir", tmpDirPath);
}
}
properties.put("file.encoding", getDefaultCharacterEncoding());
Locale locale = getDefaultLocale();
if (locale != null) {
properties.put("user.language", locale.getLanguage());
properties.put("user.country", locale.getCountry());
properties.put("user.variant", locale.getVariant());
}
if (eagerClassLoaderCreationChecksOn) {
properties.put(DefaultClassLoaderScope.STRICT_MODE_PROPERTY, "true");
}
if (interactive) {
properties.put(TestOverrideConsoleDetector.INTERACTIVE_TOGGLE, "true");
}
// Enable VFS
properties.put(GradleUserHomeScopeServices.ENABLE_VFS_SYSTEM_PROPERTY_NAME, "true");
properties.put(DefaultCommandLineActionFactory.WELCOME_MESSAGE_ENABLED_SYSTEM_PROPERTY, Boolean.toString(renderWelcomeMessage));
return properties;
}
// Whether the environment tolerates whitespace in env var values; subclasses may override.
protected boolean supportsWhiteSpaceInEnvVars() {
return true;
}
// Starts the build asynchronously. afterExecute actions are unsupported in this mode,
// hence the guard assertion.
@Override
public final GradleHandle start() {
assert afterExecute.isEmpty() : "afterExecute actions are not implemented for async execution";
return startHandle();
}
// Fires before-execute hooks, validates, records state, then creates the handle.
// reset() runs in finally so configuration never leaks into the next invocation.
protected GradleHandle startHandle() {
fireBeforeExecute();
assertCanExecute();
collectStateBeforeExecution();
try {
GradleHandle handle = createGradleHandle();
running.add(handle);
return handle;
} finally {
reset();
}
}
// Runs the build synchronously, expecting success. Wraps the result so stderr
// content is scraped from stdout when the console attaches both streams.
@Override
public final ExecutionResult run() {
fireBeforeExecute();
assertCanExecute();
collectStateBeforeExecution();
try {
ExecutionResult result = doRun();
if (errorsShouldAppearOnStdout()) {
result = new ErrorsOnStdoutScrapingExecutionResult(result);
}
afterExecute.execute(this);
return result;
} finally {
finished();
}
}
// Hook invoked after every synchronous execution; resets configuration by default.
protected void finished() {
reset();
}
// Runs the build synchronously, expecting failure. Mirrors run() but produces
// an ExecutionFailure, with the same stdout-scraping wrapping when applicable.
@Override
public final ExecutionFailure runWithFailure() {
fireBeforeExecute();
assertCanExecute();
collectStateBeforeExecution();
try {
ExecutionFailure executionFailure = doRunWithFailure();
if (errorsShouldAppearOnStdout()) {
executionFailure = new ErrorsOnStdoutScrapingExecutionFailure(executionFailure);
}
afterExecute.execute(this);
return executionFailure;
} finally {
finished();
}
}
// Records the daemon base dir for later cleanup when it is not the shared one.
private void collectStateBeforeExecution() {
if (!isSharedDaemons()) {
isolatedDaemonBaseDirs.add(daemonBaseDir);
}
}
// Runs all registered before-execute actions against this executer.
private void fireBeforeExecute() {
beforeExecute.execute(this);
}
// Async execution is opt-in per subclass; the default rejects it.
protected GradleHandle createGradleHandle() {
throw new UnsupportedOperationException(String.format("%s does not support running asynchronously.", getClass().getSimpleName()));
}
// Executes the build and returns the (successful) result. Implemented by subclasses.
protected abstract ExecutionResult doRun();
// Executes the build and returns the failure result. Implemented by subclasses.
protected abstract ExecutionFailure doRunWithFailure();
// Adds JVM options for the launcher (client) JVM.
@Override
public GradleExecuter withCommandLineGradleOpts(Iterable<String> jvmOpts) {
CollectionUtils.addAll(commandLineJvmOpts, jvmOpts);
return this;
}
// Varargs convenience overload for launcher JVM options.
@Override
public GradleExecuter withCommandLineGradleOpts(String... jvmOpts) {
CollectionUtils.addAll(commandLineJvmOpts, jvmOpts);
return this;
}
// Adds JVM options for the build (daemon) JVM.
@Override
public AbstractGradleExecuter withBuildJvmOpts(String... jvmOpts) {
CollectionUtils.addAll(buildJvmOpts, jvmOpts);
return this;
}
// Iterable convenience overload for build JVM options.
@Override
public GradleExecuter withBuildJvmOpts(Iterable<String> jvmOpts) {
CollectionUtils.addAll(buildJvmOpts, jvmOpts);
return this;
}
// Enables the build cache via the --build-cache command-line flag.
@Override
public GradleExecuter withBuildCacheEnabled() {
return withArgument("--build-cache");
}
// Builds the assertion applied to every execution result: scans output for
// unexpected deprecation warnings and stack traces, and verifies all expected
// deprecation warnings were actually emitted. Expectations are snapshotted at
// creation time because the executer's own state is reset after each run.
protected Action<ExecutionResult> getResultAssertion() {
return new Action<ExecutionResult>() {
private int expectedGenericDeprecationWarnings = AbstractGradleExecuter.this.expectedGenericDeprecationWarnings;
private final List<String> expectedDeprecationWarnings = new ArrayList<>(AbstractGradleExecuter.this.expectedDeprecationWarnings);
private final boolean expectStackTraces = !AbstractGradleExecuter.this.stackTraceChecksOn;
private final boolean checkDeprecations = AbstractGradleExecuter.this.checkDeprecations;
@Override
public void execute(ExecutionResult executionResult) {
String normalizedOutput = executionResult.getNormalizedOutput();
String error = executionResult.getError();
boolean executionFailure = isExecutionFailure(executionResult);
// for tests using rich console standard out and error are combined in output of execution result
if (executionFailure) {
normalizedOutput = removeExceptionStackTraceForFailedExecution(normalizedOutput);
}
validate(normalizedOutput, "Standard output");
if (executionFailure) {
error = removeExceptionStackTraceForFailedExecution(error);
}
validate(error, "Standard error");
// Any expectation still in the list means the warning never appeared.
if (!expectedDeprecationWarnings.isEmpty()) {
throw new AssertionError(String.format("Expected the following deprecation warnings:%n%s",
expectedDeprecationWarnings.stream()
.map(warning -> " - " + warning)
.collect(Collectors.joining("\n"))));
}
if (expectedGenericDeprecationWarnings > 0) {
throw new AssertionError(String.format("Expected %d more deprecation warnings", expectedGenericDeprecationWarnings));
}
}
private boolean isErrorOutEmpty(String error) {
//remove SLF4J error out like 'Class path contains multiple SLF4J bindings.'
//See: https://github.com/gradle/performance/issues/375#issuecomment-315103861
return Strings.isNullOrEmpty(error.replaceAll("(?m)^SLF4J: .*", "").trim());
}
private boolean isExecutionFailure(ExecutionResult executionResult) {
return executionResult instanceof ExecutionFailure;
}
// Axe everything after the expected exception
private String removeExceptionStackTraceForFailedExecution(String text) {
int pos = text.indexOf("* Exception is:");
if (pos >= 0) {
text = text.substring(0, pos);
}
return text;
}
// Scans the given output line by line, ignoring known-benign warnings,
// consuming expected deprecation warnings, and failing on unexpected
// deprecation warnings or stack traces.
private void validate(String output, String displayName) {
List<String> lines;
try {
lines = CharSource.wrap(output).readLines();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
int i = 0;
// Variant description blocks (between a 'variant "' line and a ']' line) may
// legitimately contain stack-trace-like lines, so tracking suppresses that check.
boolean insideVariantDescriptionBlock = false;
while (i < lines.size()) {
String line = lines.get(i);
if (insideVariantDescriptionBlock && line.contains("]")) {
insideVariantDescriptionBlock = false;
} else if (!insideVariantDescriptionBlock && line.contains("variant \"")) {
insideVariantDescriptionBlock = true;
}
if (line.matches(".*use(s)? or override(s)? a deprecated API\\.")) {
// A javac warning, ignore
i++;
} else if (line.matches(".*w: .* is deprecated\\..*")) {
// A kotlinc warning, ignore
i++;
} else if (isDeprecationMessageInHelpDescription(line)) {
i++;
} else if (expectedDeprecationWarnings.remove(line)) {
// Deprecation warning is expected
i++;
i = skipStackTrace(lines, i);
} else if (line.matches(".*\\s+deprecated.*")) {
if (checkDeprecations && expectedGenericDeprecationWarnings <= 0) {
throw new AssertionError(String.format("%s line %d contains a deprecation warning: %s%n=====%n%s%n=====%n", displayName, i + 1, line, output));
}
expectedGenericDeprecationWarnings--;
// skip over stack trace
i++;
i = skipStackTrace(lines, i);
} else if (!expectStackTraces && !insideVariantDescriptionBlock && STACK_TRACE_ELEMENT.matcher(line).matches() && i < lines.size() - 1 && STACK_TRACE_ELEMENT.matcher(lines.get(i + 1)).matches()) {
// 2 or more lines that look like stack trace elements
throw new AssertionError(String.format("%s line %d contains an unexpected stack trace: %s%n=====%n%s%n=====%n", displayName, i + 1, line, output));
} else {
i++;
}
}
}
// Advances the index past consecutive stack-trace-element lines.
private int skipStackTrace(List<String> lines, int i) {
while (i < lines.size() && STACK_TRACE_ELEMENT.matcher(lines.get(i)).matches()) {
i++;
}
return i;
}
private boolean isDeprecationMessageInHelpDescription(String s) {
return s.matches(".*\\[deprecated.*]");
}
};
}
// Expects exactly one generic deprecation warning in the output.
@Override
public GradleExecuter expectDeprecationWarning() {
return expectDeprecationWarnings(1);
}
// Expects the given number of generic deprecation warnings; may only be set once
// per execution and must be positive.
@Override
public GradleExecuter expectDeprecationWarnings(int count) {
Preconditions.checkState(expectedGenericDeprecationWarnings == 0, "expected deprecation count is already set for this execution");
Preconditions.checkArgument(count > 0, "expected deprecation count must be positive");
expectedGenericDeprecationWarnings = count;
return this;
}
// Expects a specific deprecation warning line to appear in the output.
@Override
public GradleExecuter expectDeprecationWarning(String warning) {
expectedDeprecationWarnings.add(warning);
return this;
}
// Disables deprecation warning checks for this execution.
@Override
public GradleExecuter noDeprecationChecks() {
checkDeprecations = false;
return this;
}
// Disables the strict eager-classloader-creation check system property.
@Override
public GradleExecuter withEagerClassLoaderCreationCheckDisabled() {
eagerClassLoaderCreationChecksOn = false;
return this;
}
// Disables the check that fails on unexpected stack traces in the output.
@Override
public GradleExecuter withStackTraceChecksDisabled() {
stackTraceChecksOn = false;
return this;
}
// Default tmp dir for the build JVM, taken from the build context.
protected TestFile getDefaultTmpDir() {
return buildContext.getTmpDir().createDir();
}
// Disables extra logging that the executer may otherwise add.
@Override
public GradleExecuter noExtraLogging() {
this.allowExtraLogging = false;
return this;
}
public boolean isAllowExtraLogging() {
return allowExtraLogging;
}
public boolean isRequiresGradleDistribution() {
return requiresGradleDistribution;
}
// Forces execution against a real Gradle distribution rather than embedded.
@Override
public GradleExecuter requireGradleDistribution() {
this.requiresGradleDistribution = true;
return this;
}
// Toggles attaching a debugger to the build (daemon) process.
@Override
public GradleExecuter startBuildProcessInDebugger(boolean flag) {
debug = flag;
return this;
}
// Toggles attaching a debugger to the launcher (client) process.
@Override
public GradleExecuter startLauncherInDebugger(boolean flag) {
debugLauncher = flag;
return this;
}
@Override
public boolean isDebugLauncher() {
return debugLauncher;
}
// Sets the profiler arguments to pass to the build JVM; empty means no profiling.
@Override
public GradleExecuter withProfiler(String args) {
profiler = args;
return this;
}
// Forces the build to believe it is attached to an interactive console.
@Override
public GradleExecuter withForceInteractive(boolean flag) {
interactive = flag;
return this;
}
// Skips setting an explicit java.io.tmpdir for the build JVM.
@Override
public GradleExecuter withNoExplicitTmpDir() {
noExplicitTmpDir = true;
return this;
}
// Skips overriding the native services dir for the build JVM.
@Override
public GradleExecuter withNoExplicitNativeServicesDir() {
noExplicitNativeServicesDir = true;
return this;
}
// Disables full stack traces for deprecation warnings.
@Override
public GradleExecuter withFullDeprecationStackTraceDisabled() {
fullDeprecationStackTrace = false;
return this;
}
@Override
public boolean isDebug() {
return debug;
}
// Profiling is active whenever profiler arguments have been supplied.
@Override
public boolean isProfile() {
return !profiler.isEmpty();
}
// Mutable holder for the pieces of a single Gradle invocation: environment,
// command-line args, and the JVM args for the build and launcher JVMs.
protected static class GradleInvocation {
final Map<String, String> environmentVars = new HashMap<String, String>();
final List<String> args = new ArrayList<String>();
// JVM args that must be used for the build JVM
final List<String> buildJvmArgs = new ArrayList<String>();
// JVM args that must be used to fork a JVM
final List<String> launcherJvmArgs = new ArrayList<String>();
// Implicit JVM args that should be used to fork a JVM
final List<String> implicitLauncherJvmArgs = new ArrayList<String>();
}
// Stopping the executer performs the same cleanup as test completion.
@Override
public void stop() {
cleanup();
}
// Installs a measurement hook around build execution (used by performance tests).
@Override
public GradleExecuter withDurationMeasurement(DurationMeasurement durationMeasurement) {
this.durationMeasurement = durationMeasurement;
return this;
}
// Starts the duration measurement, if one is configured.
protected void startMeasurement() {
if (durationMeasurement != null) {
durationMeasurement.start();
}
}
// Stops the duration measurement, if one is configured.
protected void stopMeasurement() {
if (durationMeasurement != null) {
durationMeasurement.stop();
}
}
protected DurationMeasurement getDurationMeasurement() {
return durationMeasurement;
}
// Creates the logging services used by GLOBAL_SERVICES, with system out/err attached.
private static LoggingServiceRegistry newCommandLineProcessLogging() {
LoggingServiceRegistry loggingServices = LoggingServiceRegistry.newEmbeddableLogging();
LoggingManagerInternal rootLoggingManager = loggingServices.get(DefaultLoggingManagerFactory.class).getRoot();
rootLoggingManager.attachSystemOutAndErr();
return loggingServices;
}
// Attaches a test console to both stdout and stderr.
@Override
public GradleExecuter withTestConsoleAttached() {
return withTestConsoleAttached(ConsoleAttachment.ATTACHED);
}
// Attaches the given console configuration and applies its command-line args.
@Override
public GradleExecuter withTestConsoleAttached(ConsoleAttachment consoleAttachment) {
this.consoleAttachment = consoleAttachment;
return configureConsoleCommandLineArgs();
}
// Adds the console-metadata JVM option when a console is attached; no-op otherwise.
protected GradleExecuter configureConsoleCommandLineArgs() {
if (consoleAttachment == ConsoleAttachment.NOT_ATTACHED) {
return this;
} else {
return withCommandLineGradleOpts(consoleAttachment.getConsoleMetaData().getCommandLineArgument());
}
}
private boolean errorsShouldAppearOnStdout() {
// If stdout and stderr are attached to the console
return consoleAttachment.isStderrAttached() && consoleAttachment.isStdoutAttached();
}
}
| subprojects/internal-integ-testing/src/main/groovy/org/gradle/integtests/fixtures/executer/AbstractGradleExecuter.java | /*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.integtests.fixtures.executer;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.common.io.CharSource;
import groovy.lang.Closure;
import groovy.lang.DelegatesTo;
import org.gradle.api.Action;
import org.gradle.api.JavaVersion;
import org.gradle.api.Transformer;
import org.gradle.api.UncheckedIOException;
import org.gradle.api.internal.initialization.DefaultClassLoaderScope;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.logging.configuration.ConsoleOutput;
import org.gradle.api.logging.configuration.WarningMode;
import org.gradle.cache.internal.DefaultGeneratedGradleJarCache;
import org.gradle.integtests.fixtures.RepoScriptBlockUtil;
import org.gradle.integtests.fixtures.daemon.DaemonLogsAnalyzer;
import org.gradle.internal.ImmutableActionSet;
import org.gradle.internal.MutableActionSet;
import org.gradle.internal.UncheckedException;
import org.gradle.internal.featurelifecycle.LoggingDeprecatedFeatureHandler;
import org.gradle.internal.jvm.Jvm;
import org.gradle.internal.jvm.inspection.JvmVersionDetector;
import org.gradle.internal.logging.LoggingManagerInternal;
import org.gradle.internal.logging.services.DefaultLoggingManagerFactory;
import org.gradle.internal.logging.services.LoggingServiceRegistry;
import org.gradle.internal.nativeintegration.console.TestOverrideConsoleDetector;
import org.gradle.internal.nativeintegration.services.NativeServices;
import org.gradle.internal.service.ServiceRegistry;
import org.gradle.internal.service.ServiceRegistryBuilder;
import org.gradle.internal.service.scopes.GlobalScopeServices;
import org.gradle.launcher.cli.DefaultCommandLineActionFactory;
import org.gradle.launcher.daemon.configuration.DaemonBuildOptions;
import org.gradle.process.internal.streams.SafeStreams;
import org.gradle.test.fixtures.file.TestDirectoryProvider;
import org.gradle.test.fixtures.file.TestFile;
import org.gradle.testfixtures.internal.NativeServicesTestFixture;
import org.gradle.util.ClosureBackedAction;
import org.gradle.util.CollectionUtils;
import org.gradle.util.GradleVersion;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.gradle.api.internal.artifacts.BaseRepositoryFactory.PLUGIN_PORTAL_OVERRIDE_URL_PROPERTY;
import static org.gradle.integtests.fixtures.RepoScriptBlockUtil.gradlePluginRepositoryMirrorUrl;
import static org.gradle.integtests.fixtures.executer.AbstractGradleExecuter.CliDaemonArgument.DAEMON;
import static org.gradle.integtests.fixtures.executer.AbstractGradleExecuter.CliDaemonArgument.FOREGROUND;
import static org.gradle.integtests.fixtures.executer.AbstractGradleExecuter.CliDaemonArgument.NOT_DEFINED;
import static org.gradle.integtests.fixtures.executer.AbstractGradleExecuter.CliDaemonArgument.NO_DAEMON;
import static org.gradle.integtests.fixtures.executer.OutputScrapingExecutionResult.STACK_TRACE_ELEMENT;
import static org.gradle.internal.service.scopes.DefaultGradleUserHomeScopeServiceRegistry.REUSE_USER_HOME_SERVICES;
import static org.gradle.util.CollectionUtils.collect;
import static org.gradle.util.CollectionUtils.join;
public abstract class AbstractGradleExecuter implements GradleExecuter {
// Shared global service registry used by all executers in this JVM; wires up
// logging and native services once for the whole test process.
protected static final ServiceRegistry GLOBAL_SERVICES = ServiceRegistryBuilder.builder()
.displayName("Global services")
.parent(newCommandLineProcessLogging())
.parent(NativeServicesTestFixture.getInstance())
.provider(new GlobalScopeServices(true))
.build();
private static final JvmVersionDetector JVM_VERSION_DETECTOR = GLOBAL_SERVICES.get(JvmVersionDetector.class);
// System properties propagated from this JVM to the build JVM.
protected final static Set<String> PROPAGATED_SYSTEM_PROPERTIES = Sets.newHashSet();
// Registers a system property to be propagated to every build started by an executer.
public static void propagateSystemProperty(String name) {
PROPAGATED_SYSTEM_PROPERTIES.add(name);
}
// System properties that toggle debugging/profiling of the spawned processes.
private static final String DEBUG_SYSPROP = "org.gradle.integtest.debug";
private static final String LAUNCHER_DEBUG_SYSPROP = "org.gradle.integtest.launcher.debug";
private static final String PROFILE_SYSPROP = "org.gradle.integtest.profile";
// JDWP arguments used when a debugger should be attached (suspends until connected).
protected static final List<String> DEBUG_ARGS = ImmutableList.of(
"-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"
);
private final Logger logger;
protected final IntegrationTestBuildContext buildContext;
// Daemon base dirs that are private to this executer and must be cleaned up.
private final Set<File> isolatedDaemonBaseDirs = new HashSet<File>();
// Handles of builds started asynchronously and still potentially running.
private final Set<GradleHandle> running = new HashSet<GradleHandle>();
private final List<String> args = new ArrayList<String>();
private final List<String> tasks = new ArrayList<String>();
private boolean allowExtraLogging = true;
protected ConsoleAttachment consoleAttachment = ConsoleAttachment.NOT_ATTACHED;
private File workingDir;
private boolean quiet;
private boolean taskList;
private boolean dependencyList;
private Map<String, String> environmentVars = new HashMap<String, String>();
private List<File> initScripts = new ArrayList<File>();
private String executable;
private TestFile gradleUserHomeDir;
private File userHomeDir;
private File javaHome;
private File buildScript;
private File projectDir;
private File settingsFile;
private boolean ignoreMissingSettingsFile;
private PipedOutputStream stdinPipe;
private String defaultCharacterEncoding;
private Locale defaultLocale;
private int daemonIdleTimeoutSecs = 120;
private boolean requireDaemon;
private File daemonBaseDir;
private final List<String> buildJvmOpts = new ArrayList<String>();
private final List<String> commandLineJvmOpts = new ArrayList<String>();
private boolean useOnlyRequestedJvmOpts;
private boolean requiresGradleDistribution;
private boolean useOwnUserHomeServices;
private ConsoleOutput consoleType;
protected WarningMode warningMode = WarningMode.All;
private boolean showStacktrace = true;
private boolean renderWelcomeMessage;
// Deprecation warning expectations for the next execution.
private int expectedGenericDeprecationWarnings;
private final List<String> expectedDeprecationWarnings = new ArrayList<>();
private boolean eagerClassLoaderCreationChecksOn = true;
private boolean stackTraceChecksOn = true;
// Hooks run before/after each execution.
private final MutableActionSet<GradleExecuter> beforeExecute = new MutableActionSet<GradleExecuter>();
private ImmutableActionSet<GradleExecuter> afterExecute = ImmutableActionSet.empty();
private final TestDirectoryProvider testDirectoryProvider;
protected final GradleVersion gradleVersion;
private final GradleDistribution distribution;
// Debug/profile defaults come from system properties so they can be set per test run.
private boolean debug = Boolean.getBoolean(DEBUG_SYSPROP);
private boolean debugLauncher = Boolean.getBoolean(LAUNCHER_DEBUG_SYSPROP);
private String profiler = System.getProperty(PROFILE_SYSPROP, "");
protected boolean interactive;
private boolean noExplicitTmpDir;
protected boolean noExplicitNativeServicesDir;
private boolean fullDeprecationStackTrace = true;
private boolean checkDeprecations = true;
private TestFile tmpDir;
private DurationMeasurement durationMeasurement;
// Convenience constructor using the current Gradle version.
protected AbstractGradleExecuter(GradleDistribution distribution, TestDirectoryProvider testDirectoryProvider) {
this(distribution, testDirectoryProvider, GradleVersion.current());
}
// Convenience constructor using the default integration-test build context.
protected AbstractGradleExecuter(GradleDistribution distribution, TestDirectoryProvider testDirectoryProvider, GradleVersion gradleVersion) {
this(distribution, testDirectoryProvider, gradleVersion, IntegrationTestBuildContext.INSTANCE);
}
// Primary constructor; defaults the Gradle user home and daemon base dir to the
// shared locations from the build context.
protected AbstractGradleExecuter(GradleDistribution distribution, TestDirectoryProvider testDirectoryProvider, GradleVersion gradleVersion, IntegrationTestBuildContext buildContext) {
this.distribution = distribution;
this.testDirectoryProvider = testDirectoryProvider;
this.gradleVersion = gradleVersion;
logger = Logging.getLogger(getClass());
this.buildContext = buildContext;
gradleUserHomeDir = buildContext.getGradleUserHomeDir();
daemonBaseDir = buildContext.getDaemonBaseDir();
}
protected Logger getLogger() {
return logger;
}
// Resets per-invocation configuration back to defaults. Called after every
// execution so that state from one build does not leak into the next.
// Note: some settings (e.g. gradleUserHomeDir, daemonBaseDir) deliberately survive.
@Override
public GradleExecuter reset() {
args.clear();
tasks.clear();
initScripts.clear();
workingDir = null;
projectDir = null;
buildScript = null;
settingsFile = null;
ignoreMissingSettingsFile = false;
quiet = false;
taskList = false;
dependencyList = false;
executable = null;
javaHome = null;
environmentVars.clear();
stdinPipe = null;
defaultCharacterEncoding = null;
defaultLocale = null;
commandLineJvmOpts.clear();
buildJvmOpts.clear();
useOnlyRequestedJvmOpts = false;
expectedGenericDeprecationWarnings = 0;
expectedDeprecationWarnings.clear();
stackTraceChecksOn = true;
renderWelcomeMessage = false;
// Debug/profile flags are re-read from system properties, not simply cleared.
debug = Boolean.getBoolean(DEBUG_SYSPROP);
debugLauncher = Boolean.getBoolean(LAUNCHER_DEBUG_SYSPROP);
profiler = System.getProperty(PROFILE_SYSPROP, "");
interactive = false;
checkDeprecations = true;
durationMeasurement = null;
consoleType = null;
warningMode = WarningMode.All;
return this;
}
@Override
public GradleDistribution getDistribution() {
return distribution;
}
@Override
public TestDirectoryProvider getTestDirectoryProvider() {
return testDirectoryProvider;
}
// Registers an action to run immediately before each execution.
@Override
public void beforeExecute(Action<? super GradleExecuter> action) {
beforeExecute.add(action);
}
// Groovy closure variant of the before-execute hook.
@Override
public void beforeExecute(@DelegatesTo(GradleExecuter.class) Closure action) {
beforeExecute.add(new ClosureBackedAction<GradleExecuter>(action));
}
// Registers an action to run after each synchronous execution.
@Override
public void afterExecute(Action<? super GradleExecuter> action) {
afterExecute = afterExecute.add(action);
}
// Groovy closure variant of the after-execute hook.
@Override
public void afterExecute(@DelegatesTo(GradleExecuter.class) Closure action) {
afterExecute(new ClosureBackedAction<GradleExecuter>(action));
}
// Sets the working directory for the build.
@Override
public GradleExecuter inDirectory(File directory) {
workingDir = directory;
return this;
}
// Falls back to the test directory when no explicit working dir was set.
public File getWorkingDir() {
return workingDir == null ? getTestDirectoryProvider().getTestDirectory() : workingDir;
}
// Copies this executer's configuration onto another executer instance.
// Each setting is transferred via the target's public API so subclasses can
// react; only non-default settings are applied where the setter is one-way.
@Override
public GradleExecuter copyTo(GradleExecuter executer) {
executer.withGradleUserHomeDir(gradleUserHomeDir);
executer.withDaemonIdleTimeoutSecs(daemonIdleTimeoutSecs);
executer.withDaemonBaseDir(daemonBaseDir);
if (workingDir != null) {
executer.inDirectory(workingDir);
}
if (projectDir != null) {
executer.usingProjectDirectory(projectDir);
}
if (buildScript != null) {
executer.usingBuildScript(buildScript);
}
if (settingsFile != null) {
executer.usingSettingsFile(settingsFile);
}
if (ignoreMissingSettingsFile) {
executer.ignoreMissingSettingsFile();
}
if (javaHome != null) {
executer.withJavaHome(javaHome);
}
for (File initScript : initScripts) {
executer.usingInitScript(initScript);
}
executer.withTasks(tasks);
executer.withArguments(args);
executer.withEnvironmentVars(environmentVars);
executer.usingExecutable(executable);
if (quiet) {
executer.withQuietLogging();
}
if (taskList) {
executer.withTaskList();
}
if (dependencyList) {
executer.withDependencyList();
}
if (userHomeDir != null) {
executer.withUserHomeDir(userHomeDir);
}
if (stdinPipe != null) {
executer.withStdinPipe(stdinPipe);
}
if (defaultCharacterEncoding != null) {
executer.withDefaultCharacterEncoding(defaultCharacterEncoding);
}
if (noExplicitTmpDir) {
executer.withNoExplicitTmpDir();
}
if (noExplicitNativeServicesDir) {
executer.withNoExplicitNativeServicesDir();
}
if (!fullDeprecationStackTrace) {
executer.withFullDeprecationStackTraceDisabled();
}
if (defaultLocale != null) {
executer.withDefaultLocale(defaultLocale);
}
executer.withCommandLineGradleOpts(commandLineJvmOpts);
executer.withBuildJvmOpts(buildJvmOpts);
if (useOnlyRequestedJvmOpts) {
executer.useOnlyRequestedJvmOpts();
}
executer.noExtraLogging();
if (expectedGenericDeprecationWarnings > 0) {
executer.expectDeprecationWarnings(expectedGenericDeprecationWarnings);
}
expectedDeprecationWarnings.forEach(executer::expectDeprecationWarning);
if (!eagerClassLoaderCreationChecksOn) {
executer.withEagerClassLoaderCreationCheckDisabled();
}
if (!stackTraceChecksOn) {
executer.withStackTraceChecksDisabled();
}
if (requiresGradleDistribution) {
executer.requireGradleDistribution();
}
if (useOwnUserHomeServices) {
executer.withOwnUserHomeServices();
}
if (requireDaemon) {
executer.requireDaemon();
}
executer.startBuildProcessInDebugger(debug);
executer.startLauncherInDebugger(debugLauncher);
executer.withProfiler(profiler);
executer.withForceInteractive(interactive);
if (!checkDeprecations) {
executer.noDeprecationChecks();
}
if (durationMeasurement != null) {
executer.withDurationMeasurement(durationMeasurement);
}
if (consoleType != null) {
executer.withConsole(consoleType);
}
executer.withWarningMode(warningMode);
if (!showStacktrace) {
executer.withStacktraceDisabled();
}
if (renderWelcomeMessage) {
executer.withWelcomeMessageEnabled();
}
executer.withTestConsoleAttached(consoleAttachment);
return executer;
}
// Sets the build script passed via --build-file.
@Override
public GradleExecuter usingBuildScript(File buildScript) {
this.buildScript = buildScript;
return this;
}
// Sets the project directory passed via --project-dir.
@Override
public GradleExecuter usingProjectDirectory(File projectDir) {
this.projectDir = projectDir;
return this;
}
// Sets the settings file passed via --settings-file.
@Override
public GradleExecuter usingSettingsFile(File settingsFile) {
this.settingsFile = settingsFile;
return this;
}
// Adds an init script passed via --init-script (may be called multiple times).
@Override
public GradleExecuter usingInitScript(File initScript) {
initScripts.add(initScript);
return this;
}
@Override
public TestFile getGradleUserHomeDir() {
return gradleUserHomeDir;
}
// Sets the Gradle user home; null clears it back to unset.
@Override
public GradleExecuter withGradleUserHomeDir(File userHomeDir) {
this.gradleUserHomeDir = userHomeDir == null ? null : new TestFile(userHomeDir);
return this;
}
// Switches to a Gradle user home private to this test.
@Override
public GradleExecuter requireOwnGradleUserHomeDir() {
return withGradleUserHomeDir(testDirectoryProvider.getTestDirectory().file("user-home"));
}
public File getUserHomeDir() {
return userHomeDir;
}
protected GradleInvocation buildInvocation() {
validateDaemonVisibility();
GradleInvocation gradleInvocation = new GradleInvocation();
gradleInvocation.environmentVars.putAll(environmentVars);
if (!useOnlyRequestedJvmOpts) {
gradleInvocation.buildJvmArgs.addAll(getImplicitBuildJvmArgs());
}
gradleInvocation.buildJvmArgs.addAll(buildJvmOpts);
calculateLauncherJvmArgs(gradleInvocation);
gradleInvocation.args.addAll(getAllArgs());
transformInvocation(gradleInvocation);
if (!gradleInvocation.implicitLauncherJvmArgs.isEmpty()) {
throw new IllegalStateException("Implicit JVM args have not been handled.");
}
return gradleInvocation;
}
/**
 * Fails fast when the execution would start a daemon in the shared daemon base dir,
 * where it would be visible to (and could interfere with) other tests.
 */
protected void validateDaemonVisibility() {
    boolean daemonVisibleToOtherTests = isUseDaemon() && isSharedDaemons();
    if (daemonVisibleToOtherTests) {
        throw new IllegalStateException("Daemon that will be visible to other tests has been requested.");
    }
}
/**
 * Adjusts the calculated invocation prior to execution. This method is responsible for handling the implicit launcher JVM args in some way, by mutating the invocation appropriately.
 */
protected void transformInvocation(GradleInvocation gradleInvocation) {
    // Default handling: promote the implicit args to the front of the real launcher args,
    // then clear the implicit list so buildInvocation()'s sanity check passes.
    gradleInvocation.launcherJvmArgs.addAll(0, gradleInvocation.implicitLauncherJvmArgs);
    gradleInvocation.implicitLauncherJvmArgs.clear();
}
/**
 * Returns the JVM opts that should be used to start a forked JVM.
 *
 * <p>Explicit command-line opts always go on the launcher. Build JVM args either travel
 * via the {@code org.gradle.jvmargs} system property (daemon case) or directly on the
 * launcher (no-daemon case). Implicit system properties, debug args and {@code -ea}
 * are always added as implicit launcher args.</p>
 */
private void calculateLauncherJvmArgs(GradleInvocation gradleInvocation) {
    // Add JVM args that were explicitly requested
    gradleInvocation.launcherJvmArgs.addAll(commandLineJvmOpts);
    if (isUseDaemon() && !gradleInvocation.buildJvmArgs.isEmpty()) {
        // Pass build JVM args through to daemon via system property on the launcher JVM.
        // Each arg is single-quoted so args containing spaces survive the round trip.
        String quotedArgs = gradleInvocation.buildJvmArgs.stream()
            .map(arg -> String.format("'%s'", arg))
            .collect(Collectors.joining(" "));
        gradleInvocation.implicitLauncherJvmArgs.add("-Dorg.gradle.jvmargs=" + quotedArgs);
    } else {
        // Have to pass build JVM args directly to launcher JVM
        gradleInvocation.launcherJvmArgs.addAll(gradleInvocation.buildJvmArgs);
    }
    // Set the implicit system properties regardless of whether default JVM args are required or not,
    // this should not interfere with tests' intentions.
    // These will also be copied across to any daemon used.
    for (Map.Entry<String, String> entry : getImplicitJvmSystemProperties().entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        gradleInvocation.implicitLauncherJvmArgs.add(String.format("-D%s=%s", key, value));
    }
    if (isDebugLauncher()) {
        gradleInvocation.implicitLauncherJvmArgs.addAll(DEBUG_ARGS);
    }
    // Enable assertions in the launcher JVM as well.
    gradleInvocation.implicitLauncherJvmArgs.add("-ea");
}
/**
 * Returns additional JVM args that should be used to start the build JVM.
 */
protected List<String> getImplicitBuildJvmArgs() {
    List<String> buildJvmOpts = new ArrayList<String>();
    // Always enable assertions in the build JVM.
    buildJvmOpts.add("-ea");
    if (isDebug()) {
        buildJvmOpts.addAll(DEBUG_ARGS);
    }
    if (isProfile()) {
        buildJvmOpts.add(profiler);
    }
    // Shared daemons get a larger max heap since they serve multiple tests.
    if (isSharedDaemons()) {
        buildJvmOpts.add("-Xms256m");
        buildJvmOpts.add("-Xmx1024m");
    } else {
        buildJvmOpts.add("-Xms256m");
        buildJvmOpts.add("-Xmx512m");
    }
    // PermGen exists only before Java 8; later JVMs use Metaspace instead.
    if (JVM_VERSION_DETECTOR.getJavaVersion(Jvm.forHome(getJavaHome())).compareTo(JavaVersion.VERSION_1_8) < 0) {
        buildJvmOpts.add("-XX:MaxPermSize=320m");
    } else {
        buildJvmOpts.add("-XX:MaxMetaspaceSize=512m");
    }
    // Capture heap dumps on OOM for post-mortem debugging of test failures.
    buildJvmOpts.add("-XX:+HeapDumpOnOutOfMemoryError");
    buildJvmOpts.add("-XX:HeapDumpPath=" + buildContext.getGradleUserHomeDir());
    return buildJvmOpts;
}
/**
 * Returns {@code true} when the requested build JVM opts already contain an
 * explicit {@code -Xmx} maximum-heap setting.
 */
private boolean xmxSpecified() {
    return buildJvmOpts.stream().anyMatch(opt -> opt.startsWith("-Xmx"));
}
@Override
public GradleExecuter withUserHomeDir(File userHomeDir) {
this.userHomeDir = userHomeDir;
return this;
}
public File getJavaHome() {
return javaHome == null ? Jvm.current().getJavaHome() : javaHome;
}
@Override
public GradleExecuter withJavaHome(File javaHome) {
this.javaHome = javaHome;
return this;
}
@Override
public GradleExecuter usingExecutable(String script) {
this.executable = script;
return this;
}
public String getExecutable() {
return executable;
}
@Override
public GradleExecuter withStdinPipe() {
return withStdinPipe(new PipedOutputStream());
}
@Override
public GradleExecuter withStdinPipe(PipedOutputStream stdInPipe) {
this.stdinPipe = stdInPipe;
return this;
}
public InputStream connectStdIn() {
try {
return stdinPipe == null ? SafeStreams.emptyInput() : new PipedInputStream(stdinPipe);
} catch (IOException e) {
throw UncheckedException.throwAsUncheckedException(e);
}
}
public PipedOutputStream getStdinPipe() {
return stdinPipe;
}
@Override
public GradleExecuter withDefaultCharacterEncoding(String defaultCharacterEncoding) {
this.defaultCharacterEncoding = defaultCharacterEncoding;
return this;
}
public String getDefaultCharacterEncoding() {
return defaultCharacterEncoding == null ? Charset.defaultCharset().name() : defaultCharacterEncoding;
}
@Override
public GradleExecuter withDefaultLocale(Locale defaultLocale) {
this.defaultLocale = defaultLocale;
return this;
}
public Locale getDefaultLocale() {
return defaultLocale;
}
public boolean isQuiet() {
return quiet;
}
@Override
public GradleExecuter withQuietLogging() {
quiet = true;
return this;
}
@Override
public GradleExecuter withTaskList() {
taskList = true;
return this;
}
@Override
public GradleExecuter withDependencyList() {
dependencyList = true;
return this;
}
@Override
public GradleExecuter withArguments(String... args) {
return withArguments(Arrays.asList(args));
}
@Override
public GradleExecuter withArguments(List<String> args) {
this.args.clear();
this.args.addAll(args);
return this;
}
@Override
public GradleExecuter withArgument(String arg) {
this.args.add(arg);
return this;
}
@Override
public GradleExecuter withEnvironmentVars(Map<String, ?> environment) {
environmentVars.clear();
for (Map.Entry<String, ?> entry : environment.entrySet()) {
environmentVars.put(entry.getKey(), entry.getValue().toString());
}
return this;
}
/**
 * Renders JVM args as a single space-separated string. Args containing a space are
 * wrapped in double quotes; args that themselves contain a double quote are rejected
 * (via assertion) since they could not be quoted unambiguously.
 */
protected String toJvmArgsString(Iterable<String> jvmArgs) {
    StringBuilder result = new StringBuilder();
    String separator = "";
    for (String jvmArg : jvmArgs) {
        result.append(separator);
        separator = " ";
        if (jvmArg.contains(" ")) {
            assert !jvmArg.contains("\"") : "jvmArg '" + jvmArg + "' contains '\"'";
            result.append('"').append(jvmArg).append('"');
        } else {
            result.append(jvmArg);
        }
    }
    return result.toString();
}
@Override
public GradleExecuter withTasks(String... names) {
return withTasks(Arrays.asList(names));
}
@Override
public GradleExecuter withTasks(List<String> names) {
tasks.clear();
tasks.addAll(names);
return this;
}
@Override
public GradleExecuter withDaemonIdleTimeoutSecs(int secs) {
daemonIdleTimeoutSecs = secs;
return this;
}
@Override
public GradleExecuter useOnlyRequestedJvmOpts() {
useOnlyRequestedJvmOpts = true;
return this;
}
@Override
public GradleExecuter withDaemonBaseDir(File daemonBaseDir) {
this.daemonBaseDir = daemonBaseDir;
return this;
}
@Override
public GradleExecuter requireIsolatedDaemons() {
return withDaemonBaseDir(testDirectoryProvider.getTestDirectory().file("daemon"));
}
@Override
public GradleExecuter withWorkerDaemonsExpirationDisabled() {
return withCommandLineGradleOpts("-Dorg.gradle.workers.internal.disable-daemons-expiration=true");
}
@Override
public boolean usesSharedDaemons() {
return isSharedDaemons();
}
@Override
public File getDaemonBaseDir() {
return daemonBaseDir;
}
@Override
public GradleExecuter requireDaemon() {
this.requireDaemon = true;
return this;
}
protected boolean isSharedDaemons() {
return daemonBaseDir.equals(buildContext.getDaemonBaseDir());
}
@Override
public boolean isUseDaemon() {
CliDaemonArgument cliDaemonArgument = resolveCliDaemonArgument();
if (cliDaemonArgument == NO_DAEMON || cliDaemonArgument == FOREGROUND) {
return false;
}
return requireDaemon || cliDaemonArgument == DAEMON;
}
@Override
public GradleExecuter withOwnUserHomeServices() {
useOwnUserHomeServices = true;
return this;
}
@Override
public GradleExecuter withWarningMode(WarningMode warningMode) {
this.warningMode = warningMode;
return this;
}
@Override
public GradleExecuter withConsole(ConsoleOutput consoleType) {
this.consoleType = consoleType;
return this;
}
@Override
public GradleExecuter withStacktraceDisabled() {
showStacktrace = false;
return this;
}
@Override
public GradleExecuter withWelcomeMessageEnabled() {
renderWelcomeMessage = true;
return this;
}
@Override
public GradleExecuter withRepositoryMirrors() {
beforeExecute(new Action<GradleExecuter>() {
@Override
public void execute(GradleExecuter gradleExecuter) {
usingInitScript(RepoScriptBlockUtil.createMirrorInitScript());
}
});
return this;
}
@Override
public GradleExecuter withGlobalRepositoryMirrors() {
beforeExecute(new Action<GradleExecuter>() {
@Override
public void execute(GradleExecuter gradleExecuter) {
TestFile userHome = testDirectoryProvider.getTestDirectory().file("user-home");
withGradleUserHomeDir(userHome);
userHome.file("init.d/mirrors.gradle").write(RepoScriptBlockUtil.mirrorInitScript());
}
});
return this;
}
@Override
public GradleExecuter withPluginRepositoryMirror() {
beforeExecute(new Action<GradleExecuter>() {
@Override
public void execute(GradleExecuter gradleExecuter) {
withArgument("-D" + PLUGIN_PORTAL_OVERRIDE_URL_PROPERTY + "=" + gradlePluginRepositoryMirrorUrl());
}
});
return this;
}
/**
* Performs cleanup at completion of the test.
*/
public void cleanup() {
stopRunningBuilds();
cleanupIsolatedDaemons();
}
private void stopRunningBuilds() {
for (GradleHandle handle : running) {
try {
handle.abort().waitForExit();
} catch (Exception e) {
getLogger().warn("Problem stopping running build", e);
}
}
}
private void cleanupIsolatedDaemons() {
for (File baseDir : isolatedDaemonBaseDirs) {
try {
new DaemonLogsAnalyzer(baseDir, gradleVersion.getVersion()).killAll();
} catch (Exception e) {
getLogger().warn("Problem killing isolated daemons of Gradle version " + gradleVersion + " in " + baseDir, e);
}
}
}
enum CliDaemonArgument {
NOT_DEFINED,
DAEMON,
NO_DAEMON,
FOREGROUND
}
/**
 * Scans the command-line args from last to first for a daemon-related flag, so the
 * last occurrence wins (mirroring Gradle's own CLI precedence). Returns
 * {@code NOT_DEFINED} when no such flag is present.
 */
protected CliDaemonArgument resolveCliDaemonArgument() {
    for (int i = args.size() - 1; i >= 0; i--) {
        switch (args.get(i)) {
            case "--daemon":
                return DAEMON;
            case "--no-daemon":
                return NO_DAEMON;
            case "--foreground":
                return FOREGROUND;
            default:
                break;
        }
    }
    return NOT_DEFINED;
}
private boolean noDaemonArgumentGiven() {
return resolveCliDaemonArgument() == NOT_DEFINED;
}
/**
 * Builds the complete command-line argument list for the invocation: file locations,
 * logging/daemon/console flags, then the caller-supplied args and task names (which
 * come last so they can override anything generated here).
 */
protected List<String> getAllArgs() {
    List<String> allArgs = new ArrayList<String>();
    if (buildScript != null) {
        allArgs.add("--build-file");
        allArgs.add(buildScript.getAbsolutePath());
    }
    if (projectDir != null) {
        allArgs.add("--project-dir");
        allArgs.add(projectDir.getAbsolutePath());
    }
    for (File initScript : initScripts) {
        allArgs.add("--init-script");
        allArgs.add(initScript.getAbsolutePath());
    }
    if (settingsFile != null) {
        allArgs.add("--settings-file");
        allArgs.add(settingsFile.getAbsolutePath());
    }
    if (quiet) {
        allArgs.add("--quiet");
    }
    // Only add a daemon flag when the test didn't already pass one explicitly.
    if (noDaemonArgumentGiven()) {
        if (isUseDaemon()) {
            allArgs.add("--daemon");
        } else {
            allArgs.add("--no-daemon");
        }
    }
    if (showStacktrace) {
        allArgs.add("--stacktrace");
    }
    if (taskList) {
        allArgs.add("tasks");
    }
    if (dependencyList) {
        allArgs.add("dependencies");
    }
    // Guarantee a settings file exists so Gradle doesn't search above the test dir.
    if (settingsFile == null && !ignoreMissingSettingsFile) {
        ensureSettingsFileAvailable();
    }
    // This will cause problems on Windows if the path to the Gradle executable that is used has a space in it (e.g. the user's dir is c:/Users/Luke Daley/)
    // This is fundamentally a windows issue: You can't have arguments with spaces in them if the path to the batch script has a space
    // We could work around this by setting -Dgradle.user.home but GRADLE-1730 (which affects 1.0-milestone-3) means that that
    // is problematic as well. For now, we just don't support running the int tests from a path with a space in it on Windows.
    // When we stop testing against M3 we should change to use the system property.
    if (getGradleUserHomeDir() != null) {
        allArgs.add("--gradle-user-home");
        allArgs.add(getGradleUserHomeDir().getAbsolutePath());
    }
    if (consoleType != null) {
        allArgs.add("--console=" + consoleType.toString().toLowerCase());
    }
    if (warningMode != null) {
        allArgs.add("--warning-mode=" + warningMode.toString().toLowerCase(Locale.ENGLISH));
    }
    allArgs.addAll(args);
    allArgs.addAll(tasks);
    return allArgs;
}
@Override
public GradleExecuter ignoreMissingSettingsFile() {
ignoreMissingSettingsFile = true;
return this;
}
/**
 * Walks up from the working directory (never above the test directory) looking for an
 * existing settings file, also checking each level's "master" subdirectory. If none is
 * found, creates an empty settings.gradle in the working dir — presumably to keep
 * Gradle from searching outside the test sandbox (TODO confirm).
 */
private void ensureSettingsFileAvailable() {
    TestFile workingDir = new TestFile(getWorkingDir());
    TestFile dir = workingDir;
    while (dir != null && getTestDirectoryProvider().getTestDirectory().isSelfOrDescendent(dir)) {
        if (hasSettingsFile(dir) || hasSettingsFile(dir.file("master"))) {
            return;
        }
        dir = dir.getParentFile();
    }
    workingDir.createFile("settings.gradle");
}
/**
 * Returns {@code true} when the given directory contains a Groovy or Kotlin DSL
 * settings file (settings.gradle / settings.gradle.kts).
 */
private boolean hasSettingsFile(TestFile dir) {
    return dir.isDirectory()
        && (dir.file("settings.gradle").isFile() || dir.file("settings.gradle.kts").isFile());
}
/**
 * Returns the set of system properties that should be set on every JVM used by this executer.
 */
protected Map<String, String> getImplicitJvmSystemProperties() {
    Map<String, String> properties = new LinkedHashMap<String, String>();
    if (getUserHomeDir() != null) {
        properties.put("user.home", getUserHomeDir().getAbsolutePath());
    }
    // Daemon idle timeout is configured in milliseconds; the field stores seconds.
    properties.put(DaemonBuildOptions.IdleTimeoutOption.GRADLE_PROPERTY, "" + (daemonIdleTimeoutSecs * 1000));
    properties.put(DaemonBuildOptions.BaseDirOption.GRADLE_PROPERTY, daemonBaseDir.getAbsolutePath());
    if (!noExplicitNativeServicesDir) {
        properties.put(NativeServices.NATIVE_DIR_OVERRIDE, buildContext.getNativeServicesDir().getAbsolutePath());
    }
    properties.put(LoggingDeprecatedFeatureHandler.ORG_GRADLE_DEPRECATION_TRACE_PROPERTY_NAME, Boolean.toString(fullDeprecationStackTrace));
    // User-home services can only be reused when the test runs against the shared Gradle user home.
    boolean useCustomGradleUserHomeDir = gradleUserHomeDir != null && !gradleUserHomeDir.equals(buildContext.getGradleUserHomeDir());
    if (useOwnUserHomeServices || useCustomGradleUserHomeDir) {
        properties.put(REUSE_USER_HOME_SERVICES, "false");
    }
    if (!useCustomGradleUserHomeDir) {
        TestFile generatedApiJarCacheDir = buildContext.getGradleGeneratedApiJarCacheDir();
        if (generatedApiJarCacheDir != null) {
            properties.put(DefaultGeneratedGradleJarCache.BASE_DIR_OVERRIDE_PROPERTY, generatedApiJarCacheDir.getAbsolutePath());
        }
    }
    if (!noExplicitTmpDir) {
        if (tmpDir == null) {
            tmpDir = getDefaultTmpDir();
        }
        String tmpDirPath = tmpDir.createDir().getAbsolutePath();
        // Only point java.io.tmpdir at a path containing spaces when the distribution
        // under test is known to handle spaces in JVM opts.
        if (!tmpDirPath.contains(" ") || (getDistribution().isSupportsSpacesInGradleAndJavaOpts() && supportsWhiteSpaceInEnvVars())) {
            properties.put("java.io.tmpdir", tmpDirPath);
        }
    }
    properties.put("file.encoding", getDefaultCharacterEncoding());
    Locale locale = getDefaultLocale();
    if (locale != null) {
        properties.put("user.language", locale.getLanguage());
        properties.put("user.country", locale.getCountry());
        properties.put("user.variant", locale.getVariant());
    }
    if (eagerClassLoaderCreationChecksOn) {
        properties.put(DefaultClassLoaderScope.STRICT_MODE_PROPERTY, "true");
    }
    if (interactive) {
        properties.put(TestOverrideConsoleDetector.INTERACTIVE_TOGGLE, "true");
    }
    properties.put(DefaultCommandLineActionFactory.WELCOME_MESSAGE_ENABLED_SYSTEM_PROPERTY, Boolean.toString(renderWelcomeMessage));
    return properties;
}
protected boolean supportsWhiteSpaceInEnvVars() {
return true;
}
@Override
public final GradleHandle start() {
assert afterExecute.isEmpty() : "afterExecute actions are not implemented for async execution";
return startHandle();
}
protected GradleHandle startHandle() {
fireBeforeExecute();
assertCanExecute();
collectStateBeforeExecution();
try {
GradleHandle handle = createGradleHandle();
running.add(handle);
return handle;
} finally {
reset();
}
}
@Override
public final ExecutionResult run() {
    // Same pre-flight sequence as startHandle()/runWithFailure(): listeners, sanity
    // checks, then daemon-dir bookkeeping.
    fireBeforeExecute();
    assertCanExecute();
    collectStateBeforeExecution();
    try {
        ExecutionResult result = doRun();
        // With an attached console, stderr is merged into stdout, so wrap the result
        // to scrape errors from there.
        if (errorsShouldAppearOnStdout()) {
            result = new ErrorsOnStdoutScrapingExecutionResult(result);
        }
        afterExecute.execute(this);
        return result;
    } finally {
        // Always reset the executer state, even when doRun() or the after-hooks throw.
        finished();
    }
}
protected void finished() {
reset();
}
@Override
public final ExecutionFailure runWithFailure() {
fireBeforeExecute();
assertCanExecute();
collectStateBeforeExecution();
try {
ExecutionFailure executionFailure = doRunWithFailure();
if (errorsShouldAppearOnStdout()) {
executionFailure = new ErrorsOnStdoutScrapingExecutionFailure(executionFailure);
}
afterExecute.execute(this);
return executionFailure;
} finally {
finished();
}
}
private void collectStateBeforeExecution() {
if (!isSharedDaemons()) {
isolatedDaemonBaseDirs.add(daemonBaseDir);
}
}
private void fireBeforeExecute() {
beforeExecute.execute(this);
}
protected GradleHandle createGradleHandle() {
throw new UnsupportedOperationException(String.format("%s does not support running asynchronously.", getClass().getSimpleName()));
}
protected abstract ExecutionResult doRun();
protected abstract ExecutionFailure doRunWithFailure();
@Override
public GradleExecuter withCommandLineGradleOpts(Iterable<String> jvmOpts) {
CollectionUtils.addAll(commandLineJvmOpts, jvmOpts);
return this;
}
@Override
public GradleExecuter withCommandLineGradleOpts(String... jvmOpts) {
CollectionUtils.addAll(commandLineJvmOpts, jvmOpts);
return this;
}
@Override
public AbstractGradleExecuter withBuildJvmOpts(String... jvmOpts) {
CollectionUtils.addAll(buildJvmOpts, jvmOpts);
return this;
}
@Override
public GradleExecuter withBuildJvmOpts(Iterable<String> jvmOpts) {
CollectionUtils.addAll(buildJvmOpts, jvmOpts);
return this;
}
@Override
public GradleExecuter withBuildCacheEnabled() {
return withArgument("--build-cache");
}
/**
 * Returns an action that validates an execution's output against this executer's
 * deprecation/stack-trace expectations. The anonymous action snapshots the current
 * expectation fields at creation time, so later mutations of the executer do not
 * affect a result already being checked.
 */
protected Action<ExecutionResult> getResultAssertion() {
    return new Action<ExecutionResult>() {
        // Snapshots of the executer's expectation state (see class body fields).
        private int expectedGenericDeprecationWarnings = AbstractGradleExecuter.this.expectedGenericDeprecationWarnings;
        private final List<String> expectedDeprecationWarnings = new ArrayList<>(AbstractGradleExecuter.this.expectedDeprecationWarnings);
        private final boolean expectStackTraces = !AbstractGradleExecuter.this.stackTraceChecksOn;
        private final boolean checkDeprecations = AbstractGradleExecuter.this.checkDeprecations;
        @Override
        public void execute(ExecutionResult executionResult) {
            String normalizedOutput = executionResult.getNormalizedOutput();
            String error = executionResult.getError();
            boolean executionFailure = isExecutionFailure(executionResult);
            // for tests using rich console standard out and error are combined in output of execution result
            if (executionFailure) {
                normalizedOutput = removeExceptionStackTraceForFailedExecution(normalizedOutput);
            }
            validate(normalizedOutput, "Standard output");
            if (executionFailure) {
                error = removeExceptionStackTraceForFailedExecution(error);
            }
            validate(error, "Standard error");
            // validate() removes each matched warning, so anything left over was expected but never seen.
            if (!expectedDeprecationWarnings.isEmpty()) {
                throw new AssertionError(String.format("Expected the following deprecation warnings:%n%s",
                    expectedDeprecationWarnings.stream()
                        .map(warning -> " - " + warning)
                        .collect(Collectors.joining("\n"))));
            }
            if (expectedGenericDeprecationWarnings > 0) {
                throw new AssertionError(String.format("Expected %d more deprecation warnings", expectedGenericDeprecationWarnings));
            }
        }
        // NOTE(review): appears unused within this anonymous class — possibly left over
        // or referenced elsewhere; confirm before removing.
        private boolean isErrorOutEmpty(String error) {
            //remove SLF4J error out like 'Class path contains multiple SLF4J bindings.'
            //See: https://github.com/gradle/performance/issues/375#issuecomment-315103861
            return Strings.isNullOrEmpty(error.replaceAll("(?m)^SLF4J: .*", "").trim());
        }
        private boolean isExecutionFailure(ExecutionResult executionResult) {
            return executionResult instanceof ExecutionFailure;
        }
        // Axe everything after the expected exception
        private String removeExceptionStackTraceForFailedExecution(String text) {
            int pos = text.indexOf("* Exception is:");
            if (pos >= 0) {
                text = text.substring(0, pos);
            }
            return text;
        }
        /**
         * Scans output line by line for unexpected deprecation warnings and stray stack
         * traces, skipping known-benign patterns (javac/kotlinc warnings, help text,
         * variant description blocks) and consuming expected warnings as they are found.
         */
        private void validate(String output, String displayName) {
            List<String> lines;
            try {
                lines = CharSource.wrap(output).readLines();
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
            int i = 0;
            boolean insideVariantDescriptionBlock = false;
            while (i < lines.size()) {
                String line = lines.get(i);
                // Track "variant \"...\" [ ... ]" blocks, whose content can look like stack traces.
                if (insideVariantDescriptionBlock && line.contains("]")) {
                    insideVariantDescriptionBlock = false;
                } else if (!insideVariantDescriptionBlock && line.contains("variant \"")) {
                    insideVariantDescriptionBlock = true;
                }
                if (line.matches(".*use(s)? or override(s)? a deprecated API\\.")) {
                    // A javac warning, ignore
                    i++;
                } else if (line.matches(".*w: .* is deprecated\\..*")) {
                    // A kotlinc warning, ignore
                    i++;
                } else if (isDeprecationMessageInHelpDescription(line)) {
                    i++;
                } else if (expectedDeprecationWarnings.remove(line)) {
                    // Deprecation warning is expected
                    i++;
                    i = skipStackTrace(lines, i);
                } else if (line.matches(".*\\s+deprecated.*")) {
                    // An unanticipated deprecation warning: allowed only while the generic budget lasts.
                    if (checkDeprecations && expectedGenericDeprecationWarnings <= 0) {
                        throw new AssertionError(String.format("%s line %d contains a deprecation warning: %s%n=====%n%s%n=====%n", displayName, i + 1, line, output));
                    }
                    expectedGenericDeprecationWarnings--;
                    // skip over stack trace
                    i++;
                    i = skipStackTrace(lines, i);
                } else if (!expectStackTraces && !insideVariantDescriptionBlock && STACK_TRACE_ELEMENT.matcher(line).matches() && i < lines.size() - 1 && STACK_TRACE_ELEMENT.matcher(lines.get(i + 1)).matches()) {
                    // 2 or more lines that look like stack trace elements
                    throw new AssertionError(String.format("%s line %d contains an unexpected stack trace: %s%n=====%n%s%n=====%n", displayName, i + 1, line, output));
                } else {
                    i++;
                }
            }
        }
        // Advances past a run of consecutive stack-trace-looking lines starting at i.
        private int skipStackTrace(List<String> lines, int i) {
            while (i < lines.size() && STACK_TRACE_ELEMENT.matcher(lines.get(i)).matches()) {
                i++;
            }
            return i;
        }
        private boolean isDeprecationMessageInHelpDescription(String s) {
            return s.matches(".*\\[deprecated.*]");
        }
    };
}
@Override
public GradleExecuter expectDeprecationWarning() {
return expectDeprecationWarnings(1);
}
@Override
public GradleExecuter expectDeprecationWarnings(int count) {
Preconditions.checkState(expectedGenericDeprecationWarnings == 0, "expected deprecation count is already set for this execution");
Preconditions.checkArgument(count > 0, "expected deprecation count must be positive");
expectedGenericDeprecationWarnings = count;
return this;
}
@Override
public GradleExecuter expectDeprecationWarning(String warning) {
expectedDeprecationWarnings.add(warning);
return this;
}
@Override
public GradleExecuter noDeprecationChecks() {
checkDeprecations = false;
return this;
}
@Override
public GradleExecuter withEagerClassLoaderCreationCheckDisabled() {
eagerClassLoaderCreationChecksOn = false;
return this;
}
@Override
public GradleExecuter withStackTraceChecksDisabled() {
stackTraceChecksOn = false;
return this;
}
protected TestFile getDefaultTmpDir() {
return buildContext.getTmpDir().createDir();
}
@Override
public GradleExecuter noExtraLogging() {
this.allowExtraLogging = false;
return this;
}
public boolean isAllowExtraLogging() {
return allowExtraLogging;
}
public boolean isRequiresGradleDistribution() {
return requiresGradleDistribution;
}
@Override
public GradleExecuter requireGradleDistribution() {
this.requiresGradleDistribution = true;
return this;
}
@Override
public GradleExecuter startBuildProcessInDebugger(boolean flag) {
debug = flag;
return this;
}
@Override
public GradleExecuter startLauncherInDebugger(boolean flag) {
debugLauncher = flag;
return this;
}
@Override
public boolean isDebugLauncher() {
return debugLauncher;
}
@Override
public GradleExecuter withProfiler(String args) {
profiler = args;
return this;
}
@Override
public GradleExecuter withForceInteractive(boolean flag) {
interactive = flag;
return this;
}
@Override
public GradleExecuter withNoExplicitTmpDir() {
noExplicitTmpDir = true;
return this;
}
@Override
public GradleExecuter withNoExplicitNativeServicesDir() {
noExplicitNativeServicesDir = true;
return this;
}
@Override
public GradleExecuter withFullDeprecationStackTraceDisabled() {
fullDeprecationStackTrace = false;
return this;
}
@Override
public boolean isDebug() {
return debug;
}
@Override
public boolean isProfile() {
return !profiler.isEmpty();
}
protected static class GradleInvocation {
final Map<String, String> environmentVars = new HashMap<String, String>();
final List<String> args = new ArrayList<String>();
// JVM args that must be used for the build JVM
final List<String> buildJvmArgs = new ArrayList<String>();
// JVM args that must be used to fork a JVM
final List<String> launcherJvmArgs = new ArrayList<String>();
// Implicit JVM args that should be used to fork a JVM
final List<String> implicitLauncherJvmArgs = new ArrayList<String>();
}
@Override
public void stop() {
cleanup();
}
@Override
public GradleExecuter withDurationMeasurement(DurationMeasurement durationMeasurement) {
this.durationMeasurement = durationMeasurement;
return this;
}
protected void startMeasurement() {
if (durationMeasurement != null) {
durationMeasurement.start();
}
}
protected void stopMeasurement() {
if (durationMeasurement != null) {
durationMeasurement.stop();
}
}
protected DurationMeasurement getDurationMeasurement() {
return durationMeasurement;
}
private static LoggingServiceRegistry newCommandLineProcessLogging() {
LoggingServiceRegistry loggingServices = LoggingServiceRegistry.newEmbeddableLogging();
LoggingManagerInternal rootLoggingManager = loggingServices.get(DefaultLoggingManagerFactory.class).getRoot();
rootLoggingManager.attachSystemOutAndErr();
return loggingServices;
}
@Override
public GradleExecuter withTestConsoleAttached() {
return withTestConsoleAttached(ConsoleAttachment.ATTACHED);
}
@Override
public GradleExecuter withTestConsoleAttached(ConsoleAttachment consoleAttachment) {
this.consoleAttachment = consoleAttachment;
return configureConsoleCommandLineArgs();
}
protected GradleExecuter configureConsoleCommandLineArgs() {
if (consoleAttachment == ConsoleAttachment.NOT_ATTACHED) {
return this;
} else {
return withCommandLineGradleOpts(consoleAttachment.getConsoleMetaData().getCommandLineArgument());
}
}
private boolean errorsShouldAppearOnStdout() {
// If stdout and stderr are attached to the console
return consoleAttachment.isStderrAttached() && consoleAttachment.isStdoutAttached();
}
}
| Enable VFS by default in tests
| subprojects/internal-integ-testing/src/main/groovy/org/gradle/integtests/fixtures/executer/AbstractGradleExecuter.java | Enable VFS by default in tests |
|
Java | apache-2.0 | f3a435499664da59ad1da284af9a4ff7ed8560be | 0 | crockpotveggies/deeplearning4j,crockpotveggies/deeplearning4j,xuzhongxing/deeplearning4j,xuzhongxing/deeplearning4j,crockpotveggies/deeplearning4j,xuzhongxing/deeplearning4j,xuzhongxing/deeplearning4j,xuzhongxing/deeplearning4j,crockpotveggies/deeplearning4j,crockpotveggies/deeplearning4j,crockpotveggies/deeplearning4j,xuzhongxing/deeplearning4j | package org.deeplearning4j.scaleout.perform.models.word2vec.iterator;
import org.deeplearning4j.bagofwords.vectorizer.TextVectorizer;
import org.deeplearning4j.models.embeddings.WeightLookupTable;
import org.deeplearning4j.models.embeddings.inmemory.InMemoryLookupTable;
import org.deeplearning4j.models.word2vec.VocabWord;
import org.deeplearning4j.models.word2vec.wordstore.VocabCache;
import org.deeplearning4j.models.word2vec.wordstore.inmemory.InMemoryLookupCache;
import org.deeplearning4j.scaleout.api.statetracker.NewUpdateListener;
import org.deeplearning4j.scaleout.api.statetracker.StateTracker;
import org.deeplearning4j.scaleout.job.Job;
import org.deeplearning4j.scaleout.job.JobIterator;
import org.deeplearning4j.scaleout.perform.models.word2vec.Word2VecResult;
import org.deeplearning4j.scaleout.perform.models.word2vec.Word2VecWork;
import org.deeplearning4j.text.invertedindex.InvertedIndex;
import java.io.Serializable;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
/**
 * Word2vec job iterator.
 *
 * <p>Wraps a batched sentence source (an iterator of sentence batches, a
 * {@link TextVectorizer}'s index, or an {@link InvertedIndex}) and hands out
 * {@link Word2VecWork} jobs, applying each finished job's weight deltas back onto the
 * shared lookup table via a state-tracker update listener.</p>
 *
 * @author Adam Gibson
 */
public class Word2VecJobIterator implements JobIterator {

    private Iterator<List<List<VocabWord>>> sentenceIterator;
    private WeightLookupTable table;
    private VocabCache cache;
    // Default number of sentences per job batch.
    private int batchSize = 100;

    /**
     * @param sentenceIterator pre-batched sentence source
     * @param table            shared weight lookup table updated as results arrive
     * @param cache            vocabulary used to resolve word indices
     * @param stateTracker     tracker on which the result-applying listener is registered
     * @param batchSize        number of sentences per job
     */
    public Word2VecJobIterator(Iterator<List<List<VocabWord>>> sentenceIterator, WeightLookupTable table, VocabCache cache, StateTracker stateTracker, int batchSize) {
        this.sentenceIterator = sentenceIterator;
        this.table = table;
        this.cache = cache;
        addListener(stateTracker);
        this.batchSize = batchSize;
    }

    public Word2VecJobIterator(TextVectorizer textVectorizer, WeightLookupTable table, VocabCache cache, StateTracker stateTracker, int batchSize) {
        this.sentenceIterator = textVectorizer.index().batchIter(batchSize);
        this.cache = cache;
        this.table = table;
        addListener(stateTracker);
        this.batchSize = batchSize;
    }

    // Convenience overloads delegating to the batchSize variants with the default of 100.
    public Word2VecJobIterator(Iterator<List<List<VocabWord>>> sentenceIterator, WeightLookupTable table, VocabCache cache, StateTracker stateTracker) {
        this(sentenceIterator, table, cache, stateTracker, 100);
    }

    public Word2VecJobIterator(TextVectorizer textVectorizer, WeightLookupTable table, VocabCache cache, StateTracker stateTracker) {
        this(textVectorizer, table, cache, stateTracker, 100);
    }

    public Word2VecJobIterator(InvertedIndex invertedIndex, WeightLookupTable table, VocabCache cache, StateTracker stateTracker, int batchSize) {
        this(invertedIndex.batchIter(batchSize), table, cache, stateTracker, batchSize);
    }

    /**
     * Registers a listener that folds each finished job's syn0/syn1 (and, when present,
     * syn1Neg) per-word deltas back into the in-memory lookup table.
     */
    private void addListener(StateTracker stateTracker) {
        stateTracker.addUpdateListener(new NewUpdateListener() {
            @Override
            public void onUpdate(Serializable update) {
                Job j = (Job) update;
                // Unchecked: the job protocol carries Word2VecResult collections here.
                Collection<Word2VecResult> work = (Collection<Word2VecResult>) j.getResult();
                if (work == null || work.isEmpty())
                    return;
                InMemoryLookupTable l = (InMemoryLookupTable) table;
                for (Word2VecResult work1 : work) {
                    for (String s : work1.getSyn0Change().keySet()) {
                        l.getSyn0().getRow(cache.indexOf(s)).addi(work1.getSyn0Change().get(s));
                        l.getSyn1().getRow(cache.indexOf(s)).addi(work1.getSyn1Change().get(s));
                        // Negative-sampling weights only exist when configured.
                        if (l.getSyn1Neg() != null)
                            l.getSyn1Neg().getRow(cache.indexOf(s)).addi(work1.getNegativeChange().get(s));
                    }
                }
            }
        });
    }

    // Wraps one sentence batch into a unit of work carrying the current table and cache.
    private Word2VecWork create(List<List<VocabWord>> sentence) {
        return new Word2VecWork((InMemoryLookupTable) table, (InMemoryLookupCache) cache, sentence);
    }

    @Override
    public Job next(String workerId) {
        List<List<VocabWord>> next = sentenceIterator.next();
        return new Job(create(next), workerId);
    }

    @Override
    public Job next() {
        List<List<VocabWord>> next = sentenceIterator.next();
        return new Job(create(next), "");
    }

    @Override
    public boolean hasNext() {
        return sentenceIterator.hasNext();
    }

    @Override
    public void reset() {
        // No-op: the underlying batch iterator cannot be rewound.
    }
}
| deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/scaleout/perform/models/word2vec/iterator/Word2VecJobIterator.java | package org.deeplearning4j.scaleout.perform.models.word2vec.iterator;
import org.deeplearning4j.bagofwords.vectorizer.TextVectorizer;
import org.deeplearning4j.models.embeddings.WeightLookupTable;
import org.deeplearning4j.models.embeddings.inmemory.InMemoryLookupTable;
import org.deeplearning4j.models.word2vec.VocabWord;
import org.deeplearning4j.models.word2vec.wordstore.VocabCache;
import org.deeplearning4j.models.word2vec.wordstore.inmemory.InMemoryLookupCache;
import org.deeplearning4j.scaleout.api.statetracker.NewUpdateListener;
import org.deeplearning4j.scaleout.api.statetracker.StateTracker;
import org.deeplearning4j.scaleout.job.Job;
import org.deeplearning4j.scaleout.job.JobIterator;
import org.deeplearning4j.scaleout.perform.models.word2vec.Word2VecResult;
import org.deeplearning4j.scaleout.perform.models.word2vec.Word2VecWork;
import org.deeplearning4j.text.invertedindex.InvertedIndex;
import java.io.Serializable;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
/**
 * Word2vec job iterator
 *
 * <p>NOTE(review): earlier revision of this class — the batch size is hard-coded to
 * 1000 here rather than configurable.</p>
 *
 * @author Adam Gibson
 */
public class Word2VecJobIterator implements JobIterator {
    private Iterator<List<List<VocabWord>>> sentenceIterator;
    private WeightLookupTable table;
    private VocabCache cache;
    public Word2VecJobIterator(Iterator<List<List<VocabWord>>> sentenceIterator,WeightLookupTable table,VocabCache cache,StateTracker stateTracker) {
        this.sentenceIterator = sentenceIterator;
        this.table = table;
        this.cache = cache;
        addListener(stateTracker);
    }
    public Word2VecJobIterator(TextVectorizer textVectorizer,WeightLookupTable table,VocabCache cache,StateTracker stateTracker) {
        // Batch size fixed at 1000 sentences per job.
        this.sentenceIterator = textVectorizer.index().batchIter(1000);
        this.cache = cache;
        this.table = table;
        addListener(stateTracker);
    }
    public Word2VecJobIterator(InvertedIndex invertedIndex, WeightLookupTable table,VocabCache cache,StateTracker stateTracker) {
        this.sentenceIterator = invertedIndex.batchIter(1000);
        this.cache = cache;
        this.table = table;
        addListener(stateTracker);
    }
    // Registers a listener that applies each finished job's per-word weight deltas
    // (syn0/syn1, plus syn1Neg when present) back onto the in-memory lookup table.
    private void addListener(StateTracker stateTracker) {
        stateTracker.addUpdateListener(new NewUpdateListener() {
            @Override
            public void onUpdate(Serializable update) {
                Job j = (Job) update;
                Collection<Word2VecResult> work = (Collection<Word2VecResult>) j.getResult();
                if(work == null || work.isEmpty())
                    return;
                InMemoryLookupTable l = (InMemoryLookupTable) table;
                for(Word2VecResult work1 : work) {
                    for(String s : work1.getSyn0Change().keySet()) {
                        l.getSyn0().getRow(cache.indexOf(s)).addi(work1.getSyn0Change().get(s));
                        l.getSyn1().getRow(cache.indexOf(s)).addi(work1.getSyn1Change().get(s));
                        if(l.getSyn1Neg() != null)
                            l.getSyn1Neg().getRow(cache.indexOf(s)).addi(work1.getNegativeChange().get(s));
                    }
                }
            }
        });
    }
    // Wraps one sentence batch into a unit of work carrying the current table and cache.
    private Word2VecWork create(List<List<VocabWord>> sentence) {
        Word2VecWork work = new Word2VecWork((InMemoryLookupTable) table,(InMemoryLookupCache) cache,sentence);
        return work;
    }
    @Override
    public Job next(String workerId) {
        List<List<VocabWord>> next = sentenceIterator.next();
        return new Job(create(next),workerId);
    }
    @Override
    public Job next() {
        List<List<VocabWord>> next = sentenceIterator.next();
        return new Job(create(next),"");
    }
    @Override
    public boolean hasNext() {
        return sentenceIterator.hasNext();
    }
    @Override
    public void reset() {
        // No-op: the underlying batch iterator cannot be rewound.
    }
}
| multiprocessing fixed
| deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/scaleout/perform/models/word2vec/iterator/Word2VecJobIterator.java | multiprocessing fixed |
|
Java | apache-2.0 | b2c0c7b3dea785de34cf95e6e43d24cee295e442 | 0 | porcelli-forks/kie-wb-common,manstis/kie-wb-common,romartin/kie-wb-common,porcelli-forks/kie-wb-common,romartin/kie-wb-common,manstis/kie-wb-common,dgutierr/kie-wb-common,romartin/kie-wb-common,droolsjbpm/kie-wb-common,cristianonicolai/kie-wb-common,ederign/kie-wb-common,jhrcek/kie-wb-common,manstis/kie-wb-common,porcelli-forks/kie-wb-common,porcelli-forks/kie-wb-common,ederign/kie-wb-common,droolsjbpm/kie-wb-common,jhrcek/kie-wb-common,manstis/kie-wb-common,manstis/kie-wb-common,dgutierr/kie-wb-common,scandihealth/kie-wb-common,ederign/kie-wb-common,ederign/kie-wb-common,cristianonicolai/kie-wb-common,jhrcek/kie-wb-common,psiroky/kie-wb-common,romartin/kie-wb-common,romartin/kie-wb-common,jhrcek/kie-wb-common,scandihealth/kie-wb-common,nmirasch/kie-wb-common | /*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.services.datamodeller.driver;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.nio.charset.Charset;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.uberfire.io.IOService;
import org.uberfire.java.nio.IOException;
import org.uberfire.java.nio.channels.SeekableByteChannel;
import org.uberfire.java.nio.file.AtomicMoveNotSupportedException;
import org.uberfire.java.nio.file.CopyOption;
import org.uberfire.java.nio.file.DeleteOption;
import org.uberfire.java.nio.file.DirectoryNotEmptyException;
import org.uberfire.java.nio.file.DirectoryStream;
import org.uberfire.java.nio.file.FileAlreadyExistsException;
import org.uberfire.java.nio.file.FileSystem;
import org.uberfire.java.nio.file.FileSystemAlreadyExistsException;
import org.uberfire.java.nio.file.FileSystemNotFoundException;
import org.uberfire.java.nio.file.Files;
import org.uberfire.java.nio.file.NoSuchFileException;
import org.uberfire.java.nio.file.NotDirectoryException;
import org.uberfire.java.nio.file.OpenOption;
import org.uberfire.java.nio.file.Option;
import org.uberfire.java.nio.file.Path;
import org.uberfire.java.nio.file.ProviderNotFoundException;
import org.uberfire.java.nio.file.attribute.FileAttribute;
import org.uberfire.java.nio.file.attribute.FileAttributeView;
import org.uberfire.java.nio.file.attribute.FileTime;
public class IOServiceMock
implements IOService {
@Override
public void dispose() {
}
@Override
public void startBatch( FileSystem fs ) {
}
@Override
public void startBatch( FileSystem[] fs,
Option... options ) {
}
@Override
public void startBatch( FileSystem fileSystem,
Option... options ) {
}
@Override
public void startBatch( FileSystem... fileSystems ) {
}
@Override
public void endBatch() {
}
@Override
public FileAttribute<?>[] convert( Map<String, ?> stringMap ) {
return new FileAttribute<?>[ 0 ];
}
@Override
public Path get( String s,
String... strings ) throws IllegalArgumentException {
return null;
}
@Override
public Path get( URI uri ) throws IllegalArgumentException, FileSystemNotFoundException, SecurityException {
return null;
}
@Override
public Iterable<FileSystem> getFileSystems() {
return null;
}
@Override
public FileSystem getFileSystem( URI uri ) throws IllegalArgumentException, FileSystemNotFoundException, ProviderNotFoundException, SecurityException {
return null;
}
@Override
public FileSystem newFileSystem( URI uri,
Map<String, ?> stringMap ) throws IllegalArgumentException, FileSystemAlreadyExistsException, ProviderNotFoundException, IOException, SecurityException {
return null;
}
@Override
public void onNewFileSystem( NewFileSystemListener newFileSystemListener ) {
}
@Override
public InputStream newInputStream( Path path,
OpenOption... openOptions ) throws IllegalArgumentException, NoSuchFileException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public OutputStream newOutputStream( Path path,
OpenOption... openOptions ) throws IllegalArgumentException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public SeekableByteChannel newByteChannel( Path path,
OpenOption... openOptions ) throws IllegalArgumentException, UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public SeekableByteChannel newByteChannel( Path path,
Set<? extends OpenOption> openOptions,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public DirectoryStream<Path> newDirectoryStream( Path path ) throws IllegalArgumentException, NotDirectoryException, IOException, SecurityException {
return null;
}
@Override
public DirectoryStream<Path> newDirectoryStream( Path path,
DirectoryStream.Filter<Path> pathFilter ) throws IllegalArgumentException, NotDirectoryException, IOException, SecurityException {
return Files.newDirectoryStream( path, pathFilter );
}
@Override
public Path createFile( Path path,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public Path createDirectory( Path path,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public Path createDirectories( Path path,
FileAttribute<?>... fileAttributes ) throws UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public Path createDirectory( Path path,
Map<String, ?> stringMap ) throws IllegalArgumentException, UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public Path createDirectories( Path path,
Map<String, ?> stringMap ) throws UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public void delete( Path path,
DeleteOption... deleteOptions ) throws IllegalArgumentException, NoSuchFileException, DirectoryNotEmptyException, IOException, SecurityException {
}
@Override
public boolean deleteIfExists( Path path,
DeleteOption... deleteOptions ) throws IllegalArgumentException, DirectoryNotEmptyException, IOException, SecurityException {
return false;
}
@Override
public Path createTempFile( String s,
String s2,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public Path createTempFile( Path path,
String s,
String s2,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public Path createTempDirectory( String s,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public Path createTempDirectory( Path path,
String s,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public Path copy( Path path,
Path path2,
CopyOption... copyOptions ) throws UnsupportedOperationException, FileAlreadyExistsException, DirectoryNotEmptyException, IOException, SecurityException {
return null;
}
@Override
public Path move( Path path,
Path path2,
CopyOption... copyOptions ) throws UnsupportedOperationException, FileAlreadyExistsException, DirectoryNotEmptyException, AtomicMoveNotSupportedException, IOException, SecurityException {
return null;
}
@Override
public <V extends FileAttributeView> V getFileAttributeView( Path path,
Class<V> vClass ) throws IllegalArgumentException {
return null;
}
@Override
public Map<String, Object> readAttributes( Path path ) throws UnsupportedOperationException, NoSuchFileException, IllegalArgumentException, IOException, SecurityException {
return null;
}
@Override
public Map<String, Object> readAttributes( Path path,
String s ) throws UnsupportedOperationException, NoSuchFileException, IllegalArgumentException, IOException, SecurityException {
return null;
}
@Override
public Path setAttributes( Path path,
FileAttribute<?>... fileAttributes ) throws UnsupportedOperationException, IllegalArgumentException, ClassCastException, IOException, SecurityException {
return null;
}
@Override
public Path setAttributes( Path path,
Map<String, Object> stringObjectMap ) throws UnsupportedOperationException, IllegalArgumentException, ClassCastException, IOException, SecurityException {
return null;
}
@Override
public Path setAttribute( Path path,
String s,
Object o ) throws UnsupportedOperationException, IllegalArgumentException, ClassCastException, IOException, SecurityException {
return null;
}
@Override
public Object getAttribute( Path path,
String s ) throws UnsupportedOperationException, IllegalArgumentException, IOException, SecurityException {
return null;
}
@Override
public FileTime getLastModifiedTime( Path path ) throws IllegalArgumentException, IOException, SecurityException {
return null;
}
@Override
public long size( Path path ) throws IllegalArgumentException, IOException, SecurityException {
return 0;
}
@Override
public boolean exists( Path path ) throws IllegalArgumentException, SecurityException {
return false;
}
@Override
public boolean notExists( Path path ) throws IllegalArgumentException, SecurityException {
return false;
}
@Override
public boolean isSameFile( Path path,
Path path2 ) throws IllegalArgumentException, IOException, SecurityException {
return false;
}
@Override
public BufferedReader newBufferedReader( Path path,
Charset charset ) throws IllegalArgumentException, NoSuchFileException, IOException, SecurityException {
return null;
}
@Override
public BufferedWriter newBufferedWriter( Path path,
Charset charset,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException, SecurityException {
return null;
}
@Override
public long copy( InputStream inputStream,
Path path,
CopyOption... copyOptions ) throws IOException, FileAlreadyExistsException, DirectoryNotEmptyException, UnsupportedOperationException, SecurityException {
return 0;
}
@Override
public long copy( Path path,
OutputStream outputStream ) throws IOException, SecurityException {
return 0;
}
@Override
public byte[] readAllBytes( Path path ) throws IOException, OutOfMemoryError, SecurityException {
return new byte[ 0 ];
}
@Override
public List<String> readAllLines( Path path ) throws IllegalArgumentException, NoSuchFileException, IOException, SecurityException {
return null;
}
@Override
public List<String> readAllLines( Path path,
Charset charset ) throws IllegalArgumentException, NoSuchFileException, IOException, SecurityException {
return null;
}
@Override
public String readAllString( Path path,
Charset charset ) throws IllegalArgumentException, NoSuchFileException, IOException {
return null;
}
@Override
public String readAllString( Path path ) throws IllegalArgumentException, NoSuchFileException, IOException {
return null;
}
@Override
public Path write( Path path,
byte[] bytes,
OpenOption... openOptions ) throws IOException, UnsupportedOperationException, SecurityException {
return null;
}
@Override
public Path write( Path path,
byte[] bytes,
Map<String, ?> stringMap,
OpenOption... openOptions ) throws IOException, UnsupportedOperationException, SecurityException {
return null;
}
@Override
public Path write( Path path,
byte[] bytes,
Set<? extends OpenOption> openOptions,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
Iterable<? extends CharSequence> charSequences,
Charset charset,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException, SecurityException {
return null;
}
@Override
public Path write( Path path,
String s,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
String s,
Charset charset,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
String s,
Set<? extends OpenOption> openOptions,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
String s,
Charset charset,
Set<? extends OpenOption> openOptions,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
String s,
Map<String, ?> stringMap,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
String s,
Charset charset,
Map<String, ?> stringMap,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public int priority() {
return 0;
}
} | kie-wb-common-services/kie-wb-common-data-modeller-core/src/test/java/org/kie/workbench/common/services/datamodeller/driver/IOServiceMock.java | /*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.services.datamodeller.driver;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.nio.charset.Charset;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.uberfire.io.IOService;
import org.uberfire.java.nio.IOException;
import org.uberfire.java.nio.channels.SeekableByteChannel;
import org.uberfire.java.nio.file.AtomicMoveNotSupportedException;
import org.uberfire.java.nio.file.CopyOption;
import org.uberfire.java.nio.file.DeleteOption;
import org.uberfire.java.nio.file.DirectoryNotEmptyException;
import org.uberfire.java.nio.file.DirectoryStream;
import org.uberfire.java.nio.file.FileAlreadyExistsException;
import org.uberfire.java.nio.file.FileSystem;
import org.uberfire.java.nio.file.FileSystemAlreadyExistsException;
import org.uberfire.java.nio.file.FileSystemNotFoundException;
import org.uberfire.java.nio.file.Files;
import org.uberfire.java.nio.file.NoSuchFileException;
import org.uberfire.java.nio.file.NotDirectoryException;
import org.uberfire.java.nio.file.OpenOption;
import org.uberfire.java.nio.file.Option;
import org.uberfire.java.nio.file.Path;
import org.uberfire.java.nio.file.ProviderNotFoundException;
import org.uberfire.java.nio.file.attribute.FileAttribute;
import org.uberfire.java.nio.file.attribute.FileAttributeView;
import org.uberfire.java.nio.file.attribute.FileTime;
public class IOServiceMock
implements IOService {
@Override
public void dispose() {
}
@Override
public void startBatch( FileSystem fs ) {
}
@Override
public void startBatch( FileSystem[] fs,
Option... options ) {
}
@Override
public void startBatch( FileSystem fileSystem,
Option... options ) {
}
@Override
public void startBatch( FileSystem... fileSystems ) {
}
@Override
public void endBatch() {
}
@Override
public FileAttribute<?>[] convert( Map<String, ?> stringMap ) {
return new FileAttribute<?>[ 0 ];
}
@Override
public Path get( String s,
String... strings ) throws IllegalArgumentException {
return null;
}
@Override
public Path get( URI uri ) throws IllegalArgumentException, FileSystemNotFoundException, SecurityException {
return null;
}
@Override
public Iterable<FileSystem> getFileSystems() {
return null;
}
@Override
public FileSystem getFileSystem( URI uri ) throws IllegalArgumentException, FileSystemNotFoundException, ProviderNotFoundException, SecurityException {
return null;
}
@Override
public FileSystem newFileSystem( URI uri,
Map<String, ?> stringMap ) throws IllegalArgumentException, FileSystemAlreadyExistsException, ProviderNotFoundException, IOException, SecurityException {
return null;
}
@Override
public void onNewFileSystem( NewFileSystemListener newFileSystemListener ) {
}
@Override
public InputStream newInputStream( Path path,
OpenOption... openOptions ) throws IllegalArgumentException, NoSuchFileException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public OutputStream newOutputStream( Path path,
OpenOption... openOptions ) throws IllegalArgumentException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public SeekableByteChannel newByteChannel( Path path,
OpenOption... openOptions ) throws IllegalArgumentException, UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public SeekableByteChannel newByteChannel( Path path,
Set<? extends OpenOption> openOptions,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public DirectoryStream<Path> newDirectoryStream( Path path ) throws IllegalArgumentException, NotDirectoryException, IOException, SecurityException {
return null;
}
@Override
public DirectoryStream<Path> newDirectoryStream( Path path,
DirectoryStream.Filter<Path> pathFilter ) throws IllegalArgumentException, NotDirectoryException, IOException, SecurityException {
return Files.newDirectoryStream( path, pathFilter );
}
@Override
public Path createFile( Path path,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public Path createDirectory( Path path,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public Path createDirectories( Path path,
FileAttribute<?>... fileAttributes ) throws UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public Path createDirectory( Path path,
Map<String, ?> stringMap ) throws IllegalArgumentException, UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public Path createDirectories( Path path,
Map<String, ?> stringMap ) throws UnsupportedOperationException, FileAlreadyExistsException, IOException, SecurityException {
return null;
}
@Override
public void delete( Path path,
DeleteOption... deleteOptions ) throws IllegalArgumentException, NoSuchFileException, DirectoryNotEmptyException, IOException, SecurityException {
}
@Override
public boolean deleteIfExists( Path path,
DeleteOption... deleteOptions ) throws IllegalArgumentException, DirectoryNotEmptyException, IOException, SecurityException {
return false;
}
@Override
public Path createTempFile( String s,
String s2,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public Path createTempFile( Path path,
String s,
String s2,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public Path createTempDirectory( String s,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public Path createTempDirectory( Path path,
String s,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, UnsupportedOperationException, IOException, SecurityException {
return null;
}
@Override
public Path copy( Path path,
Path path2,
CopyOption... copyOptions ) throws UnsupportedOperationException, FileAlreadyExistsException, DirectoryNotEmptyException, IOException, SecurityException {
return null;
}
@Override
public Path move( Path path,
Path path2,
CopyOption... copyOptions ) throws UnsupportedOperationException, FileAlreadyExistsException, DirectoryNotEmptyException, AtomicMoveNotSupportedException, IOException, SecurityException {
return null;
}
@Override
public <V extends FileAttributeView> V getFileAttributeView( Path path,
Class<V> vClass ) throws IllegalArgumentException {
return null;
}
@Override
public Map<String, Object> readAttributes( Path path ) throws UnsupportedOperationException, NoSuchFileException, IllegalArgumentException, IOException, SecurityException {
return null;
}
@Override
public Map<String, Object> readAttributes( Path path,
String s ) throws UnsupportedOperationException, NoSuchFileException, IllegalArgumentException, IOException, SecurityException {
return null;
}
@Override
public Path setAttributes( Path path,
FileAttribute<?>... fileAttributes ) throws UnsupportedOperationException, IllegalArgumentException, ClassCastException, IOException, SecurityException {
return null;
}
@Override
public Path setAttributes( Path path,
Map<String, Object> stringObjectMap ) throws UnsupportedOperationException, IllegalArgumentException, ClassCastException, IOException, SecurityException {
return null;
}
@Override
public Path setAttribute( Path path,
String s,
Object o ) throws UnsupportedOperationException, IllegalArgumentException, ClassCastException, IOException, SecurityException {
return null;
}
@Override
public Object getAttribute( Path path,
String s ) throws UnsupportedOperationException, IllegalArgumentException, IOException, SecurityException {
return null;
}
@Override
public FileTime getLastModifiedTime( Path path ) throws IllegalArgumentException, IOException, SecurityException {
return null;
}
@Override
public long size( Path path ) throws IllegalArgumentException, IOException, SecurityException {
return 0;
}
@Override
public boolean exists( Path path ) throws IllegalArgumentException, SecurityException {
return false;
}
@Override
public boolean notExists( Path path ) throws IllegalArgumentException, SecurityException {
return false;
}
@Override
public boolean isSameFile( Path path,
Path path2 ) throws IllegalArgumentException, IOException, SecurityException {
return false;
}
@Override
public BufferedReader newBufferedReader( Path path,
Charset charset ) throws IllegalArgumentException, NoSuchFileException, IOException, SecurityException {
return null;
}
@Override
public BufferedWriter newBufferedWriter( Path path,
Charset charset,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException, SecurityException {
return null;
}
@Override
public long copy( InputStream inputStream,
Path path,
CopyOption... copyOptions ) throws IOException, FileAlreadyExistsException, DirectoryNotEmptyException, UnsupportedOperationException, SecurityException {
return 0;
}
@Override
public long copy( Path path,
OutputStream outputStream ) throws IOException, SecurityException {
return 0;
}
@Override
public byte[] readAllBytes( Path path ) throws IOException, OutOfMemoryError, SecurityException {
return new byte[ 0 ];
}
@Override
public List<String> readAllLines( Path path ) throws IllegalArgumentException, NoSuchFileException, IOException, SecurityException {
return null;
}
@Override
public List<String> readAllLines( Path path,
Charset charset ) throws IllegalArgumentException, NoSuchFileException, IOException, SecurityException {
return null;
}
@Override
public String readAllString( Path path,
Charset charset ) throws IllegalArgumentException, NoSuchFileException, IOException {
return null;
}
@Override
public String readAllString( Path path ) throws IllegalArgumentException, NoSuchFileException, IOException {
return null;
}
@Override
public Path write( Path path,
byte[] bytes,
OpenOption... openOptions ) throws IOException, UnsupportedOperationException, SecurityException {
return null;
}
@Override
public Path write( Path path,
byte[] bytes,
Map<String, ?> stringMap,
OpenOption... openOptions ) throws IOException, UnsupportedOperationException, SecurityException {
return null;
}
@Override
public Path write( Path path,
byte[] bytes,
Set<? extends OpenOption> openOptions,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
Iterable<? extends CharSequence> charSequences,
Charset charset,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException, SecurityException {
return null;
}
@Override
public Path write( Path path,
String s,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
String s,
Charset charset,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
String s,
Set<? extends OpenOption> openOptions,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
String s,
Charset charset,
Set<? extends OpenOption> openOptions,
FileAttribute<?>... fileAttributes ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
String s,
Map<String, ?> stringMap,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
@Override
public Path write( Path path,
String s,
Charset charset,
Map<String, ?> stringMap,
OpenOption... openOptions ) throws IllegalArgumentException, IOException, UnsupportedOperationException {
return null;
}
} | BZ-1206627: adapted to new UF shutdown mechanism
| kie-wb-common-services/kie-wb-common-data-modeller-core/src/test/java/org/kie/workbench/common/services/datamodeller/driver/IOServiceMock.java | BZ-1206627: adapted to new UF shutdown mechanism |
|
Java | apache-2.0 | c990464e84b1f894b90bd2543b4e5f9f5ca194a7 | 0 | gchq/Gaffer,GovernmentCommunicationsHeadquarters/Gaffer,gchq/Gaffer,GovernmentCommunicationsHeadquarters/Gaffer,GovernmentCommunicationsHeadquarters/Gaffer,gchq/Gaffer,GovernmentCommunicationsHeadquarters/Gaffer,gchq/Gaffer | /*
* Copyright 2016-2018 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.gchq.gaffer.federatedstore.operation;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.exception.CloneFailedException;
import uk.gov.gchq.gaffer.commonutil.Required;
import uk.gov.gchq.gaffer.commonutil.ToStringBuilder;
import uk.gov.gchq.gaffer.commonutil.iterable.CloseableIterable;
import uk.gov.gchq.gaffer.operation.Operation;
import uk.gov.gchq.gaffer.operation.OperationChain;
import uk.gov.gchq.gaffer.operation.OperationChainDAO;
import uk.gov.gchq.gaffer.operation.Operations;
import uk.gov.gchq.gaffer.operation.io.GenericInput;
import uk.gov.gchq.gaffer.operation.io.InputOutput;
import uk.gov.gchq.gaffer.operation.serialisation.TypeReferenceImpl;
import uk.gov.gchq.koryphe.Since;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
* <p>
* An {@code FederatedOperationChain} holds an {@link OperationChain} that will
* be executed in one go on the federated graphs.
* </p>
*
* @param <I> the input type of the {@code FederatedOperationChain}.
* @param <O_ITEM> the output iterable type of the {@code FederatedOperationChain}.
**/
@JsonPropertyOrder(value = {"class", "operationChain", "options"}, alphabetic = true)
@Since("1.1.0")
public class FederatedOperationChain<I, O_ITEM> extends GenericInput<I>
implements InputOutput<I, CloseableIterable<O_ITEM>>,
Operations<OperationChain> {
@Required
private OperationChain operationChain;
private Map<String, String> options;
/**
 * Creates a {@code FederatedOperationChain} wrapping a new, empty {@link OperationChain}.
 */
public FederatedOperationChain() {
    this(new OperationChain());
}
public FederatedOperationChain(final Operation... operations) {
this(new OperationChain(operations));
}
public FederatedOperationChain(final OperationChain operationChain) {
setOperationChain(operationChain);
}
@JsonCreator
public FederatedOperationChain(@JsonProperty("operationChain") final OperationChainDAO operationChain,
@JsonProperty("options") final Map<String, String> options) {
this(operationChain);
setOptions(options);
}
@Override
public TypeReference<CloseableIterable<O_ITEM>> getOutputTypeReference() {
return (TypeReference) new TypeReferenceImpl.CloseableIterableObj();
}
public OperationChain getOperationChain() {
return operationChain;
}
@JsonGetter("operationChain")
OperationChainDAO getOperationChainDao() {
if (operationChain instanceof OperationChainDAO) {
return (OperationChainDAO) operationChain;
}
return new OperationChainDAO(operationChain);
}
@JsonIgnore
@Override
public List<OperationChain> getOperations() {
return Lists.newArrayList(operationChain);
}
public FederatedOperationChain<I, O_ITEM> shallowClone() throws CloneFailedException {
return new FederatedOperationChain.Builder<I, O_ITEM>()
.operationChain(operationChain.shallowClone())
.options(options)
.input(getInput())
.build();
}
@Override
public Map<String, String> getOptions() {
return options;
}
@Override
public void setOptions(final Map<String, String> options) {
this.options = options;
}
private void setOperationChain(final OperationChain operationChain) {
if (null == operationChain) {
throw new IllegalArgumentException("operationChain is required");
}
this.operationChain = operationChain;
}
@Override
public String toString() {
return new ToStringBuilder(this)
.append("input", getInput())
.append("operationChain", operationChain)
.append("options", options)
.build();
}
@Override
public void close() throws IOException {
operationChain.close();
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (null == obj || getClass() != obj.getClass()) {
return false;
}
final FederatedOperationChain<?, ?> federatedOperationChain = (FederatedOperationChain<?, ?>) obj;
return new EqualsBuilder()
.append(operationChain, federatedOperationChain.operationChain)
.append(options, federatedOperationChain.options)
.append(getInput(), federatedOperationChain.getInput())
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(13, 23)
.append(operationChain)
.append(options)
.append(getInput())
.toHashCode();
}
public static class Builder<I, O_ITEM> extends
Operation.BaseBuilder<FederatedOperationChain<I, O_ITEM>, Builder<I, O_ITEM>>
implements InputOutput.Builder<FederatedOperationChain<I, O_ITEM>, I, CloseableIterable<O_ITEM>, Builder<I, O_ITEM>> {
public Builder() {
super(new FederatedOperationChain<>(new OperationChain()));
}
public Builder<I, O_ITEM> operationChain(final OperationChain operationChain) {
_getOp().setOperationChain(operationChain);
return this;
}
}
}
| store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChain.java | /*
* Copyright 2016-2018 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.gchq.gaffer.federatedstore.operation;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.exception.CloneFailedException;
import uk.gov.gchq.gaffer.commonutil.Required;
import uk.gov.gchq.gaffer.commonutil.ToStringBuilder;
import uk.gov.gchq.gaffer.commonutil.iterable.CloseableIterable;
import uk.gov.gchq.gaffer.operation.Operation;
import uk.gov.gchq.gaffer.operation.OperationChain;
import uk.gov.gchq.gaffer.operation.OperationChainDAO;
import uk.gov.gchq.gaffer.operation.Operations;
import uk.gov.gchq.gaffer.operation.io.GenericInput;
import uk.gov.gchq.gaffer.operation.io.InputOutput;
import uk.gov.gchq.gaffer.operation.serialisation.TypeReferenceImpl;
import uk.gov.gchq.koryphe.Since;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
* <p>
* An {@code FederatedOperationChain} holds an {@link OperationChain} that will
* be executed in one go on the federated graphs.
* </p>
*
* @param <O_ITEM> the output iterable type of the {@code FederatedOperationChain}.
**/
@JsonPropertyOrder(value = {"class", "operationChain", "options"}, alphabetic = true)
@Since("1.1.0")
public class FederatedOperationChain<I, O_ITEM> extends GenericInput<I>
implements InputOutput<I, CloseableIterable<O_ITEM>>,
Operations<OperationChain> {
@Required
private OperationChain operationChain;
private Map<String, String> options;
public FederatedOperationChain() {
this(new OperationChain());
}
public FederatedOperationChain(final Operation... operations) {
this(new OperationChain(operations));
}
public FederatedOperationChain(final OperationChain operationChain) {
setOperationChain(operationChain);
}
@JsonCreator
public FederatedOperationChain(@JsonProperty("operationChain") final OperationChainDAO operationChain,
@JsonProperty("options") final Map<String, String> options) {
this(operationChain);
setOptions(options);
}
@Override
public TypeReference<CloseableIterable<O_ITEM>> getOutputTypeReference() {
return (TypeReference) new TypeReferenceImpl.CloseableIterableObj();
}
public OperationChain getOperationChain() {
return operationChain;
}
@JsonGetter("operationChain")
OperationChainDAO getOperationChainDao() {
if (operationChain instanceof OperationChainDAO) {
return (OperationChainDAO) operationChain;
}
return new OperationChainDAO(operationChain);
}
@JsonIgnore
@Override
public List<OperationChain> getOperations() {
return Lists.newArrayList(operationChain);
}
public FederatedOperationChain<I, O_ITEM> shallowClone() throws CloneFailedException {
return new FederatedOperationChain.Builder<I, O_ITEM>()
.operationChain(operationChain.shallowClone())
.options(options)
.input(getInput())
.build();
}
@Override
public Map<String, String> getOptions() {
return options;
}
@Override
public void setOptions(final Map<String, String> options) {
this.options = options;
}
private void setOperationChain(final OperationChain operationChain) {
if (null == operationChain) {
throw new IllegalArgumentException("operationChain is required");
}
this.operationChain = operationChain;
}
@Override
public String toString() {
return new ToStringBuilder(this)
.append("input", getInput())
.append("operationChain", operationChain)
.append("options", options)
.build();
}
@Override
public void close() throws IOException {
operationChain.close();
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (null == obj || getClass() != obj.getClass()) {
return false;
}
final FederatedOperationChain<?, ?> federatedOperationChain = (FederatedOperationChain<?, ?>) obj;
return new EqualsBuilder()
.append(operationChain, federatedOperationChain.operationChain)
.append(options, federatedOperationChain.options)
.append(getInput(), federatedOperationChain.getInput())
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(13, 23)
.append(operationChain)
.append(options)
.append(getInput())
.toHashCode();
}
public static class Builder<I, O_ITEM> extends
Operation.BaseBuilder<FederatedOperationChain<I, O_ITEM>, Builder<I, O_ITEM>>
implements InputOutput.Builder<FederatedOperationChain<I, O_ITEM>, I, CloseableIterable<O_ITEM>, Builder<I, O_ITEM>> {
public Builder() {
super(new FederatedOperationChain<>(new OperationChain()));
}
public Builder<I, O_ITEM> operationChain(final OperationChain operationChain) {
_getOp().setOperationChain(operationChain);
return this;
}
}
}
| gh-1764 - Updated the javadoc
| store-implementation/federated-store/src/main/java/uk/gov/gchq/gaffer/federatedstore/operation/FederatedOperationChain.java | gh-1764 - Updated the javadoc |
|
Java | apache-2.0 | f80d96eae26690bd303ac76083d65e9515b6db60 | 0 | subutai-io/base,subutai-io/base,subutai-io/base,subutai-io/Subutai,subutai-io/Subutai,subutai-io/base,subutai-io/Subutai,subutai-io/Subutai,subutai-io/Subutai,subutai-io/Subutai | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.safehaus.kiskis.mgmt.impl.dbmanager;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.util.List;
import org.cassandraunit.CassandraCQLUnit;
import org.cassandraunit.dataset.cql.FileCQLDataSet;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.Rule;
import org.safehaus.kiskis.mgmt.api.dbmanager.DbManager;
/**
 * Integration tests for {@link DbManagerImpl}, backed by an embedded Cassandra
 * instance started by cassandra-unit and loaded with the "kiskis" keyspace.
 *
 * @author dilshat
 */
@Ignore("Requires an embedded Cassandra instance; enable to run as an integration test")
public class DbManagerImplTest {

    /** Starts an embedded Cassandra and (re)creates the "kiskis" keyspace before each test. */
    @Rule
    public CassandraCQLUnit cassandraCQLUnit = new CassandraCQLUnit(new FileCQLDataSet("../../../../../keyspace/kiskis.sql", false, true, "kiskis"));

    private final DbManager dbManager = new DbManagerImpl();

    // Fixed coordinates under which each test stores its payload.
    private final String source = "source";
    private final String key = "key";
    private final String content = "content";

    public DbManagerImplTest() {
    }

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    @Before
    public void setUp() {
        // Point the manager at the embedded Cassandra session instead of a real cluster.
        ((DbManagerImpl) dbManager).setTestSession(cassandraCQLUnit.session);
    }

    @After
    public void tearDown() {
        // Remove whatever the test stored so tests stay independent.
        dbManager.deleteInfo(source, key);
    }

    /** Simple immutable value object used as the persisted payload. */
    private static class MyPojo {

        private final String content;

        public MyPojo(String content) {
            this.content = content;
        }

        public String getContent() {
            return content;
        }

        @Override
        public int hashCode() {
            // Derived from content so it stays consistent with equals().
            int hash = 5;
            hash = 89 * hash + (content != null ? content.hashCode() : 0);
            return hash;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            final MyPojo other = (MyPojo) obj;
            return content == null ? other.content == null : content.equals(other.content);
        }

        @Override
        public String toString() {
            return "MyPojo{" + "content=" + content + '}';
        }
    }

    @Test
    public void testSaveInfo() {
        assertTrue(dbManager.saveInfo(source, key, new MyPojo(content)));
    }

    @Test
    public void testGetInfo() {
        MyPojo myPojo = new MyPojo(content);
        dbManager.saveInfo(source, key, myPojo);

        MyPojo myPojo2 = dbManager.getInfo(source, key, MyPojo.class);
        assertEquals(myPojo, myPojo2);
    }

    @Test
    public void testGetInfoList() {
        MyPojo myPojo = new MyPojo(content);
        dbManager.saveInfo(source, key, myPojo);

        List<MyPojo> list = dbManager.getInfo(source, MyPojo.class);
        assertFalse(list.isEmpty());
    }

    @Test
    public void testDeleteInfo() {
        MyPojo myPojo = new MyPojo(content);
        dbManager.saveInfo(source, key, myPojo);
        dbManager.deleteInfo(source, key);

        List<MyPojo> list = dbManager.getInfo(source, MyPojo.class);
        assertTrue(list.isEmpty());
    }
}
| management/server/core/db-manager/db-manager-impl/src/test/java/org/safehaus/kiskis/mgmt/impl/dbmanager/DbManagerImplTest.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.safehaus.kiskis.mgmt.impl.dbmanager;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.util.List;
import org.cassandraunit.CassandraCQLUnit;
import org.cassandraunit.dataset.cql.FileCQLDataSet;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.Rule;
import org.safehaus.kiskis.mgmt.api.dbmanager.DbManager;
/**
*
* @author dilshat
*/
public class DbManagerImplTest {
@Rule
public CassandraCQLUnit cassandraCQLUnit = new CassandraCQLUnit(new FileCQLDataSet("../../../../../keyspace/kiskis.sql", false, true, "kiskis"));
private static final Gson gson = new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create();
private final DbManager dbManager = new DbManagerImpl();
private final String source = "source";
private final String key = "key";
private final String content = "content";
public DbManagerImplTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
((DbManagerImpl) dbManager).setTestSession(cassandraCQLUnit.session);
}
@After
public void tearDown() {
dbManager.deleteInfo(source, key);
}
private static class MyPojo {
private String content;
public MyPojo(String test) {
this.content = test;
}
public String getTest() {
return content;
}
@Override
public int hashCode() {
int hash = 5;
return hash;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final MyPojo other = (MyPojo) obj;
if ((this.content == null) ? (other.content != null) : !this.content.equals(other.content)) {
return false;
}
return true;
}
@Override
public String toString() {
return "MyPojo{" + "content=" + content + '}';
}
}
@Test
public void testSaveInfo() {
assertTrue(dbManager.saveInfo(source, key, new MyPojo(content)));
}
@Test
public void testGetInfo() {
MyPojo myPojo = new MyPojo(content);
dbManager.saveInfo(source, key, myPojo);
MyPojo myPojo2 = dbManager.getInfo(source, key, MyPojo.class);
assertEquals(myPojo, myPojo2);
}
@Test
public void testGetInfoList() {
MyPojo myPojo = new MyPojo(content);
dbManager.saveInfo(source, key, myPojo);
List<MyPojo> list = dbManager.getInfo(source, MyPojo.class);
assertFalse(list.isEmpty());
}
@Test
public void testDeleteInfo() {
MyPojo myPojo = new MyPojo(content);
dbManager.saveInfo(source, key, myPojo);
dbManager.deleteInfo(source, key);
List<MyPojo> list = dbManager.getInfo(source, MyPojo.class);
assertTrue(list.isEmpty());
}
}
| Covering db manager with unit tests and comments
Former-commit-id: 3e84b4f81fa4729c7e5c8d985f03500f2b1636b2 | management/server/core/db-manager/db-manager-impl/src/test/java/org/safehaus/kiskis/mgmt/impl/dbmanager/DbManagerImplTest.java | Covering db manager with unit tests and comments |
|
Java | apache-2.0 | 5ae6fa35b6f1b56b31838f682f6ffe8fea759731 | 0 | atteo/classindex | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.atteo.classindex.processor;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.lang.annotation.Annotation;
import java.lang.annotation.Inherited;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Filer;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.ElementScanner6;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import javax.tools.Diagnostic;
import javax.tools.FileObject;
import javax.tools.StandardLocation;
import org.atteo.classindex.ClassIndex;
import org.atteo.classindex.IndexAnnotated;
import org.atteo.classindex.IndexSubclasses;
/**
 * Generates index files for {@link ClassIndex}.
 *
 * <p>Three kinds of index are written to {@code CLASS_OUTPUT}:</p>
 * <ul>
 * <li>classes annotated with an annotation marked {@link IndexAnnotated},</li>
 * <li>subclasses of types marked {@link IndexSubclasses},</li>
 * <li>classes inside packages marked {@link IndexSubclasses} (via package-info).</li>
 * </ul>
 *
 * <p>Subclasses of this processor may instead register explicit annotations, super
 * classes or packages to index, which disables the annotation-driven mode.</p>
 */
public class ClassIndexProcessor extends AbstractProcessor {
    // Index key (annotation / superclass / package qualified name) -> qualified names of
    // matching classes. TreeSet values keep the generated files deterministically sorted.
    private final Map<String, Set<String>> subclassMap = new HashMap<>();
    private final Map<String, Set<String>> annotatedMap = new HashMap<>();
    private final Map<String, Set<String>> packageMap = new HashMap<>();

    // True when indexing is driven by @IndexAnnotated/@IndexSubclasses found on the
    // elements; false when a subclass registered explicit classes/packages to index.
    private boolean annotationDriven = true;
    private final Set<String> indexedAnnotations = new HashSet<>();
    private final Set<String> indexedSuperclasses = new HashSet<>();
    private final Set<String> indexedPackages = new HashSet<>();
    // Guards against writing the same javadoc file twice in one compilation.
    private final Set<TypeElement> javadocAlreadyStored = new HashSet<>();

    private Types types;
    private Filer filer;
    private Elements elementUtils;
    private Messager messager;

    public ClassIndexProcessor() {
    }

    /**
     * Used when creating subclasses of the processor which will index some annotations
     * which cannot be itself annotated with {@link IndexAnnotated} or {@link IndexSubclasses}.
     *
     * @param classes list of classes which the processor will be indexing
     */
    protected ClassIndexProcessor(Class<?>... classes) {
        if (classes.length == 0) {
            return;
        }
        annotationDriven = false;
        for (Class<?> klass : classes) {
            indexedAnnotations.add(klass.getCanonicalName());
        }
    }

    /**
     * Adds given annotations for indexing and switches off annotation-driven mode.
     */
    protected final void indexAnnotations(Class<?>... classes) {
        for (Class<?> klass : classes) {
            indexedAnnotations.add(klass.getCanonicalName());
        }
        annotationDriven = false;
    }

    /**
     * Adds given classes for subclass indexing and switches off annotation-driven mode.
     */
    protected final void indexSubclasses(Class<?>... classes) {
        for (Class<?> klass : classes) {
            indexedSuperclasses.add(klass.getCanonicalName());
        }
        annotationDriven = false;
    }

    /**
     * Adds given packages for indexing and switches off annotation-driven mode.
     */
    protected final void indexPackages(String... packages) {
        Collections.addAll(indexedPackages, packages);
        annotationDriven = false;
    }

    @Override
    public SourceVersion getSupportedSourceVersion() {
        return SourceVersion.latest();
    }

    @Override
    public Set<String> getSupportedAnnotationTypes() {
        // We inspect every root element ourselves, so claim all annotations.
        return Collections.singleton("*");
    }

    @Override
    public synchronized void init(ProcessingEnvironment processingEnv) {
        super.init(processingEnv);
        types = processingEnv.getTypeUtils();
        filer = processingEnv.getFiler();
        elementUtils = processingEnv.getElementUtils();
        messager = processingEnv.getMessager();
    }

    /**
     * Collects index entries from every type in each round; writes the index files
     * once processing is over. Always returns false so other processors still run.
     */
    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        try {
            for (Element element : roundEnv.getRootElements()) {
                if (!(element instanceof TypeElement)) {
                    continue;
                }
                final PackageElement packageElement = getPackage(element);
                // Scan the type and all its nested types.
                element.accept(new ElementScanner6<Void, Void>() {
                    @Override
                    public Void visitType(TypeElement typeElement, Void o) {
                        try {
                            for (AnnotationMirror mirror : typeElement.getAnnotationMirrors()) {
                                final TypeElement annotationElement = (TypeElement) mirror.getAnnotationType().asElement();
                                storeAnnotation(annotationElement, typeElement);
                            }
                            indexSupertypes(typeElement, typeElement);
                            if (packageElement != null) {
                                storeClassFromPackage(packageElement, typeElement);
                            }
                        } catch (IOException e) {
                            messager.printMessage(Diagnostic.Kind.ERROR, "[ClassIndexProcessor] " + e.getMessage());
                        }
                        return super.visitType(typeElement, o);
                    }
                }, null);
            }

            // Only write the index files after the final round.
            if (!roundEnv.processingOver()) {
                return false;
            }

            writeIndexFiles(ClassIndex.SUBCLASS_INDEX_PREFIX, subclassMap);
            writeIndexFiles(ClassIndex.ANNOTATED_INDEX_PREFIX, annotatedMap);
            for (Map.Entry<String, Set<String>> entry : packageMap.entrySet()) {
                writeSimpleNameIndexFile(entry.getValue(), entry.getKey()
                        .replace(".", "/")
                        + "/" + ClassIndex.PACKAGE_INDEX_NAME);
            }
        } catch (IOException e) {
            messager.printMessage(Diagnostic.Kind.ERROR, "[ClassIndexProcessor] Can't write index file: " + e.getMessage());
        } catch (Throwable e) {
            e.printStackTrace();
            messager.printMessage(Diagnostic.Kind.ERROR, "[ClassIndexProcessor] Internal error: " + e.getMessage());
        }
        return false;
    }

    /** Writes one index file per map entry, named {@code prefix + key}. */
    private void writeIndexFiles(String prefix, Map<String, Set<String>> indexMap) throws IOException {
        for (Map.Entry<String, Set<String>> entry : indexMap.entrySet()) {
            writeSimpleNameIndexFile(entry.getValue(), prefix + entry.getKey());
        }
    }

    /**
     * Merges a pre-existing index file (from a previous incremental build) into
     * {@code entries}. Returns the resource when it could be opened, otherwise null.
     */
    private FileObject readOldIndexFile(Set<String> entries, String resourceName) throws IOException {
        Reader reader = null;
        try {
            final FileObject resource = filer.getResource(StandardLocation.CLASS_OUTPUT, "", resourceName);
            reader = resource.openReader(true);
            readOldIndexFile(entries, reader);
            return resource;
        } catch (FileNotFoundException e) {
            /**
             * Ugly hack for Intellij IDEA incremental compilation.
             * The problem is that it throws FileNotFoundException on the files, if they were not created during the
             * current session of compilation.
             */
            // IDEA puts the real file path in the exception message.
            final String realPath = e.getMessage();
            if (new File(realPath).exists()) {
                try (Reader fileReader = new FileReader(realPath)) {
                    readOldIndexFile(entries, fileReader);
                }
            }
        } catch (IOException e) {
            // Thrown by Eclipse JDT when not found
        } catch (UnsupportedOperationException e) {
            // Java6 does not support reading old index files
        } finally {
            if (reader != null) {
                reader.close();
            }
        }
        return null;
    }

    /** Adds each line of the old index file to {@code entries}. */
    private static void readOldIndexFile(Set<String> entries, Reader reader) throws IOException {
        try (BufferedReader bufferedReader = new BufferedReader(reader)) {
            String line = bufferedReader.readLine();
            while (line != null) {
                entries.add(line);
                line = bufferedReader.readLine();
            }
        }
    }

    /**
     * Writes {@code entries}, one per line, into {@code overrideFile} when given,
     * otherwise into a freshly created resource named {@code resourceName}.
     */
    private void writeIndexFile(Set<String> entries, String resourceName, FileObject overrideFile) throws IOException {
        FileObject file = overrideFile;
        if (file == null) {
            file = filer.createResource(StandardLocation.CLASS_OUTPUT, "", resourceName);
        }
        try (Writer writer = file.openWriter()) {
            for (String entry : entries) {
                writer.write(entry);
                writer.write("\n");
            }
        }
    }

    /** Merges any old index file into {@code elementList} and writes the result back. */
    private void writeSimpleNameIndexFile(Set<String> elementList, String resourceName)
            throws IOException {
        FileObject file = readOldIndexFile(elementList, resourceName);
        if (file != null) {
            /**
             * Ugly hack for Eclipse JDT incremental compilation.
             * Eclipse JDT can't createResource() after successful getResource().
             * But we can file.openWriter().
             */
            try {
                writeIndexFile(elementList, resourceName, file);
                return;
            } catch (IllegalStateException e) {
                // Thrown by HotSpot Java Compiler
            }
        }
        writeIndexFile(elementList, resourceName, null);
    }

    /** Writes {@code content} verbatim into a new class-output resource. */
    private void writeFile(String content, String resourceName) throws IOException {
        FileObject file = filer.createResource(StandardLocation.CLASS_OUTPUT, "", resourceName);
        try (Writer writer = file.openWriter()) {
            writer.write(content);
        }
    }

    /**
     * Index super types for {@link IndexSubclasses} and any {@link IndexAnnotated}
     * additionally accompanied by {@link Inherited}. Walks the whole supertype graph
     * (classes and interfaces) of {@code element}, always attributing hits to
     * {@code rootElement}.
     */
    private void indexSupertypes(TypeElement rootElement, TypeElement element) throws IOException {
        for (TypeMirror mirror : types.directSupertypes(element.asType())) {
            if (mirror.getKind() != TypeKind.DECLARED) {
                continue;
            }

            DeclaredType superType = (DeclaredType) mirror;
            TypeElement superTypeElement = (TypeElement) superType.asElement();
            storeSubclass(superTypeElement, rootElement);

            for (AnnotationMirror annotationMirror : superTypeElement.getAnnotationMirrors()) {
                TypeElement annotationElement = (TypeElement) annotationMirror.getAnnotationType()
                        .asElement();
                // Annotations on supertypes only apply when marked @Inherited.
                if (hasAnnotation(annotationElement, Inherited.class)) {
                    storeAnnotation(annotationElement, rootElement);
                }
            }

            indexSupertypes(rootElement, superTypeElement);
        }
    }

    /** Returns true when {@code element} is directly annotated with {@code inheritedClass}. */
    private boolean hasAnnotation(TypeElement element, Class<? extends Annotation> inheritedClass) {
        try {
            for (AnnotationMirror annotationMirror : element.getAnnotationMirrors()) {
                if (annotationMirror.getAnnotationType().toString().equals(inheritedClass.getName())) {
                    return true;
                }
            }
        } catch (RuntimeException e) {
            // javac may throw Symbol$CompletionFailure (an unchecked exception) for
            // unresolvable symbols; ignore exactly that case and report anything else.
            if (!e.getClass().getName().equals("com.sun.tools.javac.code.Symbol$CompletionFailure")) {
                messager.printMessage(Diagnostic.Kind.ERROR, "[ClassIndexProcessor] Can't check annotation: " + e.getMessage());
            }
        }
        return false;
    }

    /**
     * Records {@code rootElement} in the index of {@code annotationElement} when that
     * annotation is indexed, either explicitly or via {@link IndexAnnotated}.
     */
    private void storeAnnotation(TypeElement annotationElement, TypeElement rootElement) throws IOException {
        if (indexedAnnotations.contains(annotationElement.getQualifiedName().toString())) {
            putElement(annotatedMap, annotationElement.getQualifiedName().toString(), rootElement);
        } else if (annotationDriven) {
            IndexAnnotated indexAnnotated = annotationElement.getAnnotation(IndexAnnotated.class);
            if (indexAnnotated != null) {
                putElement(annotatedMap, annotationElement.getQualifiedName().toString(), rootElement);
                if (indexAnnotated.storeJavadoc()) {
                    storeJavadoc(rootElement);
                }
            }
        }
    }

    /**
     * Records {@code rootElement} in the subclass index of {@code superTypeElement} when
     * that super type is indexed, either explicitly or via {@link IndexSubclasses}.
     */
    private void storeSubclass(TypeElement superTypeElement, TypeElement rootElement) throws IOException {
        if (indexedSuperclasses.contains(superTypeElement.getQualifiedName().toString())) {
            putElement(subclassMap, superTypeElement.getQualifiedName().toString(), rootElement);
        } else if (annotationDriven) {
            IndexSubclasses indexSubclasses = superTypeElement.getAnnotation(IndexSubclasses.class);
            if (indexSubclasses != null) {
                putElement(subclassMap, superTypeElement.getQualifiedName().toString(), rootElement);
                if (indexSubclasses.storeJavadoc()) {
                    storeJavadoc(rootElement);
                }
            }
        }
        // Note: a previous version repeated the same putElement() call here behind the
        // identical conditions; that redundant block was removed (the TreeSet already
        // contains the entry, so behaviour is unchanged).
    }

    /**
     * Records {@code rootElement} in the index of its package when that package is
     * indexed, either explicitly or via {@link IndexSubclasses} on the package-info.
     */
    private void storeClassFromPackage(PackageElement packageElement, TypeElement rootElement) throws IOException {
        if (indexedPackages.contains(packageElement.getQualifiedName().toString())) {
            // NOTE(review): explicit-package mode stores qualified names while the
            // annotation-driven branch below stores simple names — confirm intended.
            putElement(packageMap, packageElement.getQualifiedName().toString(), rootElement);
        } else if (annotationDriven) {
            IndexSubclasses indexSubclasses = packageElement.getAnnotation(IndexSubclasses.class);
            if (indexSubclasses != null) {
                String simpleName = getShortName(rootElement);
                if (simpleName != null) {
                    putElement(packageMap, packageElement.getQualifiedName().toString(), simpleName);
                    if (indexSubclasses.storeJavadoc()) {
                        storeJavadoc(rootElement);
                    }
                }
            }
        }
    }

    /** Adds the type's binary-style name to the index, skipping anonymous/local classes. */
    private <K> void putElement(Map<K, Set<String>> map, K keyElement, TypeElement valueElement) {
        final String fullName = getFullName(valueElement);
        if (fullName != null) {
            putElement(map, keyElement, fullName);
        }
    }

    /** Adds {@code valueElement} to the sorted entry set of {@code keyElement}. */
    private <K> void putElement(Map<K, Set<String>> map, K keyElement, String valueElement) {
        Set<String> set = map.get(keyElement);
        if (set == null) {
            set = new TreeSet<>();
            map.put(keyElement, set);
        }
        set.add(valueElement);
    }

    /**
     * Returns the qualified binary-style name ({@code Outer$Nested}), or null for
     * anonymous and local classes, which cannot be loaded by name at runtime.
     */
    private String getFullName(TypeElement typeElement) {
        switch (typeElement.getNestingKind()) {
            case TOP_LEVEL:
                return typeElement.getQualifiedName().toString();
            case MEMBER:
                final Element enclosingElement = typeElement.getEnclosingElement();
                if (enclosingElement instanceof TypeElement) {
                    final String enclosingName = getFullName(((TypeElement) enclosingElement));
                    if (enclosingName != null) {
                        return enclosingName + '$' + typeElement.getSimpleName().toString();
                    }
                }
                return null;
            case ANONYMOUS:
            case LOCAL:
            default:
                return null;
        }
    }

    /**
     * Returns the simple binary-style name without the package ({@code Outer$Nested}),
     * or null for anonymous and local classes.
     */
    private String getShortName(TypeElement typeElement) {
        switch (typeElement.getNestingKind()) {
            case TOP_LEVEL:
                return typeElement.getSimpleName().toString();
            case MEMBER:
                final Element enclosingElement = typeElement.getEnclosingElement();
                if (enclosingElement instanceof TypeElement) {
                    final String enclosingName = getShortName(((TypeElement) enclosingElement));
                    if (enclosingName != null) {
                        return enclosingName + '$' + typeElement.getSimpleName().toString();
                    }
                }
                return null;
            case ANONYMOUS:
            case LOCAL:
            default:
                return null;
        }
    }

    /** Walks up the enclosing elements until the package is found; null if none. */
    private PackageElement getPackage(Element typeElement) {
        Element element = typeElement;
        while (element != null) {
            if (element instanceof PackageElement) {
                return (PackageElement) element;
            }
            element = element.getEnclosingElement();
        }
        return null;
    }

    /** Writes the element's javadoc to a side file, at most once per element. */
    private void storeJavadoc(TypeElement element) throws IOException {
        if (javadocAlreadyStored.contains(element)) {
            return;
        }
        javadocAlreadyStored.add(element);

        String docComment = elementUtils.getDocComment(element);
        if (docComment == null) {
            return;
        }
        writeFile(docComment, ClassIndex.JAVADOC_PREFIX + element.getQualifiedName().toString());
    }
}
| classindex/src/main/java/org/atteo/classindex/processor/ClassIndexProcessor.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.atteo.classindex.processor;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.lang.annotation.Annotation;
import java.lang.annotation.Inherited;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Filer;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.ElementScanner6;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import javax.tools.Diagnostic;
import javax.tools.FileObject;
import javax.tools.StandardLocation;
import org.atteo.classindex.ClassIndex;
import org.atteo.classindex.IndexAnnotated;
import org.atteo.classindex.IndexSubclasses;
/**
 * Annotation processor that generates index files for {@link ClassIndex}.
 *
 * <p>During compilation every root element is scanned; the processor records which classes are
 * annotated with an indexed annotation, which classes extend an indexed superclass and which
 * classes live in an indexed package. When the final round completes, the collected names are
 * written to index resources under {@code CLASS_OUTPUT}, merging in any pre-existing entries so
 * incremental compilation does not lose previously indexed classes.
 */
public class ClassIndexProcessor extends AbstractProcessor {
    // Fully qualified superclass name -> fully qualified names of its indexed subclasses.
    private final Map<String, Set<String>> subclassMap = new HashMap<>();
    // Fully qualified annotation name -> fully qualified names of classes carrying it.
    private final Map<String, Set<String>> annotatedMap = new HashMap<>();
    // Package name -> simple (nested-aware) names of indexed classes inside it.
    private final Map<String, Set<String>> packageMap = new HashMap<>();
    // When true, indexing is driven by @IndexAnnotated / @IndexSubclasses found on elements;
    // subclasses of this processor may instead register explicit classes, which clears the flag.
    private boolean annotationDriven = true;
    private final Set<String> indexedAnnotations = new HashSet<>();
    private final Set<String> indexedSuperclasses = new HashSet<>();
    private final Set<String> indexedPackages = new HashSet<>();
    // Guards against writing the Javadoc resource for the same element more than once.
    private final Set<TypeElement> javadocAlreadyStored = new HashSet<>();
    private Types types;
    private Filer filer;
    private Elements elementUtils;
    private Messager messager;

    public ClassIndexProcessor() {
    }

    /**
     * Used when creating subclasses of the processor which will index some annotations
     * which cannot be itself annotated with {@link IndexAnnotated} or {@link IndexSubclasses}.
     *
     * @param classes list of classes which the processor will be indexing
     */
    protected ClassIndexProcessor(Class<?>... classes) {
        if (classes.length == 0) {
            return;
        }
        annotationDriven = false;
        for (Class<?> klass : classes) {
            indexedAnnotations.add(klass.getCanonicalName());
        }
    }

    /**
     * Adds given annotations for indexing.
     */
    protected final void indexAnnotations(Class<?>... classes) {
        for (Class<?> klass : classes) {
            indexedAnnotations.add(klass.getCanonicalName());
        }
        annotationDriven = false;
    }

    /**
     * Adds given classes for subclass indexing.
     */
    protected final void indexSubclasses(Class<?>... classes) {
        for (Class<?> klass : classes) {
            indexedSuperclasses.add(klass.getCanonicalName());
        }
        annotationDriven = false;
    }

    /**
     * Adds given packages for indexing.
     */
    protected final void indexPackages(String... packages) {
        Collections.addAll(indexedPackages, packages);
        annotationDriven = false;
    }

    @Override
    public SourceVersion getSupportedSourceVersion() {
        return SourceVersion.latest();
    }

    @Override
    public Set<String> getSupportedAnnotationTypes() {
        // Process every annotation; the processor inspects all root elements anyway.
        return Collections.singleton("*");
    }

    @Override
    public synchronized void init(ProcessingEnvironment processingEnv) {
        super.init(processingEnv);
        types = processingEnv.getTypeUtils();
        filer = processingEnv.getFiler();
        elementUtils = processingEnv.getElementUtils();
        messager = processingEnv.getMessager();
    }

    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        try {
            for (Element element : roundEnv.getRootElements()) {
                if (!(element instanceof TypeElement)) {
                    continue;
                }
                final PackageElement packageElement = getPackage(element);
                // Visit the type and all nested types it declares.
                element.accept(new ElementScanner6<Void, Void>() {
                    @Override
                    public Void visitType(TypeElement typeElement, Void o) {
                        try {
                            for (AnnotationMirror mirror : typeElement.getAnnotationMirrors()) {
                                final TypeElement annotationElement = (TypeElement) mirror.getAnnotationType().asElement();
                                storeAnnotation(annotationElement, typeElement);
                            }
                            indexSupertypes(typeElement, typeElement);
                            if (packageElement != null) {
                                storeClassFromPackage(packageElement, typeElement);
                            }
                        } catch (IOException e) {
                            messager.printMessage(Diagnostic.Kind.ERROR, "[ClassIndexProcessor] " + e.getMessage());
                        }
                        return super.visitType(typeElement, o);
                    }
                }, null);
            }
            if (!roundEnv.processingOver()) {
                // Index files are only written once all rounds have completed.
                return false;
            }
            writeIndexFiles(ClassIndex.SUBCLASS_INDEX_PREFIX, subclassMap);
            writeIndexFiles(ClassIndex.ANNOTATED_INDEX_PREFIX, annotatedMap);
            for (Map.Entry<String, Set<String>> entry : packageMap.entrySet()) {
                writeSimpleNameIndexFile(entry.getValue(), entry.getKey()
                        .replace(".", "/")
                        + "/" + ClassIndex.PACKAGE_INDEX_NAME);
            }
        } catch (IOException e) {
            messager.printMessage(Diagnostic.Kind.ERROR, "[ClassIndexProcessor] Can't write index file: " + e.getMessage());
        } catch (Throwable e) {
            e.printStackTrace();
            messager.printMessage(Diagnostic.Kind.ERROR, "[ClassIndexProcessor] Internal error: " + e.getMessage());
        }
        return false;
    }

    private void writeIndexFiles(String prefix, Map<String, Set<String>> indexMap) throws IOException {
        for (Map.Entry<String, Set<String>> entry : indexMap.entrySet()) {
            writeSimpleNameIndexFile(entry.getValue(), prefix + entry.getKey());
        }
    }

    /**
     * Merges the entries of a previously written index resource into {@code entries}.
     * Silently does nothing if the resource does not exist or cannot be read.
     */
    private void readOldIndexFile(Set<String> entries, String resourceName) throws IOException {
        Reader reader = null;
        try {
            final FileObject resource = filer.getResource(StandardLocation.CLASS_OUTPUT, "", resourceName);
            reader = resource.openReader(true);
            readOldIndexFile(entries, reader);
        } catch (FileNotFoundException e) {
            /**
             * Ugly hack for Intellij IDEA incremental compilation.
             * The problem is that it throws FileNotFoundException on the files, if they were not created during the
             * current session of compilation.
             */
            final String realPath = e.getMessage();
            if (new File(realPath).exists()) {
                try (Reader fileReader = new FileReader(realPath)) {
                    readOldIndexFile(entries, fileReader);
                }
            }
        } catch (IOException e) {
            // Thrown by Eclipse JDT when not found
        } catch (UnsupportedOperationException e) {
            // Java6 does not support reading old index files
        } finally {
            if (reader != null) {
                reader.close();
            }
        }
    }

    private static void readOldIndexFile(Set<String> entries, Reader reader) throws IOException {
        try (BufferedReader bufferedReader = new BufferedReader(reader)) {
            String line = bufferedReader.readLine();
            while (line != null) {
                entries.add(line);
                line = bufferedReader.readLine();
            }
        }
    }

    private void writeIndexFile(Set<String> entries, String resourceName) throws IOException {
        FileObject file = filer.createResource(StandardLocation.CLASS_OUTPUT, "", resourceName);
        try (Writer writer = file.openWriter()) {
            for (String entry : entries) {
                writer.write(entry);
                writer.write("\n");
            }
        }
    }

    /**
     * Writes {@code elementList} to {@code resourceName}, first merging in any entries already
     * present from a previous (incremental) compilation.
     */
    private void writeSimpleNameIndexFile(Set<String> elementList, String resourceName)
            throws IOException {
        readOldIndexFile(elementList, resourceName);
        writeIndexFile(elementList, resourceName);
    }

    private void writeFile(String content, String resourceName) throws IOException {
        FileObject file = filer.createResource(StandardLocation.CLASS_OUTPUT, "", resourceName);
        try (Writer writer = file.openWriter()) {
            writer.write(content);
        }
    }

    /**
     * Index super types for {@link IndexSubclasses} and any {@link IndexAnnotated}
     * additionally accompanied by {@link Inherited}.
     */
    private void indexSupertypes(TypeElement rootElement, TypeElement element) throws IOException {
        for (TypeMirror mirror : types.directSupertypes(element.asType())) {
            if (mirror.getKind() != TypeKind.DECLARED) {
                continue;
            }
            DeclaredType superType = (DeclaredType) mirror;
            TypeElement superTypeElement = (TypeElement) superType.asElement();
            storeSubclass(superTypeElement, rootElement);
            for (AnnotationMirror annotationMirror : superTypeElement.getAnnotationMirrors()) {
                TypeElement annotationElement = (TypeElement) annotationMirror.getAnnotationType()
                        .asElement();
                if (hasAnnotation(annotationElement, Inherited.class)) {
                    storeAnnotation(annotationElement, rootElement);
                }
            }
            // Recurse so that indirect supertypes are indexed as well.
            indexSupertypes(rootElement, superTypeElement);
        }
    }

    private boolean hasAnnotation(TypeElement element, Class<? extends Annotation> inheritedClass) {
        try {
            for (AnnotationMirror annotationMirror : element.getAnnotationMirrors()) {
                if (annotationMirror.getAnnotationType().toString().equals(inheritedClass.getName())) {
                    return true;
                }
            }
        } catch (RuntimeException e) {
            // javac may throw Symbol$CompletionFailure for unresolvable symbols; ignore only that case.
            if (!e.getClass().getName().equals("com.sun.tools.javac.code.Symbol$CompletionFailure")) {
                messager.printMessage(Diagnostic.Kind.ERROR, "[ClassIndexProcessor] Can't check annotation: " + e.getMessage());
            }
        }
        return false;
    }

    private void storeAnnotation(TypeElement annotationElement, TypeElement rootElement) throws IOException {
        if (indexedAnnotations.contains(annotationElement.getQualifiedName().toString())) {
            putElement(annotatedMap, annotationElement.getQualifiedName().toString(), rootElement);
        } else if (annotationDriven) {
            IndexAnnotated indexAnnotated = annotationElement.getAnnotation(IndexAnnotated.class);
            if (indexAnnotated != null) {
                putElement(annotatedMap, annotationElement.getQualifiedName().toString(), rootElement);
                if (indexAnnotated.storeJavadoc()) {
                    storeJavadoc(rootElement);
                }
            }
        }
    }

    private void storeSubclass(TypeElement superTypeElement, TypeElement rootElement) throws IOException {
        // Note: a previously duplicated re-check of the same two conditions (which re-inserted the
        // identical entry into subclassMap) has been removed; putElement targets a Set, so the
        // duplicate insert was a no-op.
        if (indexedSuperclasses.contains(superTypeElement.getQualifiedName().toString())) {
            putElement(subclassMap, superTypeElement.getQualifiedName().toString(), rootElement);
        } else if (annotationDriven) {
            IndexSubclasses indexSubclasses = superTypeElement.getAnnotation(IndexSubclasses.class);
            if (indexSubclasses != null) {
                putElement(subclassMap, superTypeElement.getQualifiedName().toString(), rootElement);
                if (indexSubclasses.storeJavadoc()) {
                    storeJavadoc(rootElement);
                }
            }
        }
    }

    private void storeClassFromPackage(PackageElement packageElement, TypeElement rootElement) throws IOException {
        if (indexedPackages.contains(packageElement.getQualifiedName().toString())) {
            putElement(packageMap, packageElement.getQualifiedName().toString(), rootElement);
        } else if (annotationDriven) {
            IndexSubclasses indexSubclasses = packageElement.getAnnotation(IndexSubclasses.class);
            if (indexSubclasses != null) {
                String simpleName = getShortName(rootElement);
                if (simpleName != null) {
                    putElement(packageMap, packageElement.getQualifiedName().toString(), simpleName);
                    if (indexSubclasses.storeJavadoc()) {
                        storeJavadoc(rootElement);
                    }
                }
            }
        }
    }

    private <K> void putElement(Map<K, Set<String>> map, K keyElement, TypeElement valueElement) {
        final String fullName = getFullName(valueElement);
        if (fullName != null) {
            putElement(map, keyElement, fullName);
        }
    }

    private <K> void putElement(Map<K, Set<String>> map, K keyElement, String valueElement) {
        Set<String> set = map.get(keyElement);
        if (set == null) {
            // TreeSet keeps index files sorted and free of duplicates.
            set = new TreeSet<>();
            map.put(keyElement, set);
        }
        set.add(valueElement);
    }

    /**
     * Returns the binary-style name ("Outer$Inner") of a top-level or member type,
     * or null for anonymous/local types which cannot be indexed.
     */
    private String getFullName(TypeElement typeElement) {
        switch (typeElement.getNestingKind()) {
        case TOP_LEVEL:
            return typeElement.getQualifiedName().toString();
        case MEMBER:
            final Element enclosingElement = typeElement.getEnclosingElement();
            if (enclosingElement instanceof TypeElement) {
                final String enclosingName = getFullName(((TypeElement) enclosingElement));
                if (enclosingName != null) {
                    return enclosingName + '$' + typeElement.getSimpleName().toString();
                }
            }
            return null;
        case ANONYMOUS:
        case LOCAL:
        default:
            return null;
        }
    }

    /**
     * Returns the package-relative name ("Outer$Inner") of a top-level or member type,
     * or null for anonymous/local types which cannot be indexed.
     */
    private String getShortName(TypeElement typeElement) {
        switch (typeElement.getNestingKind()) {
        case TOP_LEVEL:
            return typeElement.getSimpleName().toString();
        case MEMBER:
            final Element enclosingElement = typeElement.getEnclosingElement();
            if (enclosingElement instanceof TypeElement) {
                final String enclosingName = getShortName(((TypeElement) enclosingElement));
                if (enclosingName != null) {
                    return enclosingName + '$' + typeElement.getSimpleName().toString();
                }
            }
            return null;
        case ANONYMOUS:
        case LOCAL:
        default:
            return null;
        }
    }

    /** Walks up the enclosing elements until the containing package is found. */
    private PackageElement getPackage(Element typeElement) {
        Element element = typeElement;
        while (element != null) {
            if (element instanceof PackageElement) {
                return (PackageElement) element;
            }
            element = element.getEnclosingElement();
        }
        return null;
    }

    private void storeJavadoc(TypeElement element) throws IOException {
        if (javadocAlreadyStored.contains(element)) {
            return;
        }
        javadocAlreadyStored.add(element);
        String docComment = elementUtils.getDocComment(element);
        if (docComment == null) {
            return;
        }
        writeFile(docComment, ClassIndex.JAVADOC_PREFIX + element.getQualifiedName().toString());
    }
}
| Fix Eclipse JDT incremental compilation
Example of error message:
```
[ant:iajc] [error] [ClassIndexProcessor] Can't write index file: createResource. Resource already created : CLASS_OUTPUT//META-INF/annotations/config.meta.ConfigRoot --- uri = file:/E:/Work/Project/build/classes/main/META-INF/annotations/config.meta.ConfigRoot
```
In Eclipse JDT we can't run createResource() for object loaded by getResource().
| classindex/src/main/java/org/atteo/classindex/processor/ClassIndexProcessor.java | Fix Eclipse JDT incremental compilation |
|
Java | bsd-2-clause | cfcf5f72fc4476d242edc1251302389445d69c90 | 0 | gagoel/freebase-java | package com.freebase.api.tests;
import static com.freebase.json.JSON.o;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
import com.freebase.api.Freebase;
import com.freebase.json.JSON;
/**
 * Integration tests exercising the Freebase API client against the live
 * freebase.com and sandbox-freebase.com services.
 */
public class Tests {

    /** Route commons-logging to SimpleLog and turn on HTTP wire-level debugging. */
    @Before public void setup() {
        System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.SimpleLog");
        System.setProperty("org.apache.commons.logging.simplelog.showdatetime", "true");
        System.setProperty("org.apache.commons.logging.simplelog.log.org.apache.http", "debug");
        System.setProperty("org.apache.commons.logging.simplelog.log.org.apache.http.wire", "debug");
    }

    /** A minimal MQL read should resolve the first id to /user/root. */
    @Test public void testMQLRead() {
        final Freebase freebase = Freebase.getFreebase();
        final JSON response = freebase.mqlread(o("id", null, "limit", 1));
        assertTrue("/user/root".equals(response.get("result").get("id").string()));
    }

    /** Requesting a cursor in the envelope must yield a non-null cursor in the response. */
    @Test public void testMQLReadWithCursors() {
        final Freebase freebase = Freebase.getFreebase();
        final JSON envelope = o("cursor", true);
        final JSON response = freebase.mqlread(o("id", null, "limit", 1), envelope, null);
        final String cursor = response.get("cursor").string();
        assertTrue(cursor != null);
    }

    /** Full-text search should rank the Blade Runner topic first. */
    @Test public void testSearch() {
        final JSON searchResult = Freebase.getFreebase().search("blade runner");
        final String topId = searchResult.get("result").get(0).get("id").string();
        assertTrue(topId.equals("/en/blade_runner"));
    }

    /** Basic topic lookup should return the topic's display name. */
    @Test public void testGetTopic() {
        final JSON topic = Freebase.getFreebase().get_topic("/en/blade_runner", o("mode", "basic"));
        final String displayName = topic.get("result").get("text").string();
        assertTrue(displayName.equals("Blade Runner"));
    }

    /** Writing a fresh topic to the sandbox and reading it back must round-trip the name. */
    @Test public void testMQLWrite() {
        final Freebase sandbox = Freebase.getFreebaseSandbox();
        sandbox.sign_in("stefanomazzocchi2", "stefano");
        final String name = "test" + System.currentTimeMillis();
        final JSON query = o()
            ._("create", "unless_exists")
            ._("name", name)
            ._("type", "/common/topic")
            ._("id", null);
        final String createdId = sandbox.mqlwrite(query).get("result").get("id").string();
        final String readBack = sandbox.mqlread(o("id", createdId, "name", null)).get("result").get("name").string();
        assertTrue(name.equals(readBack));
    }

    /** Uploading a blob must preserve both its content and its media type. */
    @Test public void testUpload() {
        final Freebase sandbox = Freebase.getFreebaseSandbox();
        sandbox.sign_in("stefanomazzocchi2", "stefano");
        final String content = "blah blah";
        final String media_type = "text/plain";
        final JSON uploadResult = sandbox.upload(content, media_type);
        final String blobId = uploadResult.get("result").get("id").string();
        assertTrue(content.equals(sandbox.get_blob(blobId)));
        final String returnedMediaType = uploadResult.get("result").get("/type/content/media_type").string();
        assertTrue(media_type.equals(returnedMediaType));
    }
}
| src/test/java/com/freebase/api/tests/Tests.java | package com.freebase.api.tests;
import static com.freebase.json.JSON.o;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
import com.freebase.api.Freebase;
import com.freebase.json.JSON;
/**
 * Integration tests for the Freebase API client; these hit the live freebase.com
 * and sandbox-freebase.com services over HTTP.
 */
public class Tests {
    /** Routes commons-logging to SimpleLog and enables HTTP wire-level debug output. */
    @Before public void setup() {
        System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.SimpleLog");
        System.setProperty("org.apache.commons.logging.simplelog.showdatetime", "true");
        System.setProperty("org.apache.commons.logging.simplelog.log.org.apache.http", "debug");
        System.setProperty("org.apache.commons.logging.simplelog.log.org.apache.http.wire", "debug");
    }
    /** A minimal MQL read: the first id returned is expected to be /user/root. */
    @Test public void testMQLRead() {
        Freebase freebase = Freebase.getFreebase();
        JSON query = o("id",null,"limit",1);
        JSON response = freebase.mqlread(query);
        assertTrue("/user/root".equals(response.get("result").get("id").string()));
    }
    /** Full-text search is expected to rank the Blade Runner topic first. */
    @Test public void testSearch() {
        Freebase freebase = Freebase.getFreebase();
        JSON result = freebase.search("blade runner");
        String id = result.get("result").get(0).get("id").string();
        assertTrue(id.equals("/en/blade_runner"));
    }
    /** Basic topic lookup should return the topic's display text. */
    @Test public void testGetTopic() {
        Freebase freebase = Freebase.getFreebase();
        JSON topic = freebase.get_topic("/en/blade_runner",o("mode","basic"));
        String name = topic.get("result").get("text").string();
        assertTrue(name.equals("Blade Runner"));
    }
    /**
     * Writes a freshly named topic to the sandbox and reads it back.
     * NOTE(review): credentials are hardcoded here — consider moving to config.
     */
    @Test public void testMQLWrite() {
        Freebase sandbox = Freebase.getFreebaseSandbox();
        sandbox.sign_in("stefanomazzocchi2", "stefano");
        String name = "test" + System.currentTimeMillis();
        JSON query = o()
            ._("create","unless_exists")
            ._("name",name)
            ._("type","/common/topic")
            ._("id",null);
        String id = sandbox.mqlwrite(query).get("result").get("id").string();
        String name2 = sandbox.mqlread(JSON.o("id",id,"name",null)).get("result").get("name").string();
        assertTrue(name.equals(name2));
    }
    /** Uploads a text blob to the sandbox and verifies content and media type round-trip. */
    @Test public void testUpload() {
        Freebase sandbox = Freebase.getFreebaseSandbox();
        sandbox.sign_in("stefanomazzocchi2", "stefano");
        String content = "blah blah";
        String media_type = "text/plain";
        JSON result = sandbox.upload(content, media_type);
        String id = result.get("result").get("id").string();
        String retrieved_content = sandbox.get_blob(id);
        assertTrue(content.equals(retrieved_content));
        String retrieved_media_type = result.get("result").get("/type/content/media_type").string();
        assertTrue(media_type.equals(retrieved_media_type));
    }
}
| adding a new test for mql with cursors
| src/test/java/com/freebase/api/tests/Tests.java | adding a new test for mql with cursors |
|
Java | mit | d3de193e94e1091a4426bc5d00c46e4540ab1d27 | 0 | nhochberger/Custos,nhochberger/Custos | package view;
import hochberger.utilities.application.ApplicationShutdownEvent;
import hochberger.utilities.application.session.BasicSession;
import hochberger.utilities.gui.ImageButton;
import hochberger.utilities.gui.UndecoratedEDTSafeFrame;
import hochberger.utilities.images.loader.ImageLoader;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.swing.JFrame;
import modules.CustosModule;
import modules.VisibleCustosModule;
import net.miginfocom.swing.MigLayout;
import controller.SystemMessage;
import controller.SystemMessageMemory;
/**
 * Main application window: an undecorated, always-on-top, maximized frame that hosts the
 * widgets of all registered {@link VisibleCustosModule}s plus a system-message area.
 * Closing the window (or clicking the close button) publishes an
 * {@link ApplicationShutdownEvent} on the session's event bus.
 */
public class CustosMainFrame extends UndecoratedEDTSafeFrame {
    // Modules whose widgets are laid out in the frame; CopyOnWriteArrayList because
    // modules may be added while the list is being iterated elsewhere.
    private final List<VisibleCustosModule> modules;
    private final ColorProvider colorProvider;
    private final SystemMessageLabel systemMessageLabel;
    private final SystemMessageDialog systemMessageDialog;
    private final BasicSession session;

    /**
     * Creates the frame and registers the system-message label and dialog as
     * {@link SystemMessage} listeners on the session's event bus.
     */
    public CustosMainFrame(final BasicSession session, final ColorProvider colorProvider, final SystemMessageMemory messageMemory) {
        super(session.getProperties().title());
        this.session = session;
        this.colorProvider = colorProvider;
        this.modules = new CopyOnWriteArrayList<>();
        this.systemMessageLabel = new SystemMessageLabel(colorProvider);
        session.getEventBus().register(this.systemMessageLabel, SystemMessage.class);
        this.systemMessageDialog = new SystemMessageDialog(colorProvider, messageMemory);
        session.getEventBus().register(this.systemMessageDialog, SystemMessage.class);
    }

    /** Builds the UI: 3x3 module grid, close button (north) and message label (south). */
    @Override
    protected void buildUI() {
        frame().setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        frame().addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosed(final WindowEvent e) {
                // Window disposal triggers application shutdown.
                CustosMainFrame.this.session.getEventBus().publishFromEDT(new ApplicationShutdownEvent());
            }
        });
        center();
        frame().getContentPane().setBackground(this.colorProvider.backgroundColor());
        // 3 columns x 3 rows of fixed-size (400x200) module cells, centered via push gaps.
        useLayoutManager(new MigLayout("wrap 3", ":push[400!, left]30![400!, center]30![400!, right]:push", "20![200!, top]30[200!, center]30[200!, bottom]push"));
        frame().setAlwaysOnTop(true);
        this.systemMessageLabel.build();
        this.systemMessageDialog.build();
        // Clicking the message label opens the full system-message dialog.
        this.systemMessageLabel.getLabel().addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(final MouseEvent arg0) {
                CustosMainFrame.this.systemMessageDialog.show();
            }
        });
        final ImageButton closeApplicationButton = new ImageButton(ImageLoader.loadImage("close.png"));
        closeApplicationButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(final ActionEvent e) {
                CustosMainFrame.this.session.getEventBus().publishFromEDT(new ApplicationShutdownEvent());
            }
        });
        add(closeApplicationButton, "north");
        for (final CustosModule module : this.modules) {
            add(module.getWidget().getComponent(), module.getWidget().getLayoutConstraints());
        }
        add(this.systemMessageLabel.getLabel(), "dock south, gapleft 5, gapright 5, gapbottom 5");
        maximize();
    }

    /** Registers a module whose widget will be added when the UI is built. */
    public void addModuleToView(final VisibleCustosModule module) {
        this.modules.add(module);
    }

    /** Re-applies the current background color and refreshes every module widget. */
    public void update() {
        if (!isBuilt()) {
            // Nothing to repaint before buildUI() has run.
            return;
        }
        frame().getContentPane().setBackground(this.colorProvider.backgroundColor());
        for (final VisibleCustosModule custosModule : this.modules) {
            custosModule.updateWidget();
        }
    }
}
| src/view/CustosMainFrame.java | package view;
import hochberger.utilities.application.ApplicationShutdownEvent;
import hochberger.utilities.application.session.BasicSession;
import hochberger.utilities.gui.ImageButton;
import hochberger.utilities.gui.UndecoratedEDTSafeFrame;
import hochberger.utilities.images.loader.ImageLoader;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.swing.JFrame;
import modules.CustosModule;
import modules.VisibleCustosModule;
import net.miginfocom.swing.MigLayout;
import controller.SystemMessage;
import controller.SystemMessageMemory;
/**
 * Main application window: an undecorated, always-on-top, maximized frame that hosts the
 * widgets of all registered {@link VisibleCustosModule}s plus a system-message area.
 * Closing the window (or clicking the close button) publishes an
 * {@link ApplicationShutdownEvent} on the session's event bus.
 */
public class CustosMainFrame extends UndecoratedEDTSafeFrame {
    // Modules whose widgets are laid out in the frame; CopyOnWriteArrayList because
    // modules may be added while the list is being iterated elsewhere.
    private final List<VisibleCustosModule> modules;
    private final ColorProvider colorProvider;
    private final SystemMessageLabel systemMessageLabel;
    private final SystemMessageDialog systemMessageDialog;
    private final BasicSession session;

    /**
     * Creates the frame and registers the system-message label and dialog as
     * {@link SystemMessage} listeners on the session's event bus.
     */
    public CustosMainFrame(final BasicSession session, final ColorProvider colorProvider, final SystemMessageMemory messageMemory) {
        super(session.getProperties().title());
        this.session = session;
        this.colorProvider = colorProvider;
        this.modules = new CopyOnWriteArrayList<>();
        this.systemMessageLabel = new SystemMessageLabel(colorProvider);
        session.getEventBus().register(this.systemMessageLabel, SystemMessage.class);
        this.systemMessageDialog = new SystemMessageDialog(colorProvider, messageMemory);
        session.getEventBus().register(this.systemMessageDialog, SystemMessage.class);
    }

    /** Builds the UI: 3x3 module grid, close button (north) and message label (south). */
    @Override
    protected void buildUI() {
        frame().setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        frame().addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosed(final WindowEvent e) {
                // Window disposal triggers application shutdown.
                CustosMainFrame.this.session.getEventBus().publishFromEDT(new ApplicationShutdownEvent());
            }
        });
        center();
        frame().getContentPane().setBackground(this.colorProvider.backgroundColor());
        // FIX: removed the MigLayout "debug" flag which drew red layout-debugging
        // outlines over the production UI.
        useLayoutManager(new MigLayout("wrap 3", ":push[400!, left]30![400!, center]30![400!, right]:push", "20![200!, top]30[200!, center]30[200!, bottom]push"));
        frame().setAlwaysOnTop(true);
        this.systemMessageLabel.build();
        this.systemMessageDialog.build();
        // Clicking the message label opens the full system-message dialog.
        this.systemMessageLabel.getLabel().addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(final MouseEvent arg0) {
                CustosMainFrame.this.systemMessageDialog.show();
            }
        });
        final ImageButton closeApplicationButton = new ImageButton(ImageLoader.loadImage("close.png"));
        closeApplicationButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(final ActionEvent e) {
                CustosMainFrame.this.session.getEventBus().publishFromEDT(new ApplicationShutdownEvent());
            }
        });
        add(closeApplicationButton, "north");
        for (final CustosModule module : this.modules) {
            add(module.getWidget().getComponent(), module.getWidget().getLayoutConstraints());
        }
        add(this.systemMessageLabel.getLabel(), "dock south, gapleft 5, gapright 5, gapbottom 5");
        maximize();
    }

    /** Registers a module whose widget will be added when the UI is built. */
    public void addModuleToView(final VisibleCustosModule module) {
        this.modules.add(module);
    }

    /** Re-applies the current background color and refreshes every module widget. */
    public void update() {
        if (!isBuilt()) {
            // Nothing to repaint before buildUI() has run.
            return;
        }
        frame().getContentPane().setBackground(this.colorProvider.backgroundColor());
        for (final VisibleCustosModule custosModule : this.modules) {
            custosModule.updateWidget();
        }
    }
}
| fixes #31 | src/view/CustosMainFrame.java | fixes #31 |
|
Java | mit | 3bb7e7d325f5b484569f6a76b7d651b1d3f77c41 | 0 | Ano-Tech-Computers/TelePlugin | package no.atc.floyd.bukkit.tele;
import org.bukkit.entity.Player;
import org.bukkit.GameMode;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.Server;
import org.bukkit.World;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerLoginEvent;
import org.bukkit.event.player.PlayerMoveEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.event.player.PlayerTeleportEvent;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.PluginManager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.bukkit.block.Block;
import org.bukkit.command.*;
import com.sk89q.worldguard.bukkit.WorldGuardPlugin;
import com.sk89q.worldguard.protection.flags.DefaultFlag;
import com.sk89q.worldguard.protection.managers.RegionManager;
import com.sk89q.worldguard.protection.ApplicableRegionSet;
/**
* TelePlugin plugin for Bukkit
*
* @author FloydATC
*/
public class TelePlugin extends JavaPlugin implements Listener {
//public static Permissions Permissions = null;
private File req_dir = new File(this.getDataFolder(), "requests");
private File loc_dir = new File(this.getDataFolder(), "locations");
private File warp_dir = new File(this.getDataFolder(), "warps");
private final long cooldown = 86400 * 1000; // Milliseconds
private ConcurrentHashMap<String,ConcurrentHashMap<Integer,Location>> locs = new ConcurrentHashMap<String,ConcurrentHashMap<Integer,Location>>();
private Integer max_tpback = 1440; // Number of MINUTES to keep
WorldGuardPlugin worldguard = null;
public void onDisable() {
for (String pname : locs.keySet()) {
saveLocations(pname);
}
}
public void onEnable() {
// Set up directory for request, denial and permission tokens.
// Clear out any stale files (i.e. older than cooldown)
if (req_dir.exists() == false) { req_dir.mkdirs(); }
File[] files = req_dir.listFiles();
long now = System.currentTimeMillis();
for (int i = 0; i < files.length; i++) {
if (files[i].lastModified() + cooldown < now) {
files[i].delete();
}
}
// Set up directory for player location data (used for /tpback)
if (loc_dir.exists() == false) { loc_dir.mkdirs(); }
// WorldGuard integration
Plugin wg = getServer().getPluginManager().getPlugin("WorldGuard");
if (wg == null || !(wg instanceof WorldGuardPlugin)) {
getLogger().info("WorldGuard not loaded, will not detect PVP regions");
} else {
worldguard = (WorldGuardPlugin) wg;
getLogger().info("Using WorldGuard to detect PVP regions");
}
// Register event handlers
PluginManager pm = getServer().getPluginManager();
pm.registerEvents(this, this);
}
public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args ) {
String cmdname = cmd.getName().toLowerCase();
Player player = null;
if (sender instanceof Player) {
player = (Player) sender;
//getLogger().info("player="+player+" cmd="+cmdname+" args="+Arrays.toString(args));
}
// See if any options were specified
Boolean force = false;
Integer options = numOptions(args);
if (options > 0) {
String[] revised_args = new String[args.length - options];
Integer index = 0;
for (String s: args) {
if (s.startsWith("--")) {
// Supported options go here
if (s.equalsIgnoreCase("--force")) {
force = true;
}
} else {
revised_args[index] = s;
index++;
}
}
getLogger().info("done revising argument list");
args = revised_args;
}
if (cmdname.equalsIgnoreCase("tp") && player != null && player.hasPermission("teleplugin.tp")) {
if (args.length == 0 || args.length > 3) {
player.sendMessage("§7[§6TP§7]§b Valid syntax:");
player.sendMessage("§7[§6TP§7]§b /tp <player>");
player.sendMessage("§7[§6TP§7]§b /tp <x> <z>");
player.sendMessage("§7[§6TP§7]§b /tp <x> <z> <world>");
return true;
}
if (args.length == 1) {
// Teleport to player
if (teleport(player.getName(), args[0], force)) {
getLogger().info(player.getName() + " teleported to " + args[0] );
} else {
player.sendMessage("§7[§6TP§7]§c Could not teleport you to §6" + args[0]);
getLogger().info(player.getName() + " could not teleport to " + args[0] );
}
return true;
}
if (args.length == 2) {
// Teleport to specific coordinates in current world at maximum height
Location loc = player.getLocation();
Integer x = Integer.parseInt(args[0]);
Integer z = Integer.parseInt(args[1]);
Integer y = loc.getWorld().getHighestBlockYAt(x, z);
loc.setX(x);
loc.setZ(z);
loc.setY(y);
if (safeTeleport(player, loc, force)) {
player.sendMessage("§7[§6TP§7]§b Teleported you to coordinates x=" +x+ " z="+z);
getLogger().info(player.getName() + " teleported to coordinates x=" +x+ " z="+z);
} else {
player.sendMessage("§7[§6TP§7]§c Could not teleport you to coordinates x="+x+" z="+z);
getLogger().info(player.getName() + " could not teleport to coordinates x="+x+" z="+z);
}
return true;
}
if (args.length == 3) {
// Teleport to specific coordinates in alternate world at maximum height
Location loc = player.getLocation();
Integer x = Integer.parseInt(args[0]);
Integer z = Integer.parseInt(args[1]);
World w = player.getServer().getWorld(args[2]);
if (w == null) {
player.sendMessage("§7[§6TP§7]§c There is no world called " + args[2]);
return true;
}
Integer y = w.getHighestBlockYAt(x, z);
loc.setX(x);
loc.setZ(z);
loc.setY(y);
loc.setWorld(w);
if (safeTeleport(player, loc, force)) {
player.sendMessage("§7[§6TP§7]§b Teleported you to coordinates x=" +x+ " z="+z+ " in world "+w.getName());
getLogger().info(player.getName() + " teleported to oordinates x=" +x+ " z="+z+ " in world "+w.getName());
} else {
player.sendMessage("§7[§6TP§7]§c Could not teleport you to oordinates x=" +x+ " z="+z+ " in world "+w.getName());
getLogger().info(player.getName() + " could not teleport to oordinates x=" +x+ " z="+z+ " in world "+w.getName());
}
return true;
}
}
if (cmdname.equalsIgnoreCase("tphelp") && player != null) {
player.sendMessage("§7[§6TP§7]§b Personal teleportation commands: §c[EXPERIMENTAL]");
player.sendMessage("§7[§6TP§7]§6 /tpa <name> §bRequest teleport to player");
player.sendMessage("§7[§6TP§7]§6 /tpy <name> §bGrant teleport access");
player.sendMessage("§7[§6TP§7]§6 /tpn <name> §bDeny teleport access");
player.sendMessage("§7[§6TP§7]§b Access is granted for 24 hours or until denied");
player.sendMessage("§7[§6TP§7]§b Repeated requests/grants/denials are ignored");
if (player != null && player.hasPermission("teleplugin.tpa") == false) {
player.sendMessage("§7[§6TP§7]§c You do not yet have permission to use this feature");
}
return true;
}
if (cmdname.equalsIgnoreCase("tpa") && player != null && player.hasPermission("teleplugin.tpa")) {
force = false; // Disallowed
if (args.length == 0 || args.length > 1) {
player.sendMessage("§7[§6TP§7]§b Valid syntax:");
player.sendMessage("§7[§6TP§7]§b /tpa <player>");
return true;
}
if (args.length == 1) {
Player target = null;
if (args.length == 1) {
target = this.getServer().getPlayer(args[0]);
if (target != null) {
args[0] = target.getName();
}
}
if (target != null && target.hasPermission("teleplugin.tpa") == false) {
player.sendMessage("§7[§6TP§7]§b §6" + args[0] + "§c does not yet have permission to use this feature");
return true;
}
if (has_denial(player.getName(), args[0])) {
player.sendMessage("§7[§6TP§7]§b §6" + args[0] + "§c has denied you teleport permission.");
return true;
}
if (has_permission(player.getName(), args[0])) {
if (teleport(player.getName(), args[0], force)) {
getLogger().info(player.getName() + " teleported to " + args[0] );
cancel_request(player.getName(), args[0]);
} else {
player.sendMessage("§7[§6TP§7]§c Could not teleport you to §6" + args[0]);
getLogger().info(player.getName() + " could not teleport to " + args[0] );
}
} else {
if (request_permission(player.getName(), args[0])) {
player.sendMessage("§7[§6TP§7]§b Requested teleport to §6" + args[0]);
} else {
player.sendMessage("§7[§6TP§7]§c Could not request teleport to §6" + args[0] + "§c at this time");
}
}
return true;
}
}
if (cmdname.equalsIgnoreCase("tpy") && player != null && player.hasPermission("teleplugin.tpa")) {
force = false; // Disallowed
Player target = null;
if (args.length == 0 || args.length > 1) {
player.sendMessage("§7[§6TP§7]§b Valid syntax:");
player.sendMessage("§7[§6TP§7]§b /tpy <player>");
return true;
}
if (args.length == 1) {
target = this.getServer().getPlayer(args[0]);
if (target != null) {
args[0] = target.getName();
}
}
if (args.length == 1) {
if (target != null && target.hasPermission("teleplugin.tpa") == false) {
player.sendMessage("§7[§6TP§7]§b §6" + args[0] + "§c does not yet have permission to use this feature");
return true;
}
if (grant_permission(player.getName(), args[0])) {
player.sendMessage("§7[§6TP§7]§b Teleport permission granted to §6" + args[0]);
} else {
player.sendMessage("§7[§6TP§7]§c Could not grant teleport permission to §6" + args[0] + "§c at this time");
}
if (has_request(args[0], player.getName()) && has_permission(args[0], player.getName())) {
if (teleport(args[0], player.getName(), force)) {
getLogger().info(args[0] + " teleported to " + player.getName() );
cancel_request(args[0], player.getName());
}
}
return true;
}
}
if (cmdname.equalsIgnoreCase("tpn") && player != null && player.hasPermission("teleplugin.tpa")) {
if (args.length == 0 || args.length > 1) {
player.sendMessage("§7[§6TP§7]§b Valid syntax:");
player.sendMessage("§7[§6TP§7]§b /tpn <player>");
return true;
}
if (args.length == 1) {
if (deny_permission(player.getName(), args[0])) {
player.sendMessage("§7[§6TP§7]§b Teleport permission denied to §6" + args[0]);
} else {
player.sendMessage("§7[§6TP§7]§c Could not deny teleport permission to §6" + args[0] + "§c at this time");
}
return true;
}
}
if (cmdname.equalsIgnoreCase("tphere") && player != null && player.hasPermission("teleplugin.tphere")) {
if (args.length == 0) {
player.sendMessage("§7[§6TP§7]§b Valid syntax:");
player.sendMessage("§7[§6TP§7]§b /tphere <player> [<player> ...]");
return true;
}
if (args.length >= 1) {
for (String subject : args) {
if (teleport(subject, player.getName(), force)) {
getLogger().info(player.getName() + " teleported " + args[0] + " to self");
player.sendMessage("§7[§6TP§7]§b Teleported §6" + args[0] + "§b to you");
} else {
player.sendMessage("§7[§6TP§7]§c Could not teleport §6" + args[0] + "§c to you");
}
}
return true;
}
}
if (cmdname.equalsIgnoreCase("tpto") && (player == null || player.hasPermission("teleplugin.tpto"))) {
if (args.length < 2) {
player.sendMessage("§7[§6TP§7]§b Valid syntax:");
player.sendMessage("§7[§6TP§7]§b /tpto <to_player> <player> [<player> ...]");
return true;
}
if (args.length >= 2) {
String performer = "(Server)";
if (player != null) {
performer = player.getName();
}
String destination = args[args.length-1];
for (Integer i=0; i<args.length-1; i++) {
if (teleport(args[i], destination, force)) {
getLogger().info(performer + " teleported " + args[i] + " to " + destination);
if (player != null) {
player.sendMessage("§7[§6TP§7]§b Teleported §6" + args[i] + "§b to §6" + destination);
}
} else {
getLogger().info("Could not teleport " + args[i] + " to " + destination);
if (player != null) {
player.sendMessage("§7[§6TP§7]§c Could not teleport §6" + args[i] + "§c to §6" + destination);
}
}
}
return true;
}
}
/* if (cmdname.equalsIgnoreCase("spawn")) {
if (args.length > 0) {
// Spawn others
if (player == null || player.hasPermission("teleplugin.spawn.other")) {
String admin = "Server";
if (player != null) {
admin = player.getName();
}
for (String pname : args) {
Player p = getServer().getPlayer(pname);
if (p != null) {
p.teleport(p.getWorld().getSpawnLocation());
p.sendMessage("§7[§6TP§7]§b You were respawned by " + admin);
getLogger().info("Player " + pname + " was respawned by " + admin);
}
}
} else {
player.sendMessage("§7[§6TP§7]§c You do not have permission to spawn other players");
}
} else {
// Spawn self
if (player != null && player.hasPermission("teleplugin.spawn.self")) {
player.teleport(player.getWorld().getSpawnLocation());
player.sendMessage("§7[§6TP§7]§b You were respawned");
getLogger().info("Player " + player.getName() + " respawned");
} else {
player.sendMessage("§7[§6TP§7]§c Spawn who?");
}
}
}
*/ if (cmdname.equalsIgnoreCase("tpback") && player != null && player.hasPermission("teleplugin.tpback")) {
if (args.length == 0 || args.length > 2) {
player.sendMessage("§7[§6TP§7]§b Valid syntax:");
player.sendMessage("§7[§6TP§7]§b /tpback <minutes> [<player>]");
player.sendMessage("§7[§6TP§7]§b /tpback <hh>:<mm> [<player>]");
return true;
}
if (args.length == 1) {
// Get delta
Integer delta = 1;
// Is this a time in HH:MM format? Convert to delta
if (args[0].matches("^[0-2][0-9]:[0-5][0-9]$")) {
args[0] = time_to_delta(args[0]);
}
// The argument should now be an integer
try {
delta = Integer.valueOf(args[0]);
}
catch (Exception e) {
player.sendMessage("§7[§6TP§7]§c Expected a number");
//e.printStackTrace();
return false;
}
// Teleport back to own location
Location loc = getLocation(player.getName(), delta);
if (loc == null) {
player.sendMessage("§7[§6TP§7]§c No location recorded "+delta+" minute"+(delta==1?"":"s")+" ago");
Integer oldest = getOldestDelta(player.getName());
if (oldest != null) {
player.sendMessage("§7[§6TP§7]§c The earliest location is "+oldest+" minute"+(oldest==1?"":"s")+" old");
}
} else {
if (safeTeleport(player, loc, force)) {
getLogger().info(player.getName() + " teleported "+delta+" minute"+(delta==1?"":"s")+" back");
player.sendMessage("§7[§6TP§7]§b Teleported you "+delta+" minute"+(delta==1?"":"s")+" back");
} else {
player.sendMessage("§7[§6TP§7]§c Teleport to "+delta+" minute"+(delta==1?"":"s")+" ago failed");
}
}
return true;
}
if (args.length == 2 && player.hasPermission("teleplugin.tpback.other")) {
// Is this a time in HH:MM format? Convert to delta
if (args[0].matches("^[0-2][0-9]:[0-5][0-9]$")) {
args[0] = time_to_delta(args[0]);
}
// Get delta
Integer delta = 1;
try {
delta = Integer.valueOf(args[0]);
}
catch (Exception e) {
player.sendMessage("§7[§6TP§7]§c Expected a number");
//e.printStackTrace();
return false;
}
// Get player name
Player p = null;
String pname = args[1];
p = player.getServer().getPlayer(pname);
if (p != null) {
pname = p.getName();
}
// Teleport back to player's location
Location loc = getLocation(pname, delta);
if (loc == null) {
player.sendMessage("§7[§6TP§7]§c No location recorded for §6"+pname+"§c "+delta+" minute"+(delta==1?"":"s")+" ago");
Integer oldest = getOldestDelta(pname);
if (oldest != null) {
player.sendMessage("§7[§6TP§7]§c The earliest location for §6"+pname+"§c is "+oldest+" minute"+(oldest==1?"":"s")+" old");
}
} else {
if (safeTeleport(player, loc, force)) {
getLogger().info(player.getName() + " teleported to where "+pname+" was "+delta+" minute"+(delta==1?"":"s")+" ago");
player.sendMessage("§7[§6TP§7]§b Teleported you to where §6"+pname+"§c was "+delta+" minute"+(delta==1?"":"s")+" ago");
} else {
player.sendMessage("§7[§6TP§7]§c Teleport to where §6"+pname+"§c was "+delta+" minute"+(delta==1?"":"s")+" ago failed");
}
}
return true;
}
}
if (cmdname.equalsIgnoreCase("warp")) {
if (args.length == 0) {
respond(player, "§7[§6TP§7]§b Syntax:");
respond(player, "§7[§6TP§7]§b /warp <place> [--force|<player> [...]]");
return true;
}
Warp w = new Warp(args[0], player, this);
if (w.exists()) {
if (w.usePermitted(player)) {
if (args.length == 1) {
// Warp self
if (player == null) {
respond(player, "§7[§6TP§7]§c Must specify a player from the console");
} else {
if (safeTeleport(player, w.location(), force)) {
getLogger().info(player.getName() + " warped to "+args[0]);
respond(player, "§7[§6TP§7]§b Warped you to "+args[0]);
w.touch();
} else {
respond(player, "§7[§6TP§7]§c Warp to "+args[0]+" failed");
}
}
} else {
// Warp others
if (player.hasPermission("teleplugin.warpother")) {
for (Integer i = 1; i < args.length; i++) {
Player p = getServer().getPlayer(args[i]);
if (p != null) {
if (safeTeleport(p, w.location(), false)) {
getLogger().info(player.getName() + " warped "+p.getName()+" to "+args[0]);
respond(player, "§7[§6TP§7]§b Warped "+p.getName()+" to "+args[0]);
respond(p, "§7[§6TP§7]§b "+player.getName()+" warped you to "+w.name());
w.touch();
} else {
respond(player, "§7[§6TP§7]§c "+p.getName()+" was not warped to "+args[1]);
}
} else {
respond(player, "§7[§6TP§7]§c Player "+args[i]+" is not online");
}
}
} else {
getLogger().info(player.getName() + " warp others to "+args[0]+" denied");
respond(player, "§7[§6TP§7]§c You don't have permission to warp other players");
}
}
} else {
getLogger().info(player.getName() + " warp to "+args[0]+" denied");
respond(player, "§7[§6TP§7]§c You don't have permission to use warp point '"+args[0]+"'");
}
} else {
respond(player, "§7[§6TP§7]§c Warp point '"+args[0]+"' not found");
}
return true;
}
if (cmdname.equalsIgnoreCase("setwarp")) {
if (args.length == 0 || args.length > 1) {
respond(player, "§7[§6TP§7]§b Syntax:");
respond(player, "§7[§6TP§7]§b /setwarp <place>");
return true;
}
if (player == null) {
respond(player, "§7[§6TP§7]§c You can't set a warp point from the console");
return true;
}
Warp w = new Warp(args[0], player, this);
if (w.exists()) {
respond(player, "§7[§6TP§7]§c Warp point '"+args[0]+"' already exists");
} else {
if (w.createPermitted(player)) {
if (w.create()) {
getLogger().info(player.getName()+" created warp point "+args[0]);
respond(player, "§7[§6TP§7]§b Warp point '"+args[0]+"' was created");
} else {
getLogger().warning("Error creating warp point '"+args[0]+"': "+w.error());
respond(player, "§7[§6TP§7]§c Internal error: "+w.error());
}
} else {
getLogger().warning(player.getName()+" was not permitted to create warp point "+args[0]);
respond(player, "§7[§6TP§7]§c You don't have permission to create warp point '"+args[0]+"'");
}
}
return true;
}
if (cmdname.equalsIgnoreCase("delwarp")) {
if (args.length == 0 || args.length > 1) {
respond(player, "§7[§6TP§7]§b Syntax:");
respond(player, "§7[§6TP§7]§b /delwarp <place>");
return true;
}
Warp w = new Warp(args[0], player, this);
if (w.exists()) {
if (w.deletePermitted(player)) {
w.delete();
getLogger().info(player.getName()+" deleted warp point "+args[0]);
respond(player, "§7[§6TP§7]§b Warp point '"+args[0]+"' has now been deleted");
} else {
getLogger().warning(player.getName()+" was not permitted to delete warp point "+args[0]);
respond(player, "§7[§6TP§7]§c You don't have permission to delete warp point '"+args[0]+"'");
}
} else {
respond(player, "§7[§6TP§7]§c Warp point '"+args[0]+"' does not exist");
}
return true;
}
if (cmdname.equalsIgnoreCase("movewarp")) {
if (args.length == 0 || args.length > 1) {
respond(player, "§7[§6TP§7]§b Syntax:");
respond(player, "§7[§6TP§7]§b /movewarp <place>");
return true;
}
if (player == null) {
respond(player, "§7[§6TP§7]§c You can't move a warp point from the console");
return true;
}
Warp w = new Warp(args[0], player, this);
if (w.exists()) {
if (w.movePermitted(player)) {
if (w.delete() && w.create()) {
getLogger().info(player.getName()+" moved warp point "+args[0]);
respond(player, "§7[§6TP§7]§b Warp point '"+args[0]+"' was moved");
} else {
getLogger().warning("Error moving warp point"+args[0]+": "+w.error());
respond(player, "§7[§6TP§7]§c Internal error: "+w.error());
}
} else {
getLogger().warning(player.getName()+" was not permitted to move warp point "+args[0]);
respond(player, "§7[§6TP§7]§c You don't have permission to move warp point '"+args[0]+"'");
}
} else {
respond(player, "§7[§6TP§7]§c Warp point '"+args[0]+"' does not exist");
}
return true;
}
if (cmdname.equalsIgnoreCase("listwarps"))
{
if (args.length > 2)
{
respond(player, "§7[§6TP§7]§b Syntax:");
respond(player, "§7[§6TP§7]§b /listwarps [<player>] [<page>]");
return true;
}
// Parse arguments
String owner = null;
int page = 0;
if (args.length >= 1)
{
if (Character.isDigit(args[0].charAt(0)))
{
try {
page = Integer.parseInt(args[0]) - 1;
}
catch (NumberFormatException e) {
player.sendMessage("§7[§6TP§7]§c Expected a number");
//e.printStackTrace();
return false;
}
page = Integer.parseInt(args[0]);
}
else if (args[0].equals("."))
{
owner = player.getName();
}
else
{
owner = args[0];
}
}
if (page < 0) page = 0;
// Sanitize owner name
if (owner != null)
{
Pattern pattern = Pattern.compile("[^\\w@\\.\\'\\-]");
Matcher matcher = pattern.matcher(owner);
matcher.replaceAll("");
}
// Check permissions
if (owner == null)
{
if (!player.hasPermission("teleplugin.listwarps.global"))
{
getLogger().warning(player.getName() + " was not permitted to list global warp points");
respond(player, "§7[§6TP§7]§c You don't have permission to list these warp points");
}
}
else if (owner.equalsIgnoreCase(player.getName()))
{
if (!player.hasPermission("teleplugin.listwarps.own"))
{
getLogger().warning(player.getName() + " was not permitted to list own warp points");
respond(player, "§7[§6TP§7]§c You don't have permission to list these warp points");
}
}
else
{
if (!player.hasPermission("teleplugin.listwarps.other"))
{
getLogger().warning(player.getName() + " was not permitted to list " + owner + "'s warp points");
respond(player, "§7[§6TP§7]§c You don't have permission to list these warp points");
}
}
File dir = new File(this.getDataFolder(), "warps");
if (owner != null)
dir = new File(dir, owner);
// List all files ending with ".loc"
File[] warps = dir.listFiles(new FilenameFilter()
{
@Override
public boolean accept(File dir, String name)
{
return new File(dir, name).isFile() && name.toLowerCase().endsWith(".loc");
}
});
/* warps is null if the warps/ directory does not exist (e.g. if no warps have been created yet);
* to avoid NullPointerException, warps is instantiated as an empty array */
if (warps == null)
warps = new File[0];
int pageCount = (int) Math.ceil(warps.length/10f);
respond(player, "§7[§6TP§7]§b Warps (page " + (page + 1) + " of " + pageCount + "):");
int startIndex = page * 10;
for (int i = startIndex; i < startIndex + 10 && i < warps.length; i++)
respond(player, "§7[§6TP§7]§9 " + warps[i].getName());
return true;
}
return false;
}
@EventHandler
public boolean onLogin( PlayerLoginEvent event ) {
Player player = event.getPlayer();
registerLocation(player.getName(), player.getLocation());
return true;
}
@EventHandler
public boolean onQuit( PlayerQuitEvent event ) {
// Save and unload locations from memory
String pname = event.getPlayer().getName();
registerLocation(pname, event.getPlayer().getLocation());
saveLocations(pname);
locs.remove(pname);
return true;
}
@EventHandler
public boolean onMove( PlayerMoveEvent event ) {
registerLocation(event.getPlayer().getName(), event.getFrom());
return true;
}
@EventHandler(priority = EventPriority.LOW) // Must process before CreativeControl
public void onTeleport( PlayerTeleportEvent event ) {
Player player = event.getPlayer();
String pname = player.getName();
registerLocation(pname, event.getFrom());
String from_world = event.getFrom().getWorld().getName().toLowerCase();
String to_world = event.getTo().getWorld().getName().toLowerCase();
getLogger().fine("DEBUG: " + pname + " teleporting from '" + from_world + "' to '" + to_world + "'");
if (from_world.equals(to_world)) {
return; // Teleporting within the same world always permitted
} else {
if (player.hasPermission("teleplugin.creative."+to_world)) {
// Switch to CREATIVE mode
player.setGameMode(GameMode.CREATIVE);
} else {
// Switch to SURVIVAL mode
player.setGameMode(GameMode.SURVIVAL);
}
}
if (event.getCause() == PlayerTeleportEvent.TeleportCause.NETHER_PORTAL) {
getLogger().info(pname + " used a Nether portal");
return; // Allow game mechanic
}
if (event.getCause() == PlayerTeleportEvent.TeleportCause.END_PORTAL) {
getLogger().info(pname + " used a The End portal");
return; // Allow game mechanic
}
if (player.hasPermission("teleplugin.enter.any") == false && player.hasPermission("teleplugin.enter." + to_world) == false) {
getLogger().info(pname + " was denied access to enter world '" + to_world + "'");
player.sendMessage("§7[§6TP§7]§c You are not allowed to enter " + to_world + "§c this way");
event.setCancelled(true);
return;
}
if (player.hasPermission("teleplugin.leave.any") == false && player.hasPermission("teleplugin.leave." + from_world) == false) {
getLogger().info(pname + " was denied access to leave world '" + from_world + "'");
player.sendMessage("§7[§6TP§7]§c You are not allowed to leave " + to_world + "§c this way");
event.setCancelled(true);
return;
}
return;
}
private boolean teleport(String subject, String destination, Boolean force) {
Player subj = getServer().getPlayer(subject);
Player dest = getServer().getPlayer(destination);
if (subject.equals(destination)) {
System.out.println("[TP] Teleport "+subject+" to "+destination+"..?");
return true;
}
if (subj != null && dest != null) {
Location loc = dest.getLocation();
if (safeTeleport(subj, loc, force)) {
subj.sendMessage("§7[§6TP§7]§b Teleporting you to §6" + dest.getName());
return true;
} else {
subj.sendMessage("§7[§6TP§7]§c Teleport to §6" + dest.getName() + "§c failed");
return false;
}
} else {
if (subj == null) {
System.out.println("[TP] Teleport who?");
}
if (dest == null) {
System.out.println("[TP] Teleport where?");
}
return false;
}
}
private boolean request_permission(String subject, String destination) {
File f = null;
long now = System.currentTimeMillis();
// Requesting permission to self? Go away.
if (subject.equalsIgnoreCase(destination)) {
return false;
}
// Create request token
f = new File(req_dir, subject + "-to-" + destination + ".requested");
if (f.exists() && f.lastModified() + cooldown > now) {
// Already requested so renew quietly
getLogger().info(subject + " renewing /tpa request to " + destination);
f.setLastModified(now);
return true;
} else {
// This is a new request
Player p = this.getServer().getPlayer(destination);
if (p != null) {
getLogger().info(subject + " sending new /tpa request to " + destination);
try {
f.createNewFile();
p.sendMessage("§7[§6TP§7]§b §6" + subject + "§b has requested /tpa permission (See '/tphelp')");
} catch (IOException e) {
e.printStackTrace();
getLogger().warning("Unexpected error creating "+f.getName()+": "+e.getLocalizedMessage());
return false;
}
return true;
} else {
return false;
}
}
}
private boolean cancel_request(String subject, String destination) {
File f = null;
// Create request token
f = new File(req_dir, subject + "-to-" + destination + ".requested");
f.delete();
return true;
}
private boolean grant_permission(String subject, String destination) {
File f = null;
long now = System.currentTimeMillis();
// Granting permission to self? Go away.
if (subject.equalsIgnoreCase(destination)) {
return false;
}
// Delete denial token, if any
f = new File(req_dir, destination + "-to-" + subject + ".denied");
f.delete();
// Create permission token
f = new File(req_dir, destination + "-to-" + subject + ".granted");
if (f.exists() && f.lastModified() + cooldown > now) {
// Already granted so renew quietly
getLogger().info(subject + " renewing /tpa permission for §6" + destination);
f.setLastModified(now);
return true;
} else {
// This is a new permission
Player p = this.getServer().getPlayer(destination);
if (p != null) {
getLogger().info(subject + " granting new /tpa permission for " + destination);
try {
f.createNewFile();
p.sendMessage("§7[§6TP§7]§b §6" + subject + "§b has granted you /tpa permission (See '/tphelp')");
} catch (IOException e) {
e.printStackTrace();
getLogger().warning("Unexpected error creating "+f.getName()+": "+e.getLocalizedMessage());
return false;
}
return true;
} else {
return false;
}
}
}
private boolean deny_permission(String subject, String destination) {
File f = null;
long now = System.currentTimeMillis();
// Denying permission to self? Go away.
if (subject.equalsIgnoreCase(destination)) {
return false;
}
// Delete permission token, if any
f = new File(req_dir, destination + "-to-" + subject + ".granted");
f.delete();
// Delete request token, if any
f = new File(req_dir, destination + "-to-" + subject + ".requested");
f.delete();
// Create denial token
f = new File(req_dir, destination + "-to-" + subject + ".denied");
if (f.exists() && f.lastModified() + cooldown > now) {
// Already denied so renew quietly
getLogger().info(subject + " renewing /tpa denial for " + destination);
f.setLastModified(now);
return true;
} else {
// This is a new denial
Player p = this.getServer().getPlayer(destination);
getLogger().info(subject + " creating new /tpa denial for " + destination);
try {
f.createNewFile();
p.sendMessage("§7[§6TP§7]§b §6" + subject + "§c has denied you /tpa permission (See '/tphelp')");
} catch (IOException e) {
e.printStackTrace();
getLogger().warning("Unexpected error creating "+f.getName()+": "+e.getLocalizedMessage());
return false;
}
return true;
}
}
private boolean has_denial(String subject, String destination) {
File f = null;
long now = System.currentTimeMillis();
// Checking permission to self? Go away.
if (subject.equalsIgnoreCase(destination)) {
return false;
}
// Check for valid permission token
f = new File(req_dir, subject + "-to-" + destination + ".denied");
if (f.exists() && f.lastModified() + cooldown > now) {
return true;
} else {
return false;
}
}
private boolean has_permission(String subject, String destination) {
File f = null;
long now = System.currentTimeMillis();
// Checking permission to self? Go away.
if (subject.equalsIgnoreCase(destination)) {
return false;
}
// Check for valid permission token
f = new File(req_dir, subject + "-to-" + destination + ".granted");
if (f.exists() && f.lastModified() + cooldown > now) {
return true;
} else {
return false;
}
}
private boolean has_request(String subject, String destination) {
File f = null;
long now = System.currentTimeMillis();
// Checking for request to self? Go away.
if (subject.equalsIgnoreCase(destination)) {
return false;
}
// Check for valid request token
f = new File(req_dir, subject + "-to-" + destination + ".requested");
if (f.exists() && f.lastModified() + cooldown > now) {
return true;
} else {
return false;
}
}
private Integer getUnixtime() {
return (int) (System.currentTimeMillis() / 1000L);
}
private void registerLocation(String pname, Location loc) {
Integer minute_now = (getUnixtime() / 60);
Integer minute_limit = minute_now - max_tpback;
// Fetch this player's location table
ConcurrentHashMap<Integer,Location> playerlocs = locs.get(pname);
if (playerlocs == null) {
playerlocs = loadLocations(pname);
locs.put(pname, playerlocs);
}
// Unless already done this minute, record current location
playerlocs.putIfAbsent(minute_now, loc);
// Purge data older than 1 hour
for (Integer minute : playerlocs.keySet()) {
if (minute > minute_limit) {
break;
}
playerlocs.remove(minute);
}
}
private Location getLocation(String pname, Integer delta) {
Location loc = null;
Integer minute_now = (getUnixtime() / 60) - delta;
// Fetch this player's location table
ConcurrentHashMap<Integer,Location> playerlocs = locs.get(pname);
if (playerlocs == null) {
//logger.info("[TP] I have no data for "+pname);
return loc;
}
// Play back the last hour until we get past the moment we're looking for
//logger.info("[TP] Searching "+pname+"'s CHM for delta "+minutes+" (unixtime "+unixtime+")");
List<Integer> keys = new ArrayList<Integer>(playerlocs.keySet());
Collections.sort(keys);
for (Integer minute : keys) {
if (minute > minute_now) {
//logger.info("[TP] "+pname+"'s next location is at "+minute+" which is past delta.");
break;
}
//logger.info("[TP] Found "+pname+"'s location at "+minute);
loc = playerlocs.get(minute);
}
return loc;
}
private Integer getOldestDelta(String pname) {
// Fetch this player's location table
ConcurrentHashMap<Integer,Location> playerlocs = locs.get(pname);
if (playerlocs == null) {
return null;
}
List<Integer> keys = new ArrayList<Integer>(playerlocs.keySet());
Collections.sort(keys);
for (Integer minute : keys) {
// Don't actually loop, just return the first one
//logger.info("[TP] Found "+pname+"'s location at "+minute);
return (getUnixtime()/60) - minute;
}
return null; // Unreachable
}
	// Load a player's persisted location history from <loc_dir>/<pname>.dat.
	// File format: one "minute:world,x,y,z,yaw,pitch" line per entry.
	// Entries older than max_tpback minutes, and entries referencing worlds
	// that no longer exist, are silently dropped. Always returns a usable
	// (possibly empty) map, never null.
	private ConcurrentHashMap<Integer,Location> loadLocations(String pname) {
		Integer minute_now = (getUnixtime() / 60);
		Integer minute_limit = minute_now - max_tpback;
		ConcurrentHashMap<Integer,Location> playerlocs = new ConcurrentHashMap<Integer,Location>();
		File file = new File(loc_dir, pname + ".dat");
		if (file.exists() == false) {
			// No saved history for this player yet
			return playerlocs;
		}
		BufferedReader reader = null;
		try {
			reader = new BufferedReader(new FileReader(file));
			String line = null;
			while ((line = reader.readLine()) != null) {
				String[] pair = line.split(":");
				Integer minute = Integer.valueOf(pair[0]);
				if (minute >= minute_limit) {
					// Data is recent enough to keep
					String[] values = pair[1].split(",");
					World world = getServer().getWorld(values[0]);
					if (world != null) {
						// World still exists
						Double x = Double.valueOf(values[1]);
						Double y = Double.valueOf(values[2]);
						Double z = Double.valueOf(values[3]);
						Float yaw = Float.valueOf(values[4]);
						Float pitch = Float.valueOf(values[5]);
						Location loc = new Location(world, x, y, z, yaw, pitch);
						// Remember this location
						playerlocs.put(minute, loc);
					}
				}
			}
		} catch (FileNotFoundException e) {
			// Should not be possible
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		} catch (Exception e) {
			// Catch-all guards against malformed lines (NumberFormatException,
			// missing fields); the file is best-effort cache data, so a bad
			// line only costs the remainder of the file, not the plugin
			e.printStackTrace();
		} finally {
			try {
				if (reader != null) {
					reader.close();
				}
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		return playerlocs;
	}
private void saveLocations(String pname) {
ConcurrentHashMap<Integer,Location> playerlocs = locs.get(pname);
if (playerlocs == null) {
getLogger().warning("Internal error: No location data to save for player "+pname);
return;
}
File file = new File(loc_dir, pname + ".dat");
try {
FileWriter outFile = new FileWriter(file);
PrintWriter out = new PrintWriter(outFile);
for (Integer minute : playerlocs.keySet()) {
Location loc = playerlocs.get(minute);
// Serialize location manually
String w = loc.getWorld().getName();
Double x = loc.getX();
Double y = loc.getY();
Double z = loc.getZ();
Float yaw = loc.getYaw();
Float pitch = loc.getPitch();
// Write to file
out.println(minute+":"+w+","+x+","+y+","+z+","+yaw+","+pitch);
}
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
private boolean safeTeleport(Player player, Location location, Boolean force) {
if (location == null) {
player.sendMessage("§7[§6TP§7]§c Invalid location");
return false;
}
Integer x = location.getBlockX();
Integer y = location.getBlockY();
Integer z = location.getBlockZ();
Block b = null;
World world = location.getWorld();
if (y < world.getMaxHeight()) { y++; }
Integer needAir = 2;
Boolean danger = false;
for (Integer check = y; check >= 0; check--) {
b = world.getBlockAt(x, check, z);
Material type = b.getType();
if (isAir(type)) {
needAir--;
continue;
}
if (type == Material.LAVA || type == Material.STATIONARY_LAVA) {
player.sendMessage("§7[§6TP§7]§c Lava detected");
danger = true;
break;
}
if (type == Material.FIRE) {
player.sendMessage("§7[§6TP§7]§c Fire detected");
danger = true;
}
if (type == Material.TRAP_DOOR) {
player.sendMessage("§7[§6TP§7]§c Trapdoor detected");
danger = true;
break;
}
if (needAir > 0) {
player.sendMessage("§7[§6TP§7]§c Blocked location detected ("+b.getType().name()+")");
danger = true;
}
break; // Found a safe platform
}
if (isPVP(location)) {
player.sendMessage("§7[§6TP§7]§c Player vs Player (PVP) region detected");
danger = true;
}
if (danger == false) {
return player.teleport(location);
} else if (force == true) {
getLogger().info(player.getName() + " used the safety override");
player.sendMessage("§7[§6TP§7]§c Safety override (-force) is in effect");
return player.teleport(location);
}
return false;
}
	// Whether a player could safely occupy (breathe in) a block of this
	// material. Starts from "non-solid means breathable", then applies two
	// exception lists: non-solid materials that still drown/burn/trap, and
	// solid(ish) materials that nevertheless leave head room.
	private boolean isAir(Material type) {
		boolean ret = !type.isSolid();
		// exceptions
		switch (type)
		{
			case WATER:
			case STATIONARY_WATER:
			case LAVA:
			case STATIONARY_LAVA:
			case PORTAL:
				// Add more non-solid materials that do NOT allow breathing here...
				ret = false;
				break;
			case PISTON_EXTENSION:
			case PISTON_MOVING_PIECE:
			case STEP:
			case SIGN_POST:
			case WOODEN_DOOR:
			case WALL_SIGN:
			case STONE_PLATE:
			case IRON_DOOR_BLOCK:
			case WOOD_PLATE:
			case FENCE:
			case CAKE_BLOCK:
			case TRAP_DOOR:
			case IRON_FENCE:
			case THIN_GLASS:
			case FENCE_GATE:
			case NETHER_FENCE:
			case BREWING_STAND:
			case CAULDRON:
			case WOOD_STEP:
			case GOLD_PLATE:
			case IRON_PLATE:
				// Add more solid materials that do allow breathing here...
				ret = true;
				break;
			default:
				break;
		}
		return ret;
	}
/**
 * Converts a wall-clock time ("HH:MM") into the number of minutes elapsed
 * since that time, assuming it refers to its most recent occurrence
 * (earlier today, or yesterday if the time has not yet passed today).
 *
 * @param timestr time of day in "HH:MM" form (caller validates the format)
 * @return the elapsed minutes, rendered as a decimal string
 */
private String time_to_delta(String timestr) {
    String[] parts = timestr.split(":");
    Integer hh = Integer.parseInt(parts[0]);
    Integer mm = Integer.parseInt(parts[1]);
    // Clamp out-of-range values into a valid hour/minute
    if (hh < 0) { hh += 24; }
    if (hh > 23) { hh -= 24; }
    if (mm < 0) { mm += 60; }
    // Bug fix: this branch previously did "hh -= 60", corrupting the hour
    // instead of clamping the minute
    if (mm > 59) { mm -= 60; }
    // Use Calendar to calculate the difference
    Calendar now = Calendar.getInstance();
    Calendar then = Calendar.getInstance();
    then.set(Calendar.HOUR_OF_DAY, hh);
    then.set(Calendar.MINUTE, mm);
    long ms = now.getTimeInMillis() - then.getTimeInMillis();
    if (ms < 0) {
        // Requested time is in the future, so the user must mean yesterday
        then.add(Calendar.DATE, -1);
        ms = now.getTimeInMillis() - then.getTimeInMillis();
    }
    // Convert from milliseconds to whole minutes and return to caller
    Integer delta = (int) (ms / (1000 * 60));
    return delta.toString();
}
/** Counts how many entries in the argument array look like "--option" switches. */
private Integer numOptions(String[] array) {
    int count = 0;
    for (int i = 0; i < array.length; i++) {
        if (array[i].startsWith("--")) {
            count++;
        }
    }
    return count;
}
/** Sends a message to the player, or to the server console when p is null. */
private void respond(Player p, String msg) {
    if (p == null) {
        ConsoleCommandSender console = getServer().getConsoleSender();
        console.sendMessage(msg);
    } else {
        p.sendMessage(msg);
    }
}
/**
 * Checks whether the given location lies inside a WorldGuard region where
 * the PVP flag is enabled. Returns false when WorldGuard is not installed
 * or the world has no region manager.
 *
 * @param loc the location to classify
 * @return true if a region at this location enables PVP
 */
private boolean isPVP(Location loc) {
    if (worldguard == null) { return false; }
    RegionManager regionManager = worldguard.getRegionContainer().get(loc.getWorld());
    // Bug fix: get() returns null for worlds without region support
    if (regionManager == null) { return false; }
    ApplicableRegionSet set = regionManager.getApplicableRegions(loc);
    // null subject: query the flag's effective region value without member overrides
    return set.testState(null, DefaultFlag.PVP);
}
}
| src/no/atc/floyd/bukkit/tele/TelePlugin.java | package no.atc.floyd.bukkit.tele;
import org.bukkit.entity.Player;
import org.bukkit.GameMode;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.Server;
import org.bukkit.World;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerLoginEvent;
import org.bukkit.event.player.PlayerMoveEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.event.player.PlayerTeleportEvent;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.PluginManager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.bukkit.block.Block;
import org.bukkit.command.*;
import com.sk89q.worldguard.bukkit.WorldGuardPlugin;
import com.sk89q.worldguard.protection.flags.DefaultFlag;
import com.sk89q.worldguard.protection.managers.RegionManager;
import com.sk89q.worldguard.protection.ApplicableRegionSet;
/**
* TelePlugin plugin for Bukkit
*
* @author FloydATC
*/
public class TelePlugin extends JavaPlugin implements Listener {
//public static Permissions Permissions = null;
private File req_dir = new File(this.getDataFolder(), "requests");
private File loc_dir = new File(this.getDataFolder(), "locations");
private final long cooldown = 86400 * 1000; // Milliseconds
private ConcurrentHashMap<String,ConcurrentHashMap<Integer,Location>> locs = new ConcurrentHashMap<String,ConcurrentHashMap<Integer,Location>>();
private Integer max_tpback = 1440; // Number of MINUTES to keep
WorldGuardPlugin worldguard = null;
public void onDisable() {
    // Flush every cached /tpback location table to disk before shutdown.
    for (String playerName : locs.keySet()) {
        saveLocations(playerName);
    }
}
public void onEnable() {
    // Set up directory for request, denial and permission tokens and
    // clear out any stale token files (i.e. older than cooldown).
    if (req_dir.exists() == false) { req_dir.mkdirs(); }
    File[] files = req_dir.listFiles();
    long now = System.currentTimeMillis();
    // Bug fix: listFiles() returns null if the directory could not be
    // created or read; guard against the resulting NPE
    if (files != null) {
        for (int i = 0; i < files.length; i++) {
            if (files[i].lastModified() + cooldown < now) {
                files[i].delete();
            }
        }
    }
    // Set up directory for player location data (used for /tpback)
    if (loc_dir.exists() == false) { loc_dir.mkdirs(); }
    // WorldGuard integration (optional dependency)
    Plugin wg = getServer().getPluginManager().getPlugin("WorldGuard");
    if (wg == null || !(wg instanceof WorldGuardPlugin)) {
        getLogger().info("WorldGuard not loaded, will not detect PVP regions");
    } else {
        worldguard = (WorldGuardPlugin) wg;
        getLogger().info("Using WorldGuard to detect PVP regions");
    }
    // Register event handlers
    PluginManager pm = getServer().getPluginManager();
    pm.registerEvents(this, this);
}
/**
 * Dispatches all plugin commands (/tp, /tpa, /tpy, /tpn, /tphere, /tpto,
 * /tpback, /warp, /setwarp, /delwarp, /movewarp, /listwarps).
 * Console invocations have player == null; commands that require a player
 * check for that explicitly, all other output goes through respond().
 */
public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args ) {
    String cmdname = cmd.getName().toLowerCase();
    Player player = null;
    if (sender instanceof Player) {
        player = (Player) sender;
    }
    // Name used in log lines; the console has no Player object
    String performer = (player == null ? "(Server)" : player.getName());
    // Strip "--" switches out of the argument list; only --force is recognized
    Boolean force = false;
    Integer options = numOptions(args);
    if (options > 0) {
        String[] revised_args = new String[args.length - options];
        Integer index = 0;
        for (String s: args) {
            if (s.startsWith("--")) {
                if (s.equalsIgnoreCase("--force")) {
                    force = true;
                }
            } else {
                revised_args[index] = s;
                index++;
            }
        }
        getLogger().info("done revising argument list");
        args = revised_args;
    }
    if (cmdname.equalsIgnoreCase("tp") && player != null && player.hasPermission("teleplugin.tp")) {
        if (args.length == 0 || args.length > 3) {
            player.sendMessage("§7[§6TP§7]§b Valid syntax:");
            player.sendMessage("§7[§6TP§7]§b /tp <player>");
            player.sendMessage("§7[§6TP§7]§b /tp <x> <z>");
            player.sendMessage("§7[§6TP§7]§b /tp <x> <z> <world>");
            return true;
        }
        if (args.length == 1) {
            // Teleport to player
            if (teleport(player.getName(), args[0], force)) {
                getLogger().info(player.getName() + " teleported to " + args[0] );
            } else {
                player.sendMessage("§7[§6TP§7]§c Could not teleport you to §6" + args[0]);
                getLogger().info(player.getName() + " could not teleport to " + args[0] );
            }
            return true;
        }
        if (args.length == 2) {
            // Teleport to specific coordinates in current world at maximum height
            Location loc = player.getLocation();
            Integer x;
            Integer z;
            try {
                // Bug fix: malformed numbers used to escape as an uncaught NumberFormatException
                x = Integer.parseInt(args[0]);
                z = Integer.parseInt(args[1]);
            } catch (NumberFormatException e) {
                player.sendMessage("§7[§6TP§7]§c Expected numeric coordinates");
                return true;
            }
            Integer y = loc.getWorld().getHighestBlockYAt(x, z);
            loc.setX(x);
            loc.setZ(z);
            loc.setY(y);
            if (safeTeleport(player, loc, force)) {
                player.sendMessage("§7[§6TP§7]§b Teleported you to coordinates x=" +x+ " z="+z);
                getLogger().info(player.getName() + " teleported to coordinates x=" +x+ " z="+z);
            } else {
                player.sendMessage("§7[§6TP§7]§c Could not teleport you to coordinates x="+x+" z="+z);
                getLogger().info(player.getName() + " could not teleport to coordinates x="+x+" z="+z);
            }
            return true;
        }
        if (args.length == 3) {
            // Teleport to specific coordinates in alternate world at maximum height
            Location loc = player.getLocation();
            Integer x;
            Integer z;
            try {
                x = Integer.parseInt(args[0]);
                z = Integer.parseInt(args[1]);
            } catch (NumberFormatException e) {
                player.sendMessage("§7[§6TP§7]§c Expected numeric coordinates");
                return true;
            }
            World w = player.getServer().getWorld(args[2]);
            if (w == null) {
                player.sendMessage("§7[§6TP§7]§c There is no world called " + args[2]);
                return true;
            }
            Integer y = w.getHighestBlockYAt(x, z);
            loc.setX(x);
            loc.setZ(z);
            loc.setY(y);
            loc.setWorld(w);
            // Bug fix: messages below previously said "oordinates"
            if (safeTeleport(player, loc, force)) {
                player.sendMessage("§7[§6TP§7]§b Teleported you to coordinates x=" +x+ " z="+z+ " in world "+w.getName());
                getLogger().info(player.getName() + " teleported to coordinates x=" +x+ " z="+z+ " in world "+w.getName());
            } else {
                player.sendMessage("§7[§6TP§7]§c Could not teleport you to coordinates x=" +x+ " z="+z+ " in world "+w.getName());
                getLogger().info(player.getName() + " could not teleport to coordinates x=" +x+ " z="+z+ " in world "+w.getName());
            }
            return true;
        }
    }
    if (cmdname.equalsIgnoreCase("tphelp") && player != null) {
        player.sendMessage("§7[§6TP§7]§b Personal teleportation commands: §c[EXPERIMENTAL]");
        player.sendMessage("§7[§6TP§7]§6 /tpa <name> §bRequest teleport to player");
        player.sendMessage("§7[§6TP§7]§6 /tpy <name> §bGrant teleport access");
        player.sendMessage("§7[§6TP§7]§6 /tpn <name> §bDeny teleport access");
        player.sendMessage("§7[§6TP§7]§b Access is granted for 24 hours or until denied");
        player.sendMessage("§7[§6TP§7]§b Repeated requests/grants/denials are ignored");
        if (player.hasPermission("teleplugin.tpa") == false) {
            player.sendMessage("§7[§6TP§7]§c You do not yet have permission to use this feature");
        }
        return true;
    }
    if (cmdname.equalsIgnoreCase("tpa") && player != null && player.hasPermission("teleplugin.tpa")) {
        force = false; // Disallowed
        if (args.length != 1) {
            player.sendMessage("§7[§6TP§7]§b Valid syntax:");
            player.sendMessage("§7[§6TP§7]§b /tpa <player>");
            return true;
        }
        // Canonicalize the target name if the player is online
        Player target = this.getServer().getPlayer(args[0]);
        if (target != null) {
            args[0] = target.getName();
        }
        if (target != null && target.hasPermission("teleplugin.tpa") == false) {
            player.sendMessage("§7[§6TP§7]§b §6" + args[0] + "§c does not yet have permission to use this feature");
            return true;
        }
        if (has_denial(player.getName(), args[0])) {
            player.sendMessage("§7[§6TP§7]§b §6" + args[0] + "§c has denied you teleport permission.");
            return true;
        }
        if (has_permission(player.getName(), args[0])) {
            if (teleport(player.getName(), args[0], force)) {
                getLogger().info(player.getName() + " teleported to " + args[0] );
                cancel_request(player.getName(), args[0]);
            } else {
                player.sendMessage("§7[§6TP§7]§c Could not teleport you to §6" + args[0]);
                getLogger().info(player.getName() + " could not teleport to " + args[0] );
            }
        } else {
            if (request_permission(player.getName(), args[0])) {
                player.sendMessage("§7[§6TP§7]§b Requested teleport to §6" + args[0]);
            } else {
                player.sendMessage("§7[§6TP§7]§c Could not request teleport to §6" + args[0] + "§c at this time");
            }
        }
        return true;
    }
    if (cmdname.equalsIgnoreCase("tpy") && player != null && player.hasPermission("teleplugin.tpa")) {
        force = false; // Disallowed
        if (args.length != 1) {
            player.sendMessage("§7[§6TP§7]§b Valid syntax:");
            player.sendMessage("§7[§6TP§7]§b /tpy <player>");
            return true;
        }
        Player target = this.getServer().getPlayer(args[0]);
        if (target != null) {
            args[0] = target.getName();
        }
        if (target != null && target.hasPermission("teleplugin.tpa") == false) {
            player.sendMessage("§7[§6TP§7]§b §6" + args[0] + "§c does not yet have permission to use this feature");
            return true;
        }
        if (grant_permission(player.getName(), args[0])) {
            player.sendMessage("§7[§6TP§7]§b Teleport permission granted to §6" + args[0]);
        } else {
            player.sendMessage("§7[§6TP§7]§c Could not grant teleport permission to §6" + args[0] + "§c at this time");
        }
        // If the requester is waiting, complete the pending teleport immediately
        if (has_request(args[0], player.getName()) && has_permission(args[0], player.getName())) {
            if (teleport(args[0], player.getName(), force)) {
                getLogger().info(args[0] + " teleported to " + player.getName() );
                cancel_request(args[0], player.getName());
            }
        }
        return true;
    }
    if (cmdname.equalsIgnoreCase("tpn") && player != null && player.hasPermission("teleplugin.tpa")) {
        if (args.length != 1) {
            player.sendMessage("§7[§6TP§7]§b Valid syntax:");
            player.sendMessage("§7[§6TP§7]§b /tpn <player>");
            return true;
        }
        if (deny_permission(player.getName(), args[0])) {
            player.sendMessage("§7[§6TP§7]§b Teleport permission denied to §6" + args[0]);
        } else {
            player.sendMessage("§7[§6TP§7]§c Could not deny teleport permission to §6" + args[0] + "§c at this time");
        }
        return true;
    }
    if (cmdname.equalsIgnoreCase("tphere") && player != null && player.hasPermission("teleplugin.tphere")) {
        if (args.length == 0) {
            player.sendMessage("§7[§6TP§7]§b Valid syntax:");
            player.sendMessage("§7[§6TP§7]§b /tphere <player> [<player> ...]");
            return true;
        }
        for (String subject : args) {
            // Bug fix: messages used to always name args[0] instead of the subject being processed
            if (teleport(subject, player.getName(), force)) {
                getLogger().info(player.getName() + " teleported " + subject + " to self");
                player.sendMessage("§7[§6TP§7]§b Teleported §6" + subject + "§b to you");
            } else {
                player.sendMessage("§7[§6TP§7]§c Could not teleport §6" + subject + "§c to you");
            }
        }
        return true;
    }
    if (cmdname.equalsIgnoreCase("tpto") && (player == null || player.hasPermission("teleplugin.tpto"))) {
        if (args.length < 2) {
            // Bug fix: use respond() so a console invocation does not NPE here
            respond(player, "§7[§6TP§7]§b Valid syntax:");
            respond(player, "§7[§6TP§7]§b /tpto <to_player> <player> [<player> ...]");
            return true;
        }
        // Last argument is the destination; everything before it is teleported there
        String destination = args[args.length-1];
        for (Integer i=0; i<args.length-1; i++) {
            if (teleport(args[i], destination, force)) {
                getLogger().info(performer + " teleported " + args[i] + " to " + destination);
                if (player != null) {
                    player.sendMessage("§7[§6TP§7]§b Teleported §6" + args[i] + "§b to §6" + destination);
                }
            } else {
                getLogger().info("Could not teleport " + args[i] + " to " + destination);
                if (player != null) {
                    player.sendMessage("§7[§6TP§7]§c Could not teleport §6" + args[i] + "§c to §6" + destination);
                }
            }
        }
        return true;
    }
    if (cmdname.equalsIgnoreCase("tpback") && player != null && player.hasPermission("teleplugin.tpback")) {
        if (args.length == 0 || args.length > 2) {
            player.sendMessage("§7[§6TP§7]§b Valid syntax:");
            player.sendMessage("§7[§6TP§7]§b /tpback <minutes> [<player>]");
            player.sendMessage("§7[§6TP§7]§b /tpback <hh>:<mm> [<player>]");
            return true;
        }
        // Accept either a minute count or a wall-clock "HH:MM" time of day
        if (args[0].matches("^[0-2][0-9]:[0-5][0-9]$")) {
            args[0] = time_to_delta(args[0]);
        }
        Integer delta;
        try {
            delta = Integer.valueOf(args[0]);
        }
        catch (NumberFormatException e) {
            player.sendMessage("§7[§6TP§7]§c Expected a number");
            return false;
        }
        if (args.length == 1) {
            // Teleport back to own recorded location
            Location loc = getLocation(player.getName(), delta);
            if (loc == null) {
                player.sendMessage("§7[§6TP§7]§c No location recorded "+delta+" minute"+(delta==1?"":"s")+" ago");
                Integer oldest = getOldestDelta(player.getName());
                if (oldest != null) {
                    player.sendMessage("§7[§6TP§7]§c The earliest location is "+oldest+" minute"+(oldest==1?"":"s")+" old");
                }
            } else {
                if (safeTeleport(player, loc, force)) {
                    getLogger().info(player.getName() + " teleported "+delta+" minute"+(delta==1?"":"s")+" back");
                    player.sendMessage("§7[§6TP§7]§b Teleported you "+delta+" minute"+(delta==1?"":"s")+" back");
                } else {
                    player.sendMessage("§7[§6TP§7]§c Teleport to "+delta+" minute"+(delta==1?"":"s")+" ago failed");
                }
            }
            return true;
        }
        if (args.length == 2 && player.hasPermission("teleplugin.tpback.other")) {
            // Canonicalize the target player name if they are online
            String pname = args[1];
            Player p = player.getServer().getPlayer(pname);
            if (p != null) {
                pname = p.getName();
            }
            // Teleport back to another player's recorded location
            Location loc = getLocation(pname, delta);
            if (loc == null) {
                player.sendMessage("§7[§6TP§7]§c No location recorded for §6"+pname+"§c "+delta+" minute"+(delta==1?"":"s")+" ago");
                Integer oldest = getOldestDelta(pname);
                if (oldest != null) {
                    player.sendMessage("§7[§6TP§7]§c The earliest location for §6"+pname+"§c is "+oldest+" minute"+(oldest==1?"":"s")+" old");
                }
            } else {
                if (safeTeleport(player, loc, force)) {
                    getLogger().info(player.getName() + " teleported to where "+pname+" was "+delta+" minute"+(delta==1?"":"s")+" ago");
                    player.sendMessage("§7[§6TP§7]§b Teleported you to where §6"+pname+"§c was "+delta+" minute"+(delta==1?"":"s")+" ago");
                } else {
                    player.sendMessage("§7[§6TP§7]§c Teleport to where §6"+pname+"§c was "+delta+" minute"+(delta==1?"":"s")+" ago failed");
                }
            }
            return true;
        }
    }
    if (cmdname.equalsIgnoreCase("warp")) {
        if (args.length == 0) {
            respond(player, "§7[§6TP§7]§b Syntax:");
            respond(player, "§7[§6TP§7]§b /warp <place> [--force|<player> [...]]");
            return true;
        }
        Warp w = new Warp(args[0], player, this);
        if (!w.exists()) {
            respond(player, "§7[§6TP§7]§c Warp point '"+args[0]+"' not found");
            return true;
        }
        if (!w.usePermitted(player)) {
            getLogger().info(performer + " warp to "+args[0]+" denied");
            respond(player, "§7[§6TP§7]§c You don't have permission to use warp point '"+args[0]+"'");
            return true;
        }
        if (args.length == 1) {
            // Warp self
            if (player == null) {
                respond(player, "§7[§6TP§7]§c Must specify a player from the console");
            } else {
                if (safeTeleport(player, w.location(), force)) {
                    getLogger().info(player.getName() + " warped to "+args[0]);
                    respond(player, "§7[§6TP§7]§b Warped you to "+args[0]);
                    w.touch();
                } else {
                    respond(player, "§7[§6TP§7]§c Warp to "+args[0]+" failed");
                }
            }
        } else {
            // Warp others. Bug fix: the console previously NPE'd here; it is
            // now treated as privileged
            if (player == null || player.hasPermission("teleplugin.warpother")) {
                for (Integer i = 1; i < args.length; i++) {
                    Player p = getServer().getPlayer(args[i]);
                    if (p != null) {
                        if (safeTeleport(p, w.location(), false)) {
                            getLogger().info(performer + " warped "+p.getName()+" to "+args[0]);
                            respond(player, "§7[§6TP§7]§b Warped "+p.getName()+" to "+args[0]);
                            respond(p, "§7[§6TP§7]§b "+performer+" warped you to "+w.name());
                            w.touch();
                        } else {
                            // Bug fix: used to report args[1] (a player name) instead of the warp point
                            respond(player, "§7[§6TP§7]§c "+p.getName()+" was not warped to "+args[0]);
                        }
                    } else {
                        respond(player, "§7[§6TP§7]§c Player "+args[i]+" is not online");
                    }
                }
            } else {
                getLogger().info(performer + " warp others to "+args[0]+" denied");
                respond(player, "§7[§6TP§7]§c You don't have permission to warp other players");
            }
        }
        return true;
    }
    if (cmdname.equalsIgnoreCase("setwarp")) {
        if (args.length != 1) {
            respond(player, "§7[§6TP§7]§b Syntax:");
            respond(player, "§7[§6TP§7]§b /setwarp <place>");
            return true;
        }
        if (player == null) {
            respond(player, "§7[§6TP§7]§c You can't set a warp point from the console");
            return true;
        }
        Warp w = new Warp(args[0], player, this);
        if (w.exists()) {
            respond(player, "§7[§6TP§7]§c Warp point '"+args[0]+"' already exists");
        } else if (w.createPermitted(player)) {
            if (w.create()) {
                getLogger().info(player.getName()+" created warp point "+args[0]);
                respond(player, "§7[§6TP§7]§b Warp point '"+args[0]+"' was created");
            } else {
                getLogger().warning("Error creating warp point '"+args[0]+"': "+w.error());
                respond(player, "§7[§6TP§7]§c Internal error: "+w.error());
            }
        } else {
            getLogger().warning(player.getName()+" was not permitted to create warp point "+args[0]);
            respond(player, "§7[§6TP§7]§c You don't have permission to create warp point '"+args[0]+"'");
        }
        return true;
    }
    if (cmdname.equalsIgnoreCase("delwarp")) {
        if (args.length != 1) {
            respond(player, "§7[§6TP§7]§b Syntax:");
            respond(player, "§7[§6TP§7]§b /delwarp <place>");
            return true;
        }
        Warp w = new Warp(args[0], player, this);
        if (w.exists()) {
            if (w.deletePermitted(player)) {
                w.delete();
                // Bug fix: log with performer so a console invocation cannot NPE
                getLogger().info(performer+" deleted warp point "+args[0]);
                respond(player, "§7[§6TP§7]§b Warp point '"+args[0]+"' has now been deleted");
            } else {
                getLogger().warning(performer+" was not permitted to delete warp point "+args[0]);
                respond(player, "§7[§6TP§7]§c You don't have permission to delete warp point '"+args[0]+"'");
            }
        } else {
            respond(player, "§7[§6TP§7]§c Warp point '"+args[0]+"' does not exist");
        }
        return true;
    }
    if (cmdname.equalsIgnoreCase("movewarp")) {
        if (args.length != 1) {
            respond(player, "§7[§6TP§7]§b Syntax:");
            respond(player, "§7[§6TP§7]§b /movewarp <place>");
            return true;
        }
        if (player == null) {
            respond(player, "§7[§6TP§7]§c You can't move a warp point from the console");
            return true;
        }
        Warp w = new Warp(args[0], player, this);
        if (w.exists()) {
            if (w.movePermitted(player)) {
                // A move is implemented as delete + re-create at the player's position
                if (w.delete() && w.create()) {
                    getLogger().info(player.getName()+" moved warp point "+args[0]);
                    respond(player, "§7[§6TP§7]§b Warp point '"+args[0]+"' was moved");
                } else {
                    getLogger().warning("Error moving warp point"+args[0]+": "+w.error());
                    respond(player, "§7[§6TP§7]§c Internal error: "+w.error());
                }
            } else {
                getLogger().warning(player.getName()+" was not permitted to move warp point "+args[0]);
                respond(player, "§7[§6TP§7]§c You don't have permission to move warp point '"+args[0]+"'");
            }
        } else {
            respond(player, "§7[§6TP§7]§c Warp point '"+args[0]+"' does not exist");
        }
        return true;
    }
    if (cmdname.equalsIgnoreCase("listwarps")) {
        if (args.length > 2) {
            respond(player, "§7[§6TP§7]§b Syntax:");
            respond(player, "§7[§6TP§7]§b /listwarps [<player>] [<page>]");
            return true;
        }
        // Parse arguments: an optional owner ("." = self) and an optional 1-based page number.
        // Bug fix: the second argument was previously ignored despite the advertised syntax.
        String owner = null;
        int page = 0;
        for (String arg : args) {
            if (Character.isDigit(arg.charAt(0))) {
                try {
                    // Bug fix: the page was parsed twice and the second parse dropped
                    // the "- 1" adjustment, skipping the first page of results
                    page = Integer.parseInt(arg) - 1;
                }
                catch (NumberFormatException e) {
                    respond(player, "§7[§6TP§7]§c Expected a number");
                    return false;
                }
            }
            else if (arg.equals(".")) {
                if (player != null) {
                    owner = player.getName();
                }
            }
            else {
                owner = arg;
            }
        }
        if (page < 0) page = 0;
        // Sanitize owner name. Bug fix: replaceAll() returns a new string; the
        // sanitized result was previously discarded
        if (owner != null) {
            owner = owner.replaceAll("[^\\w@\\.\\'\\-]", "");
        }
        // Check permissions; the console may list anything. Bug fix: a denied
        // player previously still received the listing because of missing returns
        if (player != null) {
            if (owner == null) {
                if (!player.hasPermission("teleplugin.listwarps.global")) {
                    getLogger().warning(player.getName() + " was not permitted to list global warp points");
                    respond(player, "§7[§6TP§7]§c You don't have permission to list these warp points");
                    return true;
                }
            }
            else if (owner.equalsIgnoreCase(player.getName())) {
                if (!player.hasPermission("teleplugin.listwarps.own")) {
                    getLogger().warning(player.getName() + " was not permitted to list own warp points");
                    respond(player, "§7[§6TP§7]§c You don't have permission to list these warp points");
                    return true;
                }
            }
            else {
                if (!player.hasPermission("teleplugin.listwarps.other")) {
                    getLogger().warning(player.getName() + " was not permitted to list " + owner + "'s warp points");
                    respond(player, "§7[§6TP§7]§c You don't have permission to list these warp points");
                    return true;
                }
            }
        }
        File dir = new File(this.getDataFolder(), "warps");
        if (owner != null) {
            dir = new File(dir, owner);
        }
        // List all files ending with ".loc"
        File[] warps = dir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return new File(dir, name).isFile() && name.toLowerCase().endsWith(".loc");
            }
        });
        // listFiles() returns null when the directory does not exist yet
        if (warps == null) {
            warps = new File[0];
        }
        int pageCount = (int) Math.ceil(warps.length/10f);
        respond(player, "§7[§6TP§7]§b Warps (page " + (page + 1) + " of " + pageCount + "):");
        int startIndex = page * 10;
        for (int i = startIndex; i < startIndex + 10 && i < warps.length; i++) {
            respond(player, "§7[§6TP§7]§9 " + warps[i].getName());
        }
        return true;
    }
    return false;
}
@EventHandler
public boolean onLogin( PlayerLoginEvent event ) {
    // Seed the /tpback history with the player's login position.
    Player p = event.getPlayer();
    registerLocation(p.getName(), p.getLocation());
    return true;
}
@EventHandler
public boolean onQuit( PlayerQuitEvent event ) {
    // Record the final position, flush to disk, then drop the in-memory table.
    Player p = event.getPlayer();
    String pname = p.getName();
    registerLocation(pname, p.getLocation());
    saveLocations(pname);
    locs.remove(pname);
    return true;
}
@EventHandler
public boolean onMove( PlayerMoveEvent event ) {
    // Track where the player came from so /tpback can rewind movement.
    Player p = event.getPlayer();
    registerLocation(p.getName(), event.getFrom());
    return true;
}
@EventHandler(priority = EventPriority.LOW) // Must process before CreativeControl
public void onTeleport( PlayerTeleportEvent event ) {
    Player player = event.getPlayer();
    String pname = player.getName();
    // Record departure point for /tpback
    registerLocation(pname, event.getFrom());
    String from_world = event.getFrom().getWorld().getName().toLowerCase();
    String to_world = event.getTo().getWorld().getName().toLowerCase();
    getLogger().fine("DEBUG: " + pname + " teleporting from '" + from_world + "' to '" + to_world + "'");
    if (from_world.equals(to_world)) {
        return; // Teleporting within the same world always permitted
    } else {
        // Cross-world teleport: adjust game mode according to per-world permission
        if (player.hasPermission("teleplugin.creative."+to_world)) {
            // Switch to CREATIVE mode
            player.setGameMode(GameMode.CREATIVE);
        } else {
            // Switch to SURVIVAL mode
            player.setGameMode(GameMode.SURVIVAL);
        }
    }
    if (event.getCause() == PlayerTeleportEvent.TeleportCause.NETHER_PORTAL) {
        getLogger().info(pname + " used a Nether portal");
        return; // Allow game mechanic
    }
    if (event.getCause() == PlayerTeleportEvent.TeleportCause.END_PORTAL) {
        getLogger().info(pname + " used a The End portal");
        return; // Allow game mechanic
    }
    if (player.hasPermission("teleplugin.enter.any") == false && player.hasPermission("teleplugin.enter." + to_world) == false) {
        getLogger().info(pname + " was denied access to enter world '" + to_world + "'");
        player.sendMessage("§7[§6TP§7]§c You are not allowed to enter " + to_world + "§c this way");
        event.setCancelled(true);
        return;
    }
    if (player.hasPermission("teleplugin.leave.any") == false && player.hasPermission("teleplugin.leave." + from_world) == false) {
        getLogger().info(pname + " was denied access to leave world '" + from_world + "'");
        // Bug fix: message previously named the destination world instead of the one being left
        player.sendMessage("§7[§6TP§7]§c You are not allowed to leave " + from_world + "§c this way");
        event.setCancelled(true);
        return;
    }
    return;
}
/**
 * Teleports one online player to another, both identified by name.
 *
 * @param subject     name of the player to move
 * @param destination name of the player to move to
 * @param force       bypass safety checks in safeTeleport()
 * @return true if the teleport succeeded (or subject == destination, a no-op)
 */
private boolean teleport(String subject, String destination, Boolean force) {
    Player subj = getServer().getPlayer(subject);
    Player dest = getServer().getPlayer(destination);
    if (subject.equals(destination)) {
        // Teleporting to oneself is a harmless no-op
        getLogger().info("Teleport "+subject+" to "+destination+"..?");
        return true;
    }
    if (subj != null && dest != null) {
        Location loc = dest.getLocation();
        if (safeTeleport(subj, loc, force)) {
            subj.sendMessage("§7[§6TP§7]§b Teleporting you to §6" + dest.getName());
            return true;
        } else {
            subj.sendMessage("§7[§6TP§7]§c Teleport to §6" + dest.getName() + "§c failed");
            return false;
        }
    }
    // Consistency fix: use the plugin logger instead of System.out.println
    if (subj == null) {
        getLogger().info("Teleport who?");
    }
    if (dest == null) {
        getLogger().info("Teleport where?");
    }
    return false;
}
/**
 * Creates (or quietly renews) a "<subject>-to-<destination>.requested" token
 * file in req_dir and notifies the destination player of the /tpa request.
 *
 * @param subject     name of the requesting player
 * @param destination name of the player whose permission is requested
 * @return true if the request token exists afterwards; false when requesting
 *         to oneself, when the destination is offline, or on I/O failure
 */
private boolean request_permission(String subject, String destination) {
    File f = null;
    long now = System.currentTimeMillis();
    // Requesting permission to self? Go away.
    if (subject.equalsIgnoreCase(destination)) {
        return false;
    }
    // Create request token
    f = new File(req_dir, subject + "-to-" + destination + ".requested");
    if (f.exists() && f.lastModified() + cooldown > now) {
        // Already requested so renew quietly (token mtime drives expiry)
        getLogger().info(subject + " renewing /tpa request to " + destination);
        f.setLastModified(now);
        return true;
    } else {
        // This is a new request; only deliver it if the destination is online
        Player p = this.getServer().getPlayer(destination);
        if (p != null) {
            getLogger().info(subject + " sending new /tpa request to " + destination);
            try {
                f.createNewFile();
                p.sendMessage("§7[§6TP§7]§b §6" + subject + "§b has requested /tpa permission (See '/tphelp')");
            } catch (IOException e) {
                e.printStackTrace();
                getLogger().warning("Unexpected error creating "+f.getName()+": "+e.getLocalizedMessage());
                return false;
            }
            return true;
        } else {
            return false;
        }
    }
}
/** Removes any pending "<subject>-to-<destination>" request token. Always returns true. */
private boolean cancel_request(String subject, String destination) {
    File token = new File(req_dir, subject + "-to-" + destination + ".requested");
    token.delete();
    return true;
}
/**
 * Grants (or quietly renews) /tpa permission: removes any denial token and
 * creates a "<destination>-to-<subject>.granted" token file, notifying the
 * destination player.
 *
 * @param subject     name of the granting player
 * @param destination name of the player being granted access
 * @return true if the grant token exists afterwards; false when granting to
 *         oneself, when the destination is offline, or on I/O failure
 */
private boolean grant_permission(String subject, String destination) {
    File f = null;
    long now = System.currentTimeMillis();
    // Granting permission to self? Go away.
    if (subject.equalsIgnoreCase(destination)) {
        return false;
    }
    // Delete denial token, if any (a grant supersedes an earlier denial)
    f = new File(req_dir, destination + "-to-" + subject + ".denied");
    f.delete();
    // Create permission token
    f = new File(req_dir, destination + "-to-" + subject + ".granted");
    if (f.exists() && f.lastModified() + cooldown > now) {
        // Already granted so renew quietly (token mtime drives expiry)
        getLogger().info(subject + " renewing /tpa permission for §6" + destination);
        f.setLastModified(now);
        return true;
    } else {
        // This is a new permission; only deliver it if the destination is online
        Player p = this.getServer().getPlayer(destination);
        if (p != null) {
            getLogger().info(subject + " granting new /tpa permission for " + destination);
            try {
                f.createNewFile();
                p.sendMessage("§7[§6TP§7]§b §6" + subject + "§b has granted you /tpa permission (See '/tphelp')");
            } catch (IOException e) {
                e.printStackTrace();
                getLogger().warning("Unexpected error creating "+f.getName()+": "+e.getLocalizedMessage());
                return false;
            }
            return true;
        } else {
            return false;
        }
    }
}
/**
 * Denies /tpa permission: removes any grant and request tokens and creates a
 * "<destination>-to-<subject>.denied" token file, notifying the destination
 * player if they are online.
 *
 * @param subject     name of the denying player
 * @param destination name of the player being denied
 * @return true if the denial token exists afterwards; false when denying
 *         oneself or on I/O failure
 */
private boolean deny_permission(String subject, String destination) {
    File f = null;
    long now = System.currentTimeMillis();
    // Denying permission to self? Go away.
    if (subject.equalsIgnoreCase(destination)) {
        return false;
    }
    // Delete permission token, if any
    f = new File(req_dir, destination + "-to-" + subject + ".granted");
    f.delete();
    // Delete request token, if any
    f = new File(req_dir, destination + "-to-" + subject + ".requested");
    f.delete();
    // Create denial token
    f = new File(req_dir, destination + "-to-" + subject + ".denied");
    if (f.exists() && f.lastModified() + cooldown > now) {
        // Already denied so renew quietly (token mtime drives expiry)
        getLogger().info(subject + " renewing /tpa denial for " + destination);
        f.setLastModified(now);
        return true;
    } else {
        // This is a new denial; unlike grants, it is recorded even if the
        // destination is offline
        Player p = this.getServer().getPlayer(destination);
        getLogger().info(subject + " creating new /tpa denial for " + destination);
        try {
            f.createNewFile();
            // Bug fix: p is null when the denied player is offline; the
            // unguarded sendMessage call used to throw an NPE here
            if (p != null) {
                p.sendMessage("§7[§6TP§7]§b §6" + subject + "§c has denied you /tpa permission (See '/tphelp')");
            }
        } catch (IOException e) {
            e.printStackTrace();
            getLogger().warning("Unexpected error creating "+f.getName()+": "+e.getLocalizedMessage());
            return false;
        }
        return true;
    }
}
/** True if a still-valid (younger than cooldown) denial token exists from destination to subject. */
private boolean has_denial(String subject, String destination) {
    // Checking permission to self? Go away.
    if (subject.equalsIgnoreCase(destination)) {
        return false;
    }
    File token = new File(req_dir, subject + "-to-" + destination + ".denied");
    return token.exists() && token.lastModified() + cooldown > System.currentTimeMillis();
}
/** True if a still-valid (younger than cooldown) grant token exists for subject to reach destination. */
private boolean has_permission(String subject, String destination) {
    // Checking permission to self? Go away.
    if (subject.equalsIgnoreCase(destination)) {
        return false;
    }
    File token = new File(req_dir, subject + "-to-" + destination + ".granted");
    return token.exists() && token.lastModified() + cooldown > System.currentTimeMillis();
}
/** True if a still-valid (younger than cooldown) request token exists from subject to destination. */
private boolean has_request(String subject, String destination) {
    // Checking for request to self? Go away.
    if (subject.equalsIgnoreCase(destination)) {
        return false;
    }
    File token = new File(req_dir, subject + "-to-" + destination + ".requested");
    return token.exists() && token.lastModified() + cooldown > System.currentTimeMillis();
}
/** Current wall-clock time in whole seconds since the Unix epoch. */
private Integer getUnixtime() {
    long seconds = System.currentTimeMillis() / 1000L;
    return (int) seconds;
}
/**
 * Records the player's position once per minute so /tpback can rewind, and
 * purges entries older than max_tpback minutes.
 *
 * @param pname player whose position is being tracked
 * @param loc   the position to record for the current minute
 */
private void registerLocation(String pname, Location loc) {
    Integer minute_now = (getUnixtime() / 60);
    Integer minute_limit = minute_now - max_tpback;
    // Fetch this player's location table, loading it from disk on first use
    ConcurrentHashMap<Integer,Location> playerlocs = locs.get(pname);
    if (playerlocs == null) {
        playerlocs = loadLocations(pname);
        locs.put(pname, playerlocs);
    }
    // Unless already done this minute, record current location
    playerlocs.putIfAbsent(minute_now, loc);
    // Purge entries older than max_tpback minutes. Bug fix: the old loop broke
    // out on the first key above the limit, but ConcurrentHashMap iteration
    // order is unspecified, so stale entries could survive indefinitely.
    // Examine every key instead.
    for (Integer minute : playerlocs.keySet()) {
        if (minute <= minute_limit) {
            playerlocs.remove(minute);
        }
    }
}
private Location getLocation(String pname, Integer delta) {
Location loc = null;
Integer minute_now = (getUnixtime() / 60) - delta;
// Fetch this player's location table
ConcurrentHashMap<Integer,Location> playerlocs = locs.get(pname);
if (playerlocs == null) {
//logger.info("[TP] I have no data for "+pname);
return loc;
}
// Play back the last hour until we get past the moment we're looking for
//logger.info("[TP] Searching "+pname+"'s CHM for delta "+minutes+" (unixtime "+unixtime+")");
List<Integer> keys = new ArrayList<Integer>(playerlocs.keySet());
Collections.sort(keys);
for (Integer minute : keys) {
if (minute > minute_now) {
//logger.info("[TP] "+pname+"'s next location is at "+minute+" which is past delta.");
break;
}
//logger.info("[TP] Found "+pname+"'s location at "+minute);
loc = playerlocs.get(minute);
}
return loc;
}
private Integer getOldestDelta(String pname) {
// Fetch this player's location table
ConcurrentHashMap<Integer,Location> playerlocs = locs.get(pname);
if (playerlocs == null) {
return null;
}
List<Integer> keys = new ArrayList<Integer>(playerlocs.keySet());
Collections.sort(keys);
for (Integer minute : keys) {
// Don't actually loop, just return the first one
//logger.info("[TP] Found "+pname+"'s location at "+minute);
return (getUnixtime()/60) - minute;
}
return null; // Unreachable
}
    /**
     * Loads the persisted minute -> location history for a player from
     * "&lt;loc_dir&gt;/&lt;pname&gt;.dat". Each line has the form
     * "minute:world,x,y,z,yaw,pitch". Entries older than the tpback window,
     * or referring to worlds that no longer exist, are silently dropped.
     * Read/parse errors are logged to stderr and yield a partial (possibly
     * empty) table rather than a failure.
     */
    private ConcurrentHashMap<Integer,Location> loadLocations(String pname) {
        Integer minute_now = (getUnixtime() / 60);
        Integer minute_limit = minute_now - max_tpback;
        ConcurrentHashMap<Integer,Location> playerlocs = new ConcurrentHashMap<Integer,Location>();
        File file = new File(loc_dir, pname + ".dat");
        if (file.exists() == false) {
            // No saved history yet; return an empty table.
            return playerlocs;
        }
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new FileReader(file));
            String line = null;
            while ((line = reader.readLine()) != null) {
                // Split "minute:payload"; minute is seconds-since-epoch / 60.
                String[] pair = line.split(":");
                Integer minute = Integer.valueOf(pair[0]);
                if (minute >= minute_limit) {
                    // Data is recent enough to keep
                    String[] values = pair[1].split(",");
                    World world = getServer().getWorld(values[0]);
                    if (world != null) {
                        // World still exists
                        Double x = Double.valueOf(values[1]);
                        Double y = Double.valueOf(values[2]);
                        Double z = Double.valueOf(values[3]);
                        Float yaw = Float.valueOf(values[4]);
                        Float pitch = Float.valueOf(values[5]);
                        Location loc = new Location(world, x, y, z, yaw, pitch);
                        // Remember this location
                        playerlocs.put(minute, loc);
                    }
                }
            }
        } catch (FileNotFoundException e) {
            // Should not be possible
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (Exception e) {
            // Catch-all so one malformed line cannot abort plugin startup.
            e.printStackTrace();
        } finally {
            try {
                if (reader != null) {
                    reader.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return playerlocs;
    }
private void saveLocations(String pname) {
ConcurrentHashMap<Integer,Location> playerlocs = locs.get(pname);
if (playerlocs == null) {
getLogger().warning("Internal error: No location data to save for player "+pname);
return;
}
File file = new File(loc_dir, pname + ".dat");
try {
FileWriter outFile = new FileWriter(file);
PrintWriter out = new PrintWriter(outFile);
for (Integer minute : playerlocs.keySet()) {
Location loc = playerlocs.get(minute);
// Serialize location manually
String w = loc.getWorld().getName();
Double x = loc.getX();
Double y = loc.getY();
Double z = loc.getZ();
Float yaw = loc.getYaw();
Float pitch = loc.getPitch();
// Write to file
out.println(minute+":"+w+","+x+","+y+","+z+","+yaw+","+pitch);
}
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
    /**
     * Teleports the player to {@code location} unless a hazard is detected
     * (lava, fire, trapdoor, blocked headroom, or a PVP region). When
     * {@code force} is true the hazard check still runs (and messages the
     * player) but the teleport proceeds anyway. Returns the result of
     * {@code player.teleport(...)}, or false when the teleport was refused.
     */
    private boolean safeTeleport(Player player, Location location, Boolean force) {
        if (location == null) {
            player.sendMessage("§7[§6TP§7]§c Invalid location");
            return false;
        }
        Integer x = location.getBlockX();
        Integer y = location.getBlockY();
        Integer z = location.getBlockZ();
        Block b = null;
        World world = location.getWorld();
        // Start the scan one block above the target (when possible) so we
        // also require headroom for the player's upper body.
        if (y < world.getMaxHeight()) { y++; }
        Integer needAir = 2;
        Boolean danger = false;
        // Scan downward from the head position to find the landing platform.
        for (Integer check = y; check >= 0; check--) {
            b = world.getBlockAt(x, check, z);
            Material type = b.getType();
            if (isAir(type)) {
                needAir--;
                continue;
            }
            if (type == Material.LAVA || type == Material.STATIONARY_LAVA) {
                player.sendMessage("§7[§6TP§7]§c Lava detected");
                danger = true;
                break;
            }
            if (type == Material.FIRE) {
                // NOTE(review): unlike the lava/trapdoor branches this one does
                // not break; the iteration still ends at the unconditional
                // break below, but the "Blocked location" message may also be
                // emitted for the same fire block — confirm this is intended.
                player.sendMessage("§7[§6TP§7]§c Fire detected");
                danger = true;
            }
            if (type == Material.TRAP_DOOR) {
                player.sendMessage("§7[§6TP§7]§c Trapdoor detected");
                danger = true;
                break;
            }
            if (needAir > 0) {
                // Fewer than two air blocks above the platform: not enough
                // room for the player to stand.
                player.sendMessage("§7[§6TP§7]§c Blocked location detected ("+b.getType().name()+")");
                danger = true;
            }
            break; // Found a safe platform
        }
        if (isPVP(location)) {
            player.sendMessage("§7[§6TP§7]§c Player vs Player (PVP) region detected");
            danger = true;
        }
        if (danger == false) {
            return player.teleport(location);
        } else if (force == true) {
            getLogger().info(player.getName() + " used the safety override");
            player.sendMessage("§7[§6TP§7]§c Safety override (-force) is in effect");
            return player.teleport(location);
        }
        return false;
    }
    /**
     * Decides whether a block of the given material counts as "air" for the
     * safe-teleport scan, i.e. a player can occupy it and breathe. Starts
     * from Bukkit's solidity flag, then applies two explicit exception lists:
     * non-solid materials that still suffocate/harm (liquids, portal) and
     * solid-but-passable materials that still allow breathing (slabs, signs,
     * doors, plates, fences, ...).
     */
    private boolean isAir(Material type) {
        boolean ret = !type.isSolid();
        // exceptions
        switch (type)
        {
        case WATER:
        case STATIONARY_WATER:
        case LAVA:
        case STATIONARY_LAVA:
        case PORTAL:
            // Add more non-solid materials that do NOT allow breathing here...
            ret = false;
            break;
        case PISTON_EXTENSION:
        case PISTON_MOVING_PIECE:
        case STEP:
        case SIGN_POST:
        case WOODEN_DOOR:
        case WALL_SIGN:
        case STONE_PLATE:
        case IRON_DOOR_BLOCK:
        case WOOD_PLATE:
        case FENCE:
        case CAKE_BLOCK:
        case TRAP_DOOR:
        case IRON_FENCE:
        case THIN_GLASS:
        case FENCE_GATE:
        case NETHER_FENCE:
        case BREWING_STAND:
        case CAULDRON:
        case WOOD_STEP:
        case GOLD_PLATE:
        case IRON_PLATE:
            // Add more solid materials that do allow breathing here...
            ret = true;
            break;
        default:
            break;
        }
        return ret;
    }
private String time_to_delta(String timestr) {
String[] parts = timestr.split(":");
Integer hh = Integer.parseInt(parts[0]);
Integer mm = Integer.parseInt(parts[1]);
// Clamp values
if (hh < 0) { hh += 24; }
if (hh > 23) { hh -= 24; }
if (mm < 0) { mm += 60; }
if (mm > 59) { hh -= 60; }
// Use Calendar to calculate the difference
Calendar now = Calendar.getInstance();
Calendar then = Calendar.getInstance();
then.set(Calendar.HOUR_OF_DAY, hh);
then.set(Calendar.MINUTE, mm);
long ms = now.getTimeInMillis() - then.getTimeInMillis();
//getLogger().info("The time is now "+now);
//getLogger().info("The time you requested (hours="+hh+", minutes="+mm+") I interpret as "+then);
//getLogger().info("The difference is "+ms+" milliseconds");
if (ms < 0) {
// Requested time is in the future so we must assume the user means yesterday
//getLogger().info("No, that's in the future, let's go one day back.");
then.add(Calendar.DATE, -1);
//getLogger().info("The time you requested (hours="+hh+", minutes="+mm+") I interpret as "+then);
ms = now.getTimeInMillis() - then.getTimeInMillis();
//getLogger().info("The difference is "+ms+" milliseconds");
}
// Copnvert from milliseconds to minutes and return to caller
Integer delta = (int) ms/(1000*60);
//getLogger().info("Or "+delta+" minutes");
return delta.toString();
}
private Integer numOptions(String[] array) {
Integer options = 0;
for (String str : array) {
if (str.startsWith("--")) { options++; }
}
return options;
}
private void respond(Player p, String msg) {
if (p != null) {
p.sendMessage(msg);
} else {
Server server = getServer();
ConsoleCommandSender console = server.getConsoleSender();
console.sendMessage(msg);
}
}
private boolean isPVP(Location loc) {
if (worldguard == null) { return false; }
RegionManager regionManager = worldguard.getRegionContainer().get(loc.getWorld());
ApplicableRegionSet set = regionManager.getApplicableRegions(loc);
return set.testState(null, DefaultFlag.PVP);
}
}
| Store warp folder path in a field | src/no/atc/floyd/bukkit/tele/TelePlugin.java | Store warp folder path in a field |
|
Java | mit | 2585c44785a0778459ef24b44a9aef1dfc3f2660 | 0 | tobiatesan/serleena-android,tobiatesan/serleena-android | ///////////////////////////////////////////////////////////////////////////////
//
// This file is part of Serleena.
//
// The MIT License (MIT)
//
// Copyright (C) 2015 Antonio Cavestro, Gabriele Pozzan, Matteo Lisotto,
// Nicola Mometto, Filippo Sestini, Tobia Tesan, Sebastiano Valle.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
///////////////////////////////////////////////////////////////////////////////
/**
* Name: SerleenaDataSourceIntegrationTest.java
* Package: com.kyloth.serleena.model;
* Author: Gabriele Pozzan
*
* History:
* Version Programmer Changes
* 1.0.0 Gabriele Pozzan Creazione file scrittura
* codice e documentazione Javadoc
*/
package com.kyloth.serleena.model;
import android.content.ContentValues;
import android.database.sqlite.SQLiteDatabase;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Base64;
import com.kyloth.serleena.BuildConfig;
import com.kyloth.serleena.TestDB;
import com.kyloth.serleena.common.EmergencyContact;
import com.kyloth.serleena.common.GeoPoint;
import com.kyloth.serleena.common.IQuadrant;
import com.kyloth.serleena.common.NoSuchWeatherForecastException;
import com.kyloth.serleena.common.Quadrant;
import com.kyloth.serleena.persistence.NoSuchQuadrantException;
import com.kyloth.serleena.persistence.WeatherForecastEnum;
import com.kyloth.serleena.persistence.sqlite.SerleenaDatabase;
import com.kyloth.serleena.persistence.sqlite.SerleenaSQLiteDataSource;
import com.kyloth.serleena.persistence.sqlite.TestFixtures;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricGradleTestRunner;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Iterator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Contiene test di integrazione per le classi di persistenza.
*
* @author Gabriele Pozzan <[email protected]>
* @version 1.0.0
*/
@RunWith(RobolectricGradleTestRunner.class)
@Config(constants = BuildConfig.class, emulateSdk = 19)
public class SerleenaDataSourceIntegrationTest {
    SQLiteDatabase db;
    SerleenaDatabase serleenaDB;
    SerleenaSQLiteDataSource serleenaSQLDS;
    SerleenaDataSource dataSource;
    /**
     * Initializes the data fields needed to run the tests: creates a fresh
     * database, forces a schema reset via onUpgrade, and inserts the contact,
     * weather, experience and raster fixtures the tests below rely on.
     */
    @Before
    public void initialize() {
        serleenaDB = new SerleenaDatabase(RuntimeEnvironment.application, 1);
        db = serleenaDB.getWritableDatabase();
        serleenaDB.onUpgrade(db, 1, 1);
        ContentValues contacts_1 = TestFixtures.pack(TestFixtures.CONTACTS_FIXTURE_1);
        db.insertOrThrow(SerleenaDatabase.TABLE_CONTACTS, null, contacts_1);
        ContentValues contacts_2 = TestFixtures.pack(TestFixtures.CONTACTS_FIXTURE_2);
        db.insertOrThrow(SerleenaDatabase.TABLE_CONTACTS, null, contacts_2);
        ContentValues weather_1 = TestFixtures.pack(TestFixtures.WEATHER_FIXTURE);
        db.insertOrThrow(SerleenaDatabase.TABLE_WEATHER_FORECASTS, null, weather_1);
        ContentValues exp = TestFixtures.pack(TestFixtures.EXPERIENCES_FIXTURE_EXPERIENCE_1);
        db.insertOrThrow(SerleenaDatabase.TABLE_EXPERIENCES, null, exp);
        ContentValues values = TestFixtures.pack(TestFixtures.RASTER_FIXTURE);
        db.insertOrThrow(SerleenaDatabase.TABLE_RASTERS, null, values);
        serleenaSQLDS = new SerleenaSQLiteDataSource(serleenaDB);
        dataSource = new SerleenaDataSource(serleenaSQLDS);
    }
    /**
     * Verifies that getContacts returns the emergency contacts near a given
     * geographic point, in either order (the iteration order of the result is
     * not specified), and that it returns an empty collection for a point
     * with no nearby contacts.
     */
    @Test
    public void testGetContacts() {
        Iterable<EmergencyContact> contacts = dataSource.getContacts(
                TestFixtures.CONTACTS_FIXTURE_POINT_INSIDE_BOTH
        );
        Iterator<EmergencyContact> i_contacts = contacts.iterator();
        String name1 = i_contacts.next().name();
        String name2 = i_contacts.next().name();
        // Accept both orderings of the two fixture contacts.
        assertTrue(
                (
                        name1.equals(TestFixtures.CONTACTS_FIXTURE_1_NAME)
                        && name2.equals(TestFixtures.CONTACTS_FIXTURE_2_NAME)
                )
                ||
                (
                        name2.equals(TestFixtures.CONTACTS_FIXTURE_1_NAME)
                        && name1.equals(TestFixtures.CONTACTS_FIXTURE_2_NAME)
                )
        );
        assertFalse(i_contacts.hasNext());
        Iterable<EmergencyContact> void_contacts = dataSource.getContacts(TestFixtures.CONTACTS_FIXTURE_POINT_INSIDE_NEITHER);
        Iterator<EmergencyContact> i_void_contacts = void_contacts.iterator();
        assertFalse(i_void_contacts.hasNext());
    }
    /**
     * Verifies that getWeatherInfo returns the forecast stored for the given
     * location and date.
     */
    @Test
    public void testGetWeatherInfo() throws NoSuchWeatherForecastException {
        WeatherForecast forecast = (WeatherForecast)
                dataSource.getWeatherInfo(
                        TestFixtures.WEATHER_FIXTURE_POINT_INSIDE,
                        TestFixtures.WEATHER_FIXTURE_CAL.getTime()
                );
        assertTrue(forecast != null);
        assertTrue(forecast.getAfternoonForecast() == WeatherForecastEnum.Cloudy);
    }
    /**
     * Verifies that getExperiences returns the Experiences stored in the
     * database.
     */
    @Test
    public void testGetExperiences() {
        Iterable<IExperience> experiences = dataSource.getExperiences();
        Iterator<IExperience> i_experiences = experiences.iterator();
        Experience experience = (Experience) i_experiences.next();
        assertTrue(experience.getName().equals(TestFixtures.EXPERIENCES_FIXTURE_EXPERIENCE_1_NAME));
    }
    /**
     * Verifies that Track's equals() behaves correctly: two Track objects
     * loaded independently from the same database row compare equal, and a
     * Track is not equal to null or to an unrelated object.
     */
    @Test
    public void testTrackEquals() {
        SerleenaDatabase serleenaDb = TestDB.getEmptyDatabase();
        SQLiteDatabase db = serleenaDb.getWritableDatabase();
        // Build one experience with one track, two checkpoints and two
        // telemetries, each with its own checkpoint events.
        TestDB.experienceQuery(db, 0, "experience");
        TestDB.trackQuery(db, 0, "track", 0);
        TestDB.checkpointQuery(db, 0, 1, 5, 5, 0);
        TestDB.checkpointQuery(db, 1, 2, 6, 6, 0);
        TestDB.telemetryQuery(db, 0, 0);
        TestDB.telemetryQuery(db, 1, 0);
        TestDB.checkPointEventQuery(db, 0, 100, 1, 0);
        TestDB.checkPointEventQuery(db, 1, 200, 2, 0);
        TestDB.checkPointEventQuery(db, 2, 500, 1, 1);
        TestDB.checkPointEventQuery(db, 3, 600, 2, 1);
        SerleenaDataSource dataSource = new SerleenaDataSource(
                new SerleenaSQLiteDataSource(serleenaDb));
        // Two independent loads of the same persisted track.
        ITrack track1 = dataSource.getExperiences().iterator().next()
                .getTracks().iterator().next();
        ITrack track2 = dataSource.getExperiences().iterator().next()
                .getTracks().iterator().next();
        assertTrue(track1.equals(track2));
        assertTrue(!track1.equals(null));
        assertTrue(!track1.equals(new Object()));
    }
}
| serleena/app/src/test/java/com/kyloth/serleena/model/SerleenaDataSourceIntegrationTest.java | ///////////////////////////////////////////////////////////////////////////////
//
// This file is part of Serleena.
//
// The MIT License (MIT)
//
// Copyright (C) 2015 Antonio Cavestro, Gabriele Pozzan, Matteo Lisotto,
// Nicola Mometto, Filippo Sestini, Tobia Tesan, Sebastiano Valle.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
///////////////////////////////////////////////////////////////////////////////
/**
* Name: SerleenaDataSourceIntegrationTest.java
* Package: com.kyloth.serleena.model;
* Author: Gabriele Pozzan
*
* History:
* Version Programmer Changes
* 1.0.0 Gabriele Pozzan Creazione file scrittura
* codice e documentazione Javadoc
*/
package com.kyloth.serleena.model;
import android.content.ContentValues;
import android.database.sqlite.SQLiteDatabase;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Base64;
import com.kyloth.serleena.BuildConfig;
import com.kyloth.serleena.TestDB;
import com.kyloth.serleena.common.EmergencyContact;
import com.kyloth.serleena.common.GeoPoint;
import com.kyloth.serleena.common.IQuadrant;
import com.kyloth.serleena.common.NoSuchWeatherForecastException;
import com.kyloth.serleena.common.Quadrant;
import com.kyloth.serleena.persistence.NoSuchQuadrantException;
import com.kyloth.serleena.persistence.WeatherForecastEnum;
import com.kyloth.serleena.persistence.sqlite.IRasterSource;
import com.kyloth.serleena.persistence.sqlite.SerleenaDatabase;
import com.kyloth.serleena.persistence.sqlite.SerleenaSQLiteDataSource;
import com.kyloth.serleena.persistence.sqlite.TestFixtures;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricGradleTestRunner;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Iterator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Contiene test di integrazione per le classi di persistenza.
*
* @author Gabriele Pozzan <[email protected]>
* @version 1.0.0
*/
@RunWith(RobolectricGradleTestRunner.class)
@Config(constants = BuildConfig.class, emulateSdk = 19)
public class SerleenaDataSourceIntegrationTest {
    SQLiteDatabase db;
    SerleenaDatabase serleenaDB;
    SerleenaSQLiteDataSource serleenaSQLDS;
    SerleenaDataSource dataSource;
    /**
     * Initializes the data fields needed to run the tests: creates a fresh
     * database, forces a schema reset via onUpgrade, and inserts the contact,
     * weather, experience and raster fixtures the tests below rely on.
     */
    @Before
    public void initialize() {
        serleenaDB = new SerleenaDatabase(RuntimeEnvironment.application, 1);
        db = serleenaDB.getWritableDatabase();
        serleenaDB.onUpgrade(db, 1, 1);
        ContentValues contacts_1 = TestFixtures.pack(TestFixtures.CONTACTS_FIXTURE_1);
        db.insertOrThrow(SerleenaDatabase.TABLE_CONTACTS, null, contacts_1);
        ContentValues contacts_2 = TestFixtures.pack(TestFixtures.CONTACTS_FIXTURE_2);
        db.insertOrThrow(SerleenaDatabase.TABLE_CONTACTS, null, contacts_2);
        ContentValues weather_1 = TestFixtures.pack(TestFixtures.WEATHER_FIXTURE);
        db.insertOrThrow(SerleenaDatabase.TABLE_WEATHER_FORECASTS, null, weather_1);
        ContentValues exp = TestFixtures.pack(TestFixtures.EXPERIENCES_FIXTURE_EXPERIENCE_1);
        db.insertOrThrow(SerleenaDatabase.TABLE_EXPERIENCES, null, exp);
        ContentValues values = TestFixtures.pack(TestFixtures.RASTER_FIXTURE);
        db.insertOrThrow(SerleenaDatabase.TABLE_RASTERS, null, values);
        serleenaSQLDS = new SerleenaSQLiteDataSource(serleenaDB);
        dataSource = new SerleenaDataSource(serleenaSQLDS);
    }
    /**
     * Verifies that getContacts returns the emergency contacts near a given
     * geographic point, in either order (the iteration order of the result is
     * not specified), and that it returns an empty collection for a point
     * with no nearby contacts.
     */
    @Test
    public void testGetContacts() {
        Iterable<EmergencyContact> contacts = dataSource.getContacts(
                TestFixtures.CONTACTS_FIXTURE_POINT_INSIDE_BOTH
        );
        Iterator<EmergencyContact> i_contacts = contacts.iterator();
        String name1 = i_contacts.next().name();
        String name2 = i_contacts.next().name();
        // Accept both orderings of the two fixture contacts.
        assertTrue(
                (
                        name1.equals(TestFixtures.CONTACTS_FIXTURE_1_NAME)
                        && name2.equals(TestFixtures.CONTACTS_FIXTURE_2_NAME)
                )
                ||
                (
                        name2.equals(TestFixtures.CONTACTS_FIXTURE_1_NAME)
                        && name1.equals(TestFixtures.CONTACTS_FIXTURE_2_NAME)
                )
        );
        assertFalse(i_contacts.hasNext());
        Iterable<EmergencyContact> void_contacts = dataSource.getContacts(TestFixtures.CONTACTS_FIXTURE_POINT_INSIDE_NEITHER);
        Iterator<EmergencyContact> i_void_contacts = void_contacts.iterator();
        assertFalse(i_void_contacts.hasNext());
    }
    /**
     * Verifies that getWeatherInfo returns the forecast stored for the given
     * location and date.
     */
    @Test
    public void testGetWeatherInfo() throws NoSuchWeatherForecastException {
        WeatherForecast forecast = (WeatherForecast)
                dataSource.getWeatherInfo(
                        TestFixtures.WEATHER_FIXTURE_POINT_INSIDE,
                        TestFixtures.WEATHER_FIXTURE_CAL.getTime()
                );
        assertTrue(forecast != null);
        assertTrue(forecast.getAfternoonForecast() == WeatherForecastEnum.Cloudy);
    }
    /**
     * Verifies that getExperiences returns the Experiences stored in the
     * database.
     */
    @Test
    public void testGetExperiences() {
        Iterable<IExperience> experiences = dataSource.getExperiences();
        Iterator<IExperience> i_experiences = experiences.iterator();
        Experience experience = (Experience) i_experiences.next();
        assertTrue(experience.getName().equals(TestFixtures.EXPERIENCES_FIXTURE_EXPERIENCE_1_NAME));
    }
    /**
     * Verifies that Track's equals() behaves correctly: two Track objects
     * loaded independently from the same database row compare equal.
     */
    @Test
    public void testTrackEquals() {
        SerleenaDatabase serleenaDb = TestDB.getEmptyDatabase();
        SQLiteDatabase db = serleenaDb.getWritableDatabase();
        // Build one experience with one track, two checkpoints and two
        // telemetries, each with its own checkpoint events.
        TestDB.experienceQuery(db, 0, "experience");
        TestDB.trackQuery(db, 0, "track", 0);
        TestDB.checkpointQuery(db, 0, 1, 5, 5, 0);
        TestDB.checkpointQuery(db, 1, 2, 6, 6, 0);
        TestDB.telemetryQuery(db, 0, 0);
        TestDB.telemetryQuery(db, 1, 0);
        TestDB.checkPointEventQuery(db, 0, 100, 1, 0);
        TestDB.checkPointEventQuery(db, 1, 200, 2, 0);
        TestDB.checkPointEventQuery(db, 2, 500, 1, 1);
        TestDB.checkPointEventQuery(db, 3, 600, 2, 1);
        // TODO: What is this? How does it work?
        SerleenaDataSource dataSource = new SerleenaDataSource(
                new SerleenaSQLiteDataSource(serleenaDb));
        ITrack track1 = dataSource.getExperiences().iterator().next()
                .getTracks().iterator().next();
        ITrack track2 = dataSource.getExperiences().iterator().next()
                .getTracks().iterator().next();
        assertTrue(track1.equals(track2));
    }
    /**
     * Test helper: inserts a raster quadrant row with the given corner
     * coordinates, Base64-encoded image data and owning experience id.
     */
    private void putQuadrant(double nwLat, double nwLon, double seLat,
                             double seLon, String base64, long expId) {
        ContentValues values = new ContentValues();
        values.put("raster_nw_corner_latitude", nwLat);
        values.put("raster_nw_corner_longitude", nwLon);
        values.put("raster_se_corner_latitude", seLat);
        values.put("raster_se_corner_longitude", seLon);
        values.put("raster_base64", base64);
        values.put("raster_experience", expId);
        db.insertOrThrow(SerleenaDatabase.TABLE_RASTERS, null, values);
    }
    /**
     * Test helper: pixel-by-pixel bitmap comparison. Two bitmaps are equal
     * when they share width, height, config and every pixel value.
     */
    public boolean bitmapEquals(Bitmap first, Bitmap second) {
        if (first.getWidth() == second.getWidth() &&
                first.getHeight() == second.getHeight() &&
                first.getConfig().equals(second.getConfig())) {
            boolean b = true;
            // Stop scanning as soon as one pixel differs (b goes false).
            for (int i = 0; i < first.getWidth() && b; i++)
                for (int j = 0; j < first.getHeight() && b; j++)
                    b = b && (first.getPixel(i, j) == second.getPixel(i, j));
            return b;
        }
        return false;
    }
}
| MODEL/TEST: Aggiungi test di equals in SerleenaDataSourceIntegrationTest
| serleena/app/src/test/java/com/kyloth/serleena/model/SerleenaDataSourceIntegrationTest.java | MODEL/TEST: Aggiungi test di equals in SerleenaDataSourceIntegrationTest |
|
Java | epl-1.0 | 095c87da7d565dac2cffa11710e7dd0e4a67c20f | 0 | opendaylight/bgpcep,inocybe/odl-bgpcep,inocybe/odl-bgpcep | /*
* Copyright (c) 2015 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.protocol.bgp.rib.impl;
import com.google.common.base.Preconditions;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import javax.annotation.concurrent.ThreadSafe;
import org.opendaylight.yangtools.yang.binding.Notification;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A best-effort output limiter. It does not provide any fairness, and acts as a blocking gate-keeper
* for a sessions' channel.
*/
@ThreadSafe
final class ChannelOutputLimiter extends ChannelInboundHandlerAdapter {
    private static final Logger LOG = LoggerFactory.getLogger(ChannelOutputLimiter.class);
    private final BGPSessionImpl session;
    // Set/cleared under "this" lock by the channel callbacks below; volatile
    // so writer threads can read it cheaply on the fast path in ensureWritable().
    private volatile boolean blocked;
    ChannelOutputLimiter(final BGPSessionImpl session) {
        this.session = Preconditions.checkNotNull(session);
    }
    /**
     * Blocks the calling thread while the channel is not writable. The
     * unsynchronized read of {@code blocked} is a fast path; the slow path
     * re-checks under the lock and waits until channelWritabilityChanged()
     * or channelInactive() notifies.
     */
    private void ensureWritable() {
        if (blocked) {
            LOG.trace("Blocked slow path tripped on session {}", session);
            synchronized (this) {
                while (blocked) {
                    try {
                        LOG.debug("Waiting for session {} to become writable", session);
                        this.wait();
                    } catch (InterruptedException e) {
                        // We cannot continue writing on an unknown channel state.
                        throw new IllegalStateException("Interrupted while waiting for channel to come back", e);
                    }
                }
                LOG.debug("Resuming write on session {}", session);
            }
        }
    }
    /** Writes a message to the session, blocking first if the channel is saturated. */
    void write(final Notification msg) {
        ensureWritable();
        session.write(msg);
    }
    /** Writes and flushes a message, blocking first if the channel is saturated. */
    void writeAndFlush(final Notification msg) {
        ensureWritable();
        session.writeAndFlush(msg);
    }
    /** Flushes any pending writes on the session. */
    void flush() {
        session.flush();
    }
    @Override
    public void channelWritabilityChanged(final ChannelHandlerContext ctx) throws Exception {
        final boolean w = ctx.channel().isWritable();
        synchronized (this) {
            blocked = !w;
            LOG.debug("Writes on session {} {}", session, w ? "unblocked" : "blocked");
            if (w) {
                // Wake every thread parked in ensureWritable().
                this.notifyAll();
            } else {
                // Channel just became unwritable: flush what is already
                // buffered so the peer can drain it and writability returns.
                flush();
            }
        }
        super.channelWritabilityChanged(ctx);
    }
    @Override
    public void channelInactive(final ChannelHandlerContext ctx) throws Exception {
        synchronized (this) {
            // Release any waiters; the channel is gone and will never become
            // writable again, so leaving them blocked would hang them forever.
            blocked = false;
            this.notifyAll();
        }
        super.channelInactive(ctx);
    }
}
| bgp/rib-impl/src/main/java/org/opendaylight/protocol/bgp/rib/impl/ChannelOutputLimiter.java | /*
* Copyright (c) 2015 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.protocol.bgp.rib.impl;
import com.google.common.base.Preconditions;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import javax.annotation.concurrent.ThreadSafe;
import org.opendaylight.yangtools.yang.binding.Notification;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A best-effort output limiter. It does not provide any fairness, and acts as a blocking gate-keeper
* for a sessions' channel.
*/
@ThreadSafe
final class ChannelOutputLimiter extends ChannelInboundHandlerAdapter {
    private static final Logger LOG = LoggerFactory.getLogger(ChannelOutputLimiter.class);
    private final BGPSessionImpl session;
    // Set/cleared under "this" lock by the channel callbacks below; volatile
    // so writer threads can read it cheaply on the fast path in ensureWritable().
    private volatile boolean blocked;
    ChannelOutputLimiter(final BGPSessionImpl session) {
        this.session = Preconditions.checkNotNull(session);
    }
    /**
     * Blocks the calling thread while the channel is not writable. The
     * unsynchronized read of {@code blocked} is a fast path; the slow path
     * re-checks under the lock and waits until channelWritabilityChanged()
     * or channelInactive() notifies.
     */
    private void ensureWritable() {
        if (blocked) {
            LOG.trace("Blocked slow path tripped on session {}", session);
            synchronized (this) {
                while (blocked) {
                    try {
                        LOG.debug("Waiting for session {} to become writable", session);
                        this.wait();
                    } catch (InterruptedException e) {
                        // We cannot continue writing on an unknown channel state.
                        throw new IllegalStateException("Interrupted while waiting for channel to come back", e);
                    }
                }
                LOG.debug("Resuming write on session {}", session);
            }
        }
    }
    /** Writes a message to the session, blocking first if the channel is saturated. */
    void write(final Notification msg) {
        ensureWritable();
        session.write(msg);
    }
    /** Writes and flushes a message, blocking first if the channel is saturated. */
    void writeAndFlush(final Notification msg) {
        ensureWritable();
        session.writeAndFlush(msg);
    }
    /** Flushes any pending writes on the session. */
    void flush() {
        session.flush();
    }
    @Override
    public void channelWritabilityChanged(final ChannelHandlerContext ctx) throws Exception {
        final boolean w = ctx.channel().isWritable();
        synchronized (this) {
            blocked = !w;
            LOG.debug("Writes on session {} {}", session, w ? "unblocked" : "blocked");
            if (w) {
                // Wake every thread parked in ensureWritable().
                this.notifyAll();
            }
            // NOTE(review): nothing is flushed when the channel becomes
            // unwritable here; if the writer thread filled the buffer itself,
            // it may block in ensureWritable() with unflushed data — confirm
            // whether a flush() is needed on the "blocked" transition.
        }
        super.channelWritabilityChanged(ctx);
    }
    @Override
    public void channelInactive(final ChannelHandlerContext ctx) throws Exception {
        synchronized (this) {
            // Release any waiters; the channel is gone and will never become
            // writable again, so leaving them blocked would hang them forever.
            blocked = false;
            this.notifyAll();
        }
        super.channelInactive(ctx);
    }
}
When AdjRibOutListener get a notification with too many changes,
it can fill output buffer with wrtitten Update messages,
without a chance to flush a socket, because the AdjRibOutListener
is blocked when channel is not writable - deadlock situation until a
savior (KA message) will not be send.
Flushing with every write operation is uneffective, hence try to call
the "flush" in a situation when the channel become unwritable.
Change-Id: Ie50b9154d1166d55308c3438c12b9c1b9d3ce19f
Signed-off-by: Milos Fabian <[email protected]>
| bgp/rib-impl/src/main/java/org/opendaylight/protocol/bgp/rib/impl/ChannelOutputLimiter.java | BUG-4689: Advertising performance fix II |
|
Java | epl-1.0 | c12861495b2113dbe60473b4b96706b152d91339 | 0 | eveCSS/eveCSS,eveCSS/eveCSS | package de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite;
import org.apache.log4j.Logger;
import org.eclipse.core.databinding.Binding;
import org.eclipse.core.databinding.DataBindingContext;
import org.eclipse.core.databinding.UpdateValueStrategy;
import org.eclipse.core.databinding.beans.BeansObservables;
import org.eclipse.core.databinding.observable.value.IObservableValue;
import org.eclipse.jface.databinding.fieldassist.ControlDecorationSupport;
import org.eclipse.jface.databinding.swt.SWTObservables;
import org.eclipse.jface.databinding.viewers.ViewersObservables;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.custom.ScrolledComposite;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IMemento;
import org.eclipse.ui.ISharedImages;
import org.eclipse.ui.PlatformUI;
import de.ptb.epics.eve.data.EventImpacts;
import de.ptb.epics.eve.data.scandescription.ControlEvent;
import de.ptb.epics.eve.data.scandescription.ScanModule;
import de.ptb.epics.eve.data.scandescription.ScanModuleTypes;
import de.ptb.epics.eve.data.scandescription.errors.AxisError;
import de.ptb.epics.eve.data.scandescription.errors.ChannelError;
import de.ptb.epics.eve.data.scandescription.errors.IModelError;
import de.ptb.epics.eve.data.scandescription.errors.PlotWindowError;
import de.ptb.epics.eve.data.scandescription.errors.PositioningError;
import de.ptb.epics.eve.data.scandescription.errors.PostscanError;
import de.ptb.epics.eve.data.scandescription.errors.PrescanError;
import de.ptb.epics.eve.data.scandescription.updatenotification.ControlEventTypes;
import de.ptb.epics.eve.data.scandescription.updatenotification.IModelUpdateListener;
import de.ptb.epics.eve.data.scandescription.updatenotification.ModelUpdateEvent;
import de.ptb.epics.eve.editor.Activator;
import de.ptb.epics.eve.editor.views.eventcomposite.EventComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.ScanModuleSelectionProvider;
import de.ptb.epics.eve.editor.views.scanmoduleview.ScanModuleView;
import de.ptb.epics.eve.editor.views.scanmoduleview.ScanModuleViewComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.detectorchannelcomposite.DetectorChannelComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.motoraxiscomposite.MotorAxisComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.plotcomposite.PlotComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.positioningcomposite.PositioningComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.postscancomposite.PostscanComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.prescancomposite.PrescanComposite;
import de.ptb.epics.eve.util.ui.swt.TextSelectAllFocusListener;
import de.ptb.epics.eve.util.ui.swt.TextSelectAllMouseListener;
/**
* @author Marcus Michalsky
* @since 1.31
*/
public class ClassicComposite extends ScanModuleViewComposite implements IModelUpdateListener {
private static final Logger LOGGER = Logger.getLogger(
ClassicComposite.class.getName());
private static final String MEMENTO_ACTIONS_COMPOSITE_MAXIMIZED =
"actionsCompositeMaximized";
private static final String MEMENTO_EVENTS_COMPOSITE_MAXIMIZED =
"eventsCompositeMaximized";
private static final String MEMENTO_ACTIONS_COMPOSITE_WEIGHT =
"actionsCompositeWeight";
private static final String MEMENTO_EVENTS_COMPOSITE_WEIGHT =
"eventsCompositeWeight";
private static final String MEMENTO_AXES_SORT_STATE =
"AxesSortState";
private static final String MEMENTO_CHANNEL_SORT_STATE =
"ChannelSortState";
private static final String MEMENTO_ACTIONS_TAB_FOLDER_SELECTION_INDEX =
"actionsTabFolderSelectionIndex";
private static final String MEMENTO_EVENTS_TAB_FOLDER_SELECTION_INDEX =
"eventsTabFolderSelectionIndex";
private ScanModuleView parentView;
private ScanModule currentScanModule;
ScrolledComposite sc;
Composite top;
// general composite
private Text valueCountText;
private Text triggerDelayText;
private Text settleTimeText;
private Button triggerConfirmAxisCheckBox;
private Button triggerConfirmChannelCheckBox;
private Binding valueCountBinding;
private Binding triggerDelayBinding;
private Binding settleTimeBinding;
private SashForm actionEventSashForm;
// actions composite
private Composite actionsComposite;
private Label actionMaxIcon;
private boolean actionsCompositeMaximized;
private CTabFolder actionsTabFolder;
private CTabItem motorAxisTab;
private CTabItem detectorChannelTab;
private CTabItem prescanTab;
private CTabItem postscanTab;
private CTabItem positioningTab;
private CTabItem plotTab;
private ActionComposite motorAxisComposite;
private ActionComposite detectorChannelComposite;
private ActionComposite prescanComposite;
private ActionComposite postscanComposite;
private ActionComposite positioningComposite;
private ActionComposite plotComposite;
// events composite
private Composite eventsComposite;
private Label eventMaxIcon;
private boolean eventsCompositeMaximized;
public CTabFolder eventsTabFolder;
private CTabItem pauseEventsTabItem;
private CTabItem redoEventsTabItem;
private CTabItem breakEventsTabItem;
private CTabItem triggerEventsTabItem;
private EventComposite pauseEventComposite;
private EventComposite redoEventComposite;
private EventComposite breakEventComposite;
private EventComposite triggerEventComposite;
private Image restoreIcon;
private Image maximizeIcon;
/**
 * Constructs the composite: a vertically scrollable area containing the
 * general settings (value count, trigger delay, settle time, manual
 * trigger) on top and a sash form splitting the actions tab folder from
 * the events tab folder below.
 *
 * @param parentView the scan module view this composite belongs to
 * @param parent the parent composite
 * @param style the SWT style bits
 */
public ClassicComposite(ScanModuleView parentView, Composite parent, int style) {
super(parentView, parent, style);
this.parentView = parentView;
// icons used by the maximize/restore toggles of the sash form children
this.restoreIcon = Activator.getDefault().getImageRegistry()
.get("RESTORE");
this.maximizeIcon = Activator.getDefault().getImageRegistry()
.get("MAXIMIZE");
this.setLayout(new FillLayout());
// scrolled composite keeps content usable at small view sizes
this.sc = new ScrolledComposite(this, SWT.V_SCROLL);
this.top = new Composite(sc, SWT.NONE);
GridLayout gridLayout = new GridLayout();
this.top.setLayout(gridLayout);
sc.setExpandHorizontal(true);
sc.setExpandVertical(true);
sc.setContent(top);
this.createGeneralComposite(top);
// sash form dividing actions (top) from events (bottom)
this.actionEventSashForm = new SashForm(top, SWT.VERTICAL);
this.actionEventSashForm.SASH_WIDTH = 4;
GridData gridData = new GridData();
gridData.horizontalAlignment = GridData.FILL;
gridData.verticalAlignment = GridData.FILL;
gridData.grabExcessHorizontalSpace = true;
gridData.grabExcessVerticalSpace = true;
this.actionEventSashForm.setLayoutData(gridData);
this.createActionsComposite(this.actionEventSashForm);
this.createEventsComposite(this.actionEventSashForm);
sc.setMinSize(SWT.DEFAULT, SWT.DEFAULT);
// wire the widgets to the model of the currently selected scan module
this.bindValues();
}
/**
 * Creates the general settings area containing the number of measurements,
 * trigger delay, settle time and the manual trigger checkboxes for motor
 * axes and detector channels.
 * <p>
 * Each text field selects its whole content on focus/click and is reset
 * from the model when focus is lost (see {@link TextFocusListener}).
 *
 * @param parent the parent composite
 */
private void createGeneralComposite(Composite parent) {
	Composite generalComposite = new Composite(parent, SWT.BORDER);
	GridLayout gridLayout = new GridLayout();
	gridLayout.numColumns = 2;
	generalComposite.setLayout(gridLayout);
	GridData gridData = new GridData();
	gridData.grabExcessHorizontalSpace = true;
	gridData.horizontalAlignment = GridData.FILL;
	generalComposite.setLayoutData(gridData);

	// value count (number of measurements per motor position)
	Label valueCountLabel = new Label(generalComposite, SWT.NONE);
	valueCountLabel.setText("No of Measurements:");
	valueCountLabel.setToolTipText(
			"Number of Measurements taken for each motor position");
	this.valueCountText = new Text(generalComposite, SWT.BORDER);
	gridData = new GridData();
	gridData.horizontalAlignment = GridData.FILL;
	gridData.verticalAlignment = GridData.CENTER;
	gridData.horizontalIndent = 7;
	gridData.grabExcessHorizontalSpace = true;
	this.valueCountText.setLayoutData(gridData);
	this.valueCountText.addFocusListener(
			new TextSelectAllFocusListener(this.valueCountText));
	this.valueCountText.addMouseListener(
			new TextSelectAllMouseListener(this.valueCountText));
	this.valueCountText.addFocusListener(new TextFocusListener(
			this.valueCountText));

	// trigger delay (seconds before detectors are triggered)
	Label triggerDelayLabel = new Label(generalComposite, SWT.NONE);
	triggerDelayLabel.setText("Trigger Delay (in s):");
	triggerDelayLabel.setToolTipText("delay in s before detectors are triggered");
	this.triggerDelayText = new Text(generalComposite, SWT.BORDER);
	gridData = new GridData();
	gridData.horizontalAlignment = GridData.FILL;
	gridData.verticalAlignment = GridData.CENTER;
	gridData.horizontalIndent = 7;
	gridData.grabExcessHorizontalSpace = true;
	this.triggerDelayText.setLayoutData(gridData);
	this.triggerDelayText.addFocusListener(new TextSelectAllFocusListener(
			this.triggerDelayText));
	this.triggerDelayText.addMouseListener(new TextSelectAllMouseListener(
			this.triggerDelayText));
	this.triggerDelayText.addFocusListener(new TextFocusListener(
			this.triggerDelayText));

	// settle time (delay after first positioning in the scan module)
	Label settleTimeLabel = new Label(generalComposite, SWT.NONE);
	settleTimeLabel.setText("Settle Time (in s):");
	settleTimeLabel.setToolTipText(
			"Delay time after first positioning in the scan module");
	this.settleTimeText = new Text(generalComposite, SWT.BORDER);
	gridData = new GridData();
	gridData.horizontalAlignment = GridData.FILL;
	gridData.verticalAlignment = GridData.CENTER;
	gridData.horizontalIndent = 7;
	gridData.grabExcessHorizontalSpace = true;
	this.settleTimeText.setLayoutData(gridData);
	this.settleTimeText.addFocusListener(new TextSelectAllFocusListener(
			this.settleTimeText));
	this.settleTimeText.addMouseListener(new TextSelectAllMouseListener(
			this.settleTimeText));
	this.settleTimeText.addFocusListener(new TextFocusListener(
			this.settleTimeText));

	// manual trigger checkboxes (motors / detectors)
	Label triggerLabel = new Label(generalComposite, SWT.NONE);
	triggerLabel.setText("Manual Trigger:");
	triggerLabel.setLayoutData(new GridData());
	Composite triggerCheckBoxes = new Composite(generalComposite,
			SWT.NONE);
	gridData = new GridData();
	gridData.horizontalIndent = 7;
	triggerCheckBoxes.setLayoutData(gridData);
	triggerCheckBoxes.setLayout(new FillLayout(SWT.HORIZONTAL));
	this.triggerConfirmAxisCheckBox = new Button(triggerCheckBoxes,
			SWT.CHECK);
	this.triggerConfirmAxisCheckBox.setText("Motors");
	this.triggerConfirmChannelCheckBox = new Button(triggerCheckBoxes,
			SWT.CHECK);
	this.triggerConfirmChannelCheckBox.setText("Detectors");
	// NOTE: a second TextFocusListener registration for triggerDelayText
	// was removed here. It was a copy/paste remnant (the listener is
	// already attached above) and caused the model to be written back
	// twice on every focus-lost event.
}
/**
 * Creates the actions area: a maximize/restore toggle label and a tab
 * folder containing the motor axes, detector channels, prescan, postscan,
 * positioning and plot composites.
 *
 * @param parent the parent composite (the action/event sash form)
 */
private void createActionsComposite(Composite parent) {
this.actionsComposite = new Composite(parent, SWT.BORDER);
this.actionsComposite.setLayout(new GridLayout(2, false));
// clicking the icon toggles between maximized and restored state
this.actionMaxIcon = new Label(this.actionsComposite, SWT.NONE);
this.actionMaxIcon.setImage(maximizeIcon);
this.actionMaxIcon.addMouseListener(new MouseAdapter() {
@Override
public void mouseUp(MouseEvent e) {
if (actionsCompositeMaximized) {
// restore: show both sash form children again
actionMaxIcon.setImage(maximizeIcon);
actionMaxIcon.getParent().layout();
actionEventSashForm.setMaximizedControl(null);
actionsCompositeMaximized = false;
} else {
// maximize: actions composite fills the whole sash form
actionMaxIcon.setImage(restoreIcon);
actionEventSashForm.setMaximizedControl(actionsComposite);
actionsCompositeMaximized = true;
}
}
});
Label actionsLabel = new Label(this.actionsComposite, SWT.NONE);
actionsLabel.setText("Actions:");
this.actionsTabFolder = new CTabFolder(this.actionsComposite, SWT.FLAT);
this.actionsTabFolder.setSimple(true);
this.actionsTabFolder.setBorderVisible(true);
GridData gridData = new GridData();
gridData.horizontalAlignment = GridData.FILL;
gridData.verticalAlignment = GridData.FILL;
gridData.grabExcessHorizontalSpace = true;
gridData.grabExcessVerticalSpace = true;
gridData.horizontalSpan = 2;
actionsTabFolder.setLayoutData(gridData);
// create the content composites before their tab items
this.motorAxisComposite = new MotorAxisComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.detectorChannelComposite = new DetectorChannelComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.prescanComposite = new PrescanComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.postscanComposite = new PostscanComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.positioningComposite = new PositioningComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.plotComposite = new PlotComposite(
this.parentView, actionsTabFolder, SWT.NONE);
// one tab per action category
this.motorAxisTab = new CTabItem(actionsTabFolder, SWT.FLAT);
this.motorAxisTab.setText(" Motor Axes ");
this.motorAxisTab.setToolTipText(
"Select motor axes to be used in this scan module");
this.motorAxisTab.setControl(motorAxisComposite);
this.detectorChannelTab = new CTabItem(this.actionsTabFolder, SWT.FLAT);
this.detectorChannelTab.setText(" Detector Channels ");
this.detectorChannelTab
.setToolTipText("Select detector channels to be used in this scan module");
this.detectorChannelTab.setControl(this.detectorChannelComposite);
this.prescanTab = new CTabItem(this.actionsTabFolder, SWT.FLAT);
this.prescanTab.setText(" Prescan ");
this.prescanTab
.setToolTipText("Action to do before scan module is started");
this.prescanTab.setControl(this.prescanComposite);
this.postscanTab = new CTabItem(this.actionsTabFolder, SWT.FLAT);
this.postscanTab.setText(" Postscan ");
this.postscanTab.setToolTipText("Action to do if scan module is done");
this.postscanTab.setControl(this.postscanComposite);
this.positioningTab = new CTabItem(this.actionsTabFolder, SWT.FLAT);
this.positioningTab.setText(" Positioning ");
this.positioningTab
.setToolTipText("Move motor to calculated position after scan module is done");
this.positioningTab.setControl(this.positioningComposite);
this.plotTab = new CTabItem(this.actionsTabFolder, SWT.FLAT);
this.plotTab.setText(" Plot ");
this.plotTab.setToolTipText("Plot settings for this scan module");
this.plotTab.setControl(this.plotComposite);
this.actionsCompositeMaximized = false;
}
/**
 * Creates the events area: a maximize/restore toggle label and a tab
 * folder containing the pause, redo, skip (break) and trigger event
 * composites.
 *
 * @param parent the parent composite (the action/event sash form)
 */
private void createEventsComposite(Composite parent) {
this.eventsComposite = new Composite(parent, SWT.BORDER);
this.eventsComposite.setLayout(new GridLayout(2, false));
// clicking the icon toggles between maximized and restored state
this.eventMaxIcon = new Label(eventsComposite, SWT.NONE);
this.eventMaxIcon.setImage(maximizeIcon);
this.eventMaxIcon.addMouseListener(new MouseAdapter() {
@Override
public void mouseUp(MouseEvent e) {
if (eventsCompositeMaximized) {
// restore: show both sash form children again
eventMaxIcon.setImage(maximizeIcon);
eventMaxIcon.getParent().layout();
actionEventSashForm.setMaximizedControl(null);
eventsCompositeMaximized = false;
} else {
// maximize: events composite fills the whole sash form
eventMaxIcon.setImage(restoreIcon);
actionEventSashForm.setMaximizedControl(eventsComposite);
eventsCompositeMaximized = true;
}
}
});
Label eventLabel = new Label(eventsComposite, SWT.NONE);
eventLabel.setText("Events:");
this.eventsTabFolder = new CTabFolder(this.eventsComposite, SWT.NONE);
this.eventsTabFolder.setSimple(true);
this.eventsTabFolder.setBorderVisible(true);
// tab selection publishes the corresponding table viewer as the
// view's selection provider
eventsTabFolder.addSelectionListener(
new EventsTabFolderSelectionListener());
GridData gridData = new GridData();
gridData.horizontalAlignment = GridData.FILL;
gridData.verticalAlignment = GridData.FILL;
gridData.grabExcessHorizontalSpace = true;
gridData.grabExcessVerticalSpace = true;
gridData.horizontalSpan = 2;
this.eventsTabFolder.setLayoutData(gridData);
// create the content composites before their tab items
pauseEventComposite = new EventComposite(eventsTabFolder, SWT.NONE,
ControlEventTypes.PAUSE_EVENT, this.parentView);
redoEventComposite = new EventComposite(eventsTabFolder, SWT.NONE,
ControlEventTypes.CONTROL_EVENT, this.parentView);
breakEventComposite = new EventComposite(eventsTabFolder, SWT.NONE,
ControlEventTypes.CONTROL_EVENT, this.parentView);
triggerEventComposite = new EventComposite(eventsTabFolder, SWT.NONE,
ControlEventTypes.CONTROL_EVENT, this.parentView);
// one tab per event category
this.pauseEventsTabItem = new CTabItem(eventsTabFolder, SWT.NONE);
this.pauseEventsTabItem.setText(" Pause ");
this.pauseEventsTabItem
.setToolTipText("Configure event to pause and resume this scan module");
this.pauseEventsTabItem.setControl(pauseEventComposite);
this.redoEventsTabItem = new CTabItem(eventsTabFolder, SWT.NONE);
this.redoEventsTabItem.setText(" Redo ");
this.redoEventsTabItem
.setToolTipText("Repeat the last acquisition, if redo event occurs");
this.redoEventsTabItem.setControl(redoEventComposite);
this.breakEventsTabItem = new CTabItem(eventsTabFolder, SWT.NONE);
this.breakEventsTabItem.setText(" Skip ");
this.breakEventsTabItem
.setToolTipText("Finish this scan module and continue with next");
this.breakEventsTabItem.setControl(breakEventComposite);
this.triggerEventsTabItem = new CTabItem(eventsTabFolder, SWT.NONE);
this.triggerEventsTabItem.setText(" Trigger ");
this.triggerEventsTabItem
.setToolTipText("Wait for trigger event before moving to next position");
this.triggerEventsTabItem.setControl(triggerEventComposite);
this.eventsCompositeMaximized = false;
}
/**
 * Binds the general composite widgets (value count, trigger delay, settle
 * time, manual trigger checkboxes) to the corresponding properties of the
 * currently selected classic scan module via JFace data binding.
 * <p>
 * Bindings with validators/decorations are kept in fields so that
 * {@link TextFocusListener} can reset a widget from the model on focus
 * lost; the checkbox bindings need no later access.
 */
private void bindValues() {
	DataBindingContext context = new DataBindingContext();

	// observe the currently selected classic scan module
	ISelectionProvider selectionProvider = new ScanModuleSelectionProvider(
			ScanModuleTypes.CLASSIC);
	IObservableValue selectionObservable = ViewersObservables
			.observeSingleSelection(selectionProvider);

	// value count <-> ScanModule#valueCount (validated + decorated)
	IObservableValue valueCountTargetObservable = SWTObservables
			.observeText(this.valueCountText, SWT.Modify);
	IObservableValue valueCountModelObservable = BeansObservables.observeDetailValue(
			selectionObservable, ScanModule.class,
			ScanModule.VALUE_COUNT_PROP, Integer.class);
	UpdateValueStrategy valueCountTargetToModelStrategy =
			new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE);
	valueCountTargetToModelStrategy
			.setAfterGetValidator(new ValueCountValidator());
	valueCountTargetToModelStrategy.setConverter(new ValueCountConverter());
	this.valueCountBinding = context.bindValue(valueCountTargetObservable,
			valueCountModelObservable, valueCountTargetToModelStrategy,
			new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE));
	ControlDecorationSupport.create(this.valueCountBinding, SWT.LEFT);

	// trigger delay <-> ScanModule#triggerDelay (validated + decorated)
	IObservableValue triggerDelayTargetObservable = SWTObservables.observeText(
			this.triggerDelayText, SWT.Modify);
	IObservableValue triggerDelayModelObservable = BeansObservables.observeDetailValue(
			selectionObservable, ScanModule.class,
			ScanModule.TRIGGER_DELAY_PROP, Double.class);
	UpdateValueStrategy triggerDelayTargetToModelStrategy =
			new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE);
	// NOTE(review): the validator label "Settle Time" looks like a
	// copy/paste slip ("Trigger Delay" expected) -- confirm against
	// TriggerDelaySettleTimeValidator before changing.
	triggerDelayTargetToModelStrategy
			.setAfterGetValidator(new TriggerDelaySettleTimeValidator("Settle Time"));
	triggerDelayTargetToModelStrategy
			.setConverter(new TriggerDelaySettleTimeConverter());
	UpdateValueStrategy triggerDelayModelToTargetStrategy =
			new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE);
	triggerDelayModelToTargetStrategy
			.setConverter(new ModelToTargetConverter());
	this.triggerDelayBinding = context.bindValue(
			triggerDelayTargetObservable, triggerDelayModelObservable,
			triggerDelayTargetToModelStrategy,
			triggerDelayModelToTargetStrategy);
	ControlDecorationSupport.create(this.triggerDelayBinding, SWT.LEFT);

	// settle time <-> ScanModule#settleTime (validated + decorated)
	IObservableValue settleTimeTargetObservable = SWTObservables.observeText(
			this.settleTimeText, SWT.Modify);
	IObservableValue settleTimeModelObservable = BeansObservables.observeDetailValue(
			selectionObservable, ScanModule.class,
			ScanModule.SETTLE_TIME_PROP, Double.class);
	UpdateValueStrategy settleTimeTargetToModelStrategy =
			new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE);
	UpdateValueStrategy settleTimeModelToTargetStrategy =
			new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE);
	settleTimeModelToTargetStrategy
			.setConverter(new ModelToTargetConverter());
	settleTimeTargetToModelStrategy
			.setAfterGetValidator(new TriggerDelaySettleTimeValidator(
					"Settle Time"));
	settleTimeTargetToModelStrategy
			.setConverter(new TriggerDelaySettleTimeConverter());
	this.settleTimeBinding = context.bindValue(settleTimeTargetObservable,
			settleTimeModelObservable, settleTimeTargetToModelStrategy,
			settleTimeModelToTargetStrategy);
	ControlDecorationSupport.create(this.settleTimeBinding, SWT.LEFT);

	// manual trigger checkboxes <-> trigger confirm properties.
	// The Binding return values are not needed; previously they were
	// stored in locals and "used" via no-op getClass() calls only to
	// silence unused-variable warnings.
	IObservableValue axisTriggerTargetObservable = SWTObservables
			.observeSelection(this.triggerConfirmAxisCheckBox);
	IObservableValue axisTriggerModelObservable = BeansObservables.observeDetailValue(
			selectionObservable, ScanModule.class,
			ScanModule.TRIGGER_CONFIRM_AXIS_PROP, Boolean.class);
	context.bindValue(
			axisTriggerTargetObservable, axisTriggerModelObservable,
			new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE),
			new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE));
	IObservableValue channelTriggerTargetObservable = SWTObservables
			.observeSelection(this.triggerConfirmChannelCheckBox);
	IObservableValue channelTriggerModelObservable = BeansObservables
			.observeDetailValue(selectionObservable, ScanModule.class,
					ScanModule.TRIGGER_CONFIRM_CHANNEL_PROP, Boolean.class);
	context.bindValue(
			channelTriggerTargetObservable, channelTriggerModelObservable,
			new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE),
			new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE));
}
/**
 * {@inheritDoc}
 * <p>
 * This composite handles scan modules of type
 * {@link ScanModuleTypes#CLASSIC} only.
 */
@Override
protected ScanModuleTypes getType() {
return ScanModuleTypes.CLASSIC;
}
/**
 * {@inheritDoc}
 * <p>
 * Switches the composite to the given scan module: detaches the model
 * update listener from the previously shown module, propagates the new
 * module to all action composites and refreshes events and error markers.
 */
@Override
protected void setScanModule(ScanModule scanModule) {
LOGGER.debug("ClassicComposite#setScanModule: " + scanModule);
// if there was a scan module shown before, stop listening to changes
if (this.currentScanModule != null) {
this.currentScanModule.removeModelUpdateListener(this);
}
// set the new scan module as the current one
this.currentScanModule = scanModule;
// tell the action composites about the change
this.motorAxisComposite.setScanModule(scanModule);
this.detectorChannelComposite.setScanModule(scanModule);
this.prescanComposite.setScanModule(scanModule);
this.postscanComposite.setScanModule(scanModule);
this.positioningComposite.setScanModule(scanModule);
this.plotComposite.setScanModule(scanModule);
if (this.currentScanModule != null) {
// new scan module
this.currentScanModule.addModelUpdateListener(this);
// ensure a tab is selected in both tab folders
if (this.eventsTabFolder.getSelection() == null) {
this.eventsTabFolder.setSelection(0);
}
int selectionIndex = this.actionsTabFolder.getSelectionIndex();
if (selectionIndex == -1) {
this.actionsTabFolder.setSelection(0);
} else {
this.actionsTabFolder.setSelection(selectionIndex);
}
// adjust scroll area to the current content size
sc.setMinSize(this.top.getBounds().width +
this.top.getBounds().x,
this.top.getBounds().height +
this.top.getBounds().y);
} else {
// no scan module selected -> reset contents
this.parentView.setSelectionProvider(null);
}
updateEvent(null);
this.layout();
}
/**
 * {@inheritDoc}
 * <p>
 * Refreshes the pause, redo, break and trigger event tables from the
 * current scan module and re-evaluates the error markers; clears the
 * tables if no scan module is set.
 */
@Override
public void updateEvent(ModelUpdateEvent modelUpdateEvent) {
	if (this.currentScanModule == null) {
		// nothing selected: empty all event tables
		pauseEventComposite.setEvents(this.currentScanModule, null);
		redoEventComposite.setEvents(this.currentScanModule, null);
		breakEventComposite.setEvents(this.currentScanModule, null);
		triggerEventComposite.setEvents(this.currentScanModule, null);
		return;
	}
	pauseEventComposite.setEvents(this.currentScanModule,
			EventImpacts.PAUSE);
	redoEventComposite.setEvents(this.currentScanModule,
			EventImpacts.REDO);
	breakEventComposite.setEvents(this.currentScanModule,
			EventImpacts.BREAK);
	triggerEventComposite.setEvents(this.currentScanModule,
			EventImpacts.TRIGGER);
	checkForErrors();
}
/**
 * Marks the tabs of the actions and events tab folders with an error icon
 * for every category in which the current scan module reports model
 * errors; tabs without errors are cleared.
 */
private void checkForErrors() {
	// reset action tab error markers
	this.motorAxisTab.setImage(null);
	this.detectorChannelTab.setImage(null);
	this.prescanTab.setImage(null);
	this.postscanTab.setImage(null);
	this.positioningTab.setImage(null);
	this.plotTab.setImage(null);
	// mark the action tab corresponding to each model error's category
	// (setting the same shared image repeatedly is harmless)
	for (IModelError error : this.currentScanModule.getModelErrors()) {
		if (error instanceof AxisError) {
			this.motorAxisTab.setImage(errorImage());
		} else if (error instanceof ChannelError) {
			this.detectorChannelTab.setImage(errorImage());
		} else if (error instanceof PrescanError) {
			this.prescanTab.setImage(errorImage());
		} else if (error instanceof PostscanError) {
			this.postscanTab.setImage(errorImage());
		} else if (error instanceof PositioningError) {
			this.positioningTab.setImage(errorImage());
		} else if (error instanceof PlotWindowError) {
			this.plotTab.setImage(errorImage());
		}
	}
	// reset event tab error markers
	this.pauseEventsTabItem.setImage(null);
	this.redoEventsTabItem.setImage(null);
	this.breakEventsTabItem.setImage(null);
	this.triggerEventsTabItem.setImage(null);
	// mark event tabs whose control events contain model errors
	for (ControlEvent event : this.currentScanModule.getPauseEvents()) {
		if (!event.getModelErrors().isEmpty()) {
			this.pauseEventsTabItem.setImage(errorImage());
		}
	}
	for (ControlEvent event : this.currentScanModule.getRedoEvents()) {
		if (!event.getModelErrors().isEmpty()) {
			this.redoEventsTabItem.setImage(errorImage());
		}
	}
	for (ControlEvent event : this.currentScanModule.getBreakEvents()) {
		if (!event.getModelErrors().isEmpty()) {
			this.breakEventsTabItem.setImage(errorImage());
		}
	}
	for (ControlEvent event : this.currentScanModule.getTriggerEvents()) {
		if (!event.getModelErrors().isEmpty()) {
			this.triggerEventsTabItem.setImage(errorImage());
		}
	}
}

/**
 * Returns the shared workbench error image used to mark tabs with errors.
 * Extracted to remove the six-fold repetition of the image lookup.
 *
 * @return the shared error task image (must not be disposed)
 */
private Image errorImage() {
	return PlatformUI.getWorkbench().getSharedImages()
			.getImage(ISharedImages.IMG_OBJS_ERROR_TSK);
}
/**
 * {@inheritDoc}
 * <p>
 * Persists the UI state (maximized states and weights of the sash form,
 * sort states of the axis/channel viewers, selected tabs) in the given
 * memento so it can be restored on the next workbench start.
 */
@Override
protected void saveState(IMemento memento) {
// remember maximized states of sash form
memento.putBoolean(MEMENTO_ACTIONS_COMPOSITE_MAXIMIZED,
this.actionsCompositeMaximized);
memento.putBoolean(MEMENTO_EVENTS_COMPOSITE_MAXIMIZED,
this.eventsCompositeMaximized);
// save composite heights
memento.putInteger(MEMENTO_ACTIONS_COMPOSITE_WEIGHT,
actionEventSashForm.getWeights()[0]);
memento.putInteger(MEMENTO_EVENTS_COMPOSITE_WEIGHT,
actionEventSashForm.getWeights()[1]);
// remember sort state of action composite viewers
memento.putInteger(MEMENTO_AXES_SORT_STATE,
this.motorAxisComposite.getSortState());
memento.putInteger(MEMENTO_CHANNEL_SORT_STATE,
this.detectorChannelComposite.getSortState());
// remember selected action tab
memento.putInteger(MEMENTO_ACTIONS_TAB_FOLDER_SELECTION_INDEX,
this.actionsTabFolder.getSelectionIndex());
memento.putInteger(MEMENTO_EVENTS_TAB_FOLDER_SELECTION_INDEX,
this.eventsTabFolder.getSelectionIndex());
}
/**
 * {@inheritDoc}
 * <p>
 * Restores the UI state saved by {@link #saveState(IMemento)}. Missing
 * memento entries (e.g. on first start) fall back to sensible defaults:
 * not maximized, equal sash weights, unchanged sort states and tab
 * selections.
 */
@Override
protected void restoreState(IMemento memento) {
// restore maximized states (memento returns null if key is absent)
this.actionsCompositeMaximized = (memento
.getBoolean(MEMENTO_ACTIONS_COMPOSITE_MAXIMIZED) == null)
? false
: memento.getBoolean(MEMENTO_ACTIONS_COMPOSITE_MAXIMIZED);
this.eventsCompositeMaximized = (memento
.getBoolean(MEMENTO_EVENTS_COMPOSITE_MAXIMIZED) == null)
? false
: memento.getBoolean(MEMENTO_EVENTS_COMPOSITE_MAXIMIZED);
if (this.actionsCompositeMaximized) {
this.actionMaxIcon.setImage(restoreIcon);
this.actionEventSashForm.setMaximizedControl(actionsComposite);
}
if (this.eventsCompositeMaximized) {
this.eventMaxIcon.setImage(restoreIcon);
this.actionEventSashForm.setMaximizedControl(eventsComposite);
}
// restore sash form weights (default: equal split)
int[] weights = new int[2];
weights[0] = (memento.getInteger(MEMENTO_ACTIONS_COMPOSITE_WEIGHT) == null)
? 1 : memento.getInteger(MEMENTO_ACTIONS_COMPOSITE_WEIGHT);
weights[1] = (memento.getInteger(MEMENTO_EVENTS_COMPOSITE_WEIGHT) == null)
? 1 : memento.getInteger(MEMENTO_EVENTS_COMPOSITE_WEIGHT);
actionEventSashForm.setWeights(weights);
// restore sort state of action composite viewers
if (memento.getInteger(MEMENTO_AXES_SORT_STATE) != null) {
this.motorAxisComposite.setSortState(memento
.getInteger(MEMENTO_AXES_SORT_STATE));
}
if (memento.getInteger(MEMENTO_CHANNEL_SORT_STATE) != null) {
this.detectorChannelComposite.setSortState(memento
.getInteger(MEMENTO_CHANNEL_SORT_STATE));
}
// restore selected tabs of tab folders
if (memento.getInteger(MEMENTO_ACTIONS_TAB_FOLDER_SELECTION_INDEX) != null) {
this.actionsTabFolder.setSelection(memento.getInteger(
MEMENTO_ACTIONS_TAB_FOLDER_SELECTION_INDEX));
}
if (memento.getInteger(MEMENTO_EVENTS_TAB_FOLDER_SELECTION_INDEX) != null) {
this.eventsTabFolder.setSelection(memento.getInteger(
MEMENTO_EVENTS_TAB_FOLDER_SELECTION_INDEX));
}
}
/**
 * For legacy compatibility with code smells from
 * EventMenuContributionHelper, which accessed the (public) tab folder
 * attribute directly before (when the classic parts were contained
 * directly in the view).
 *
 * @return the zero-based selection index of the events tab folder, or -1
 *         if no tab is selected
 * @since 1.31
 */
public int getEventsTabFolderSelectionIndex() {
return this.eventsTabFolder.getSelectionIndex();
}
/**
 * Publishes the table viewer of the newly selected event tab as the
 * parent view's selection provider.
 */
private class EventsTabFolderSelectionListener extends SelectionAdapter {
	@Override
	public void widgetSelected(SelectionEvent e) {
		// tab order: 0 = Pause, 1 = Redo, 2 = Skip (break), 3 = Trigger
		int selectedTab = eventsTabFolder.getSelectionIndex();
		if (selectedTab == 0) {
			parentView.setSelectionProvider(
					pauseEventComposite.getTableViewer());
		} else if (selectedTab == 1) {
			parentView.setSelectionProvider(
					redoEventComposite.getTableViewer());
		} else if (selectedTab == 2) {
			parentView.setSelectionProvider(
					breakEventComposite.getTableViewer());
		} else if (selectedTab == 3) {
			parentView.setSelectionProvider(
					triggerEventComposite.getTableViewer());
		}
		// any other index: leave the selection provider untouched
	}
}
/**
 * Resets its widget's text from the model when focus is lost, so that
 * input rejected by the binding's validator does not linger in the UI.
 */
private class TextFocusListener extends FocusAdapter {
	private final Text widget;

	/**
	 * @param widget the text widget to reset on focus lost (one of the
	 *               general composite's text fields)
	 */
	public TextFocusListener(Text widget) {
		this.widget = widget;
	}

	@Override
	public void focusLost(FocusEvent e) {
		// resolve which binding belongs to this widget, then refresh
		// the widget from the model value
		Binding binding = null;
		if (widget == valueCountText) {
			binding = valueCountBinding;
		} else if (widget == triggerDelayText) {
			binding = triggerDelayBinding;
		} else if (widget == settleTimeText) {
			binding = settleTimeBinding;
		}
		if (binding != null) {
			binding.updateModelToTarget();
		}
	}
}
}
| bundles/de.ptb.epics.eve.editor/src/de/ptb/epics/eve/editor/views/scanmoduleview/classiccomposite/ClassicComposite.java | package de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite;
import org.apache.log4j.Logger;
import org.eclipse.core.databinding.Binding;
import org.eclipse.core.databinding.DataBindingContext;
import org.eclipse.core.databinding.UpdateValueStrategy;
import org.eclipse.core.databinding.beans.BeansObservables;
import org.eclipse.core.databinding.observable.value.IObservableValue;
import org.eclipse.jface.databinding.fieldassist.ControlDecorationSupport;
import org.eclipse.jface.databinding.swt.SWTObservables;
import org.eclipse.jface.databinding.viewers.ViewersObservables;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.custom.ScrolledComposite;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IMemento;
import org.eclipse.ui.ISharedImages;
import org.eclipse.ui.PlatformUI;
import de.ptb.epics.eve.data.EventImpacts;
import de.ptb.epics.eve.data.scandescription.ControlEvent;
import de.ptb.epics.eve.data.scandescription.ScanModule;
import de.ptb.epics.eve.data.scandescription.ScanModuleTypes;
import de.ptb.epics.eve.data.scandescription.errors.AxisError;
import de.ptb.epics.eve.data.scandescription.errors.ChannelError;
import de.ptb.epics.eve.data.scandescription.errors.IModelError;
import de.ptb.epics.eve.data.scandescription.errors.PlotWindowError;
import de.ptb.epics.eve.data.scandescription.errors.PositioningError;
import de.ptb.epics.eve.data.scandescription.errors.PostscanError;
import de.ptb.epics.eve.data.scandescription.errors.PrescanError;
import de.ptb.epics.eve.data.scandescription.updatenotification.ControlEventTypes;
import de.ptb.epics.eve.data.scandescription.updatenotification.IModelUpdateListener;
import de.ptb.epics.eve.data.scandescription.updatenotification.ModelUpdateEvent;
import de.ptb.epics.eve.editor.Activator;
import de.ptb.epics.eve.editor.views.eventcomposite.EventComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.ScanModuleSelectionProvider;
import de.ptb.epics.eve.editor.views.scanmoduleview.ScanModuleView;
import de.ptb.epics.eve.editor.views.scanmoduleview.ScanModuleViewComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.detectorchannelcomposite.DetectorChannelComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.motoraxiscomposite.MotorAxisComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.plotcomposite.PlotComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.positioningcomposite.PositioningComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.postscancomposite.PostscanComposite;
import de.ptb.epics.eve.editor.views.scanmoduleview.classiccomposite.prescancomposite.PrescanComposite;
import de.ptb.epics.eve.util.ui.swt.TextSelectAllFocusListener;
import de.ptb.epics.eve.util.ui.swt.TextSelectAllMouseListener;
/**
* @author Marcus Michalsky
* @since 1.31
*/
public class ClassicComposite extends ScanModuleViewComposite implements IModelUpdateListener {
private static final Logger LOGGER = Logger.getLogger(
ClassicComposite.class.getName());
private static final String MEMENTO_ACTIONS_COMPOSITE_MAXIMIZED =
"actionsCompositeMaximized";
private static final String MEMENTO_EVENTS_COMPOSITE_MAXIMIZED =
"eventsCompositeMaximized";
private static final String MEMENTO_ACTIONS_COMPOSITE_WEIGHT =
"actionsCompositeWeight";
private static final String MEMENTO_EVENTS_COMPOSITE_WEIGHT =
"eventsCompositeWeight";
private static final String MEMENTO_AXES_SORT_STATE =
"AxesSortState";
private static final String MEMENTO_CHANNEL_SORT_STATE =
"ChannelSortState";
private static final String MEMENTO_ACTIONS_TAB_FOLDER_SELECTION_INDEX =
"actionsTabFolderSelectionIndex";
private static final String MEMENTO_EVENTS_TAB_FOLDER_SELECTION_INDEX =
"eventsTabFolderSelectionIndex";
private ScanModuleView parentView;
private ScanModule currentScanModule;
ScrolledComposite sc;
Composite top;
// general composite
private Text valueCountText;
private Text triggerDelayText;
private Text settleTimeText;
private Button triggerConfirmAxisCheckBox;
private Button triggerConfirmChannelCheckBox;
private Binding valueCountBinding;
private Binding triggerDelayBinding;
private Binding settleTimeBinding;
private SashForm actionEventSashForm;
// actions composite
private Composite actionsComposite;
private Label actionMaxIcon;
private boolean actionsCompositeMaximized;
private CTabFolder actionsTabFolder;
private CTabItem motorAxisTab;
private CTabItem detectorChannelTab;
private CTabItem prescanTab;
private CTabItem postscanTab;
private CTabItem positioningTab;
private CTabItem plotTab;
private ActionComposite motorAxisComposite;
private ActionComposite detectorChannelComposite;
private ActionComposite prescanComposite;
private ActionComposite postscanComposite;
private ActionComposite positioningComposite;
private ActionComposite plotComposite;
// events composite
private Composite eventsComposite;
private Label eventMaxIcon;
private boolean eventsCompositeMaximized;
public CTabFolder eventsTabFolder;
private CTabItem pauseEventsTabItem;
private CTabItem redoEventsTabItem;
private CTabItem breakEventsTabItem;
private CTabItem triggerEventsTabItem;
private EventComposite pauseEventComposite;
private EventComposite redoEventComposite;
private EventComposite breakEventComposite;
private EventComposite triggerEventComposite;
private Image restoreIcon;
private Image maximizeIcon;
public ClassicComposite(ScanModuleView parentView, Composite parent, int style) {
super(parentView, parent, style);
this.parentView = parentView;
this.restoreIcon = Activator.getDefault().getImageRegistry()
.get("RESTORE");
this.maximizeIcon = Activator.getDefault().getImageRegistry()
.get("MAXIMIZE");
this.setLayout(new FillLayout());
this.sc = new ScrolledComposite(this, SWT.V_SCROLL);
this.top = new Composite(sc, SWT.NONE);
GridLayout gridLayout = new GridLayout();
this.top.setLayout(gridLayout);
sc.setExpandHorizontal(true);
sc.setExpandVertical(true);
sc.setContent(top);
this.createGeneralComposite(top);
this.actionEventSashForm = new SashForm(top, SWT.VERTICAL);
this.actionEventSashForm.SASH_WIDTH = 4;
GridData gridData = new GridData();
gridData.horizontalAlignment = GridData.FILL;
gridData.verticalAlignment = GridData.FILL;
gridData.grabExcessHorizontalSpace = true;
gridData.grabExcessVerticalSpace = true;
this.actionEventSashForm.setLayoutData(gridData);
this.createActionsComposite(this.actionEventSashForm);
this.createEventsComposite(this.actionEventSashForm);
sc.setMinSize(SWT.DEFAULT, SWT.DEFAULT);
this.bindValues();
}
private void createGeneralComposite(Composite parent) {
Composite generalComposite = new Composite(parent, SWT.BORDER);
GridLayout gridLayout = new GridLayout();
gridLayout.numColumns = 2;
generalComposite.setLayout(gridLayout);
GridData gridData = new GridData();
gridData.grabExcessHorizontalSpace = true;
gridData.horizontalAlignment = GridData.FILL;
generalComposite.setLayoutData(gridData);
Label valueCountLabel = new Label(generalComposite, SWT.NONE);
valueCountLabel.setText("No of Measurements:");
valueCountLabel.setToolTipText(
"Number of Measurements taken for each motor position");
this.valueCountText = new Text(generalComposite, SWT.BORDER);
gridData = new GridData();
gridData.horizontalAlignment = GridData.FILL;
gridData.verticalAlignment = GridData.CENTER;
gridData.horizontalIndent = 7;
gridData.grabExcessHorizontalSpace = true;
this.valueCountText.setLayoutData(gridData);
this.valueCountText.addFocusListener(
new TextSelectAllFocusListener(this.valueCountText));
this.valueCountText.addMouseListener(
new TextSelectAllMouseListener(this.valueCountText));
this.valueCountText.addFocusListener(new TextFocusListener(
this.valueCountText));
Label triggerDelayLabel = new Label(generalComposite, SWT.NONE);
triggerDelayLabel.setText("Trigger Delay (in s):");
triggerDelayLabel.setToolTipText("Delay time after positioning");
this.triggerDelayText = new Text(generalComposite, SWT.BORDER);
gridData = new GridData();
gridData.horizontalAlignment = GridData.FILL;
gridData.verticalAlignment = GridData.CENTER;
gridData.horizontalIndent = 7;
gridData.grabExcessHorizontalSpace = true;
this.triggerDelayText.setLayoutData(gridData);
this.triggerDelayText.addFocusListener(new TextSelectAllFocusListener(
this.triggerDelayText));
this.triggerDelayText.addMouseListener(new TextSelectAllMouseListener(
this.triggerDelayText));
this.triggerDelayText.addFocusListener(new TextFocusListener(
this.triggerDelayText));
Label settleTimeLabel = new Label(generalComposite, SWT.NONE);
settleTimeLabel.setText("Settle Time (in s):");
settleTimeLabel.setToolTipText(
"Delay time after first positioning in the scan module");
this.settleTimeText = new Text(generalComposite, SWT.BORDER);
gridData = new GridData();
gridData.horizontalAlignment = GridData.FILL;
gridData.verticalAlignment = GridData.CENTER;
gridData.horizontalIndent = 7;
gridData.grabExcessHorizontalSpace = true;
this.settleTimeText.setLayoutData(gridData);
this.settleTimeText.addFocusListener(new TextSelectAllFocusListener(
this.settleTimeText));
this.settleTimeText.addMouseListener(new TextSelectAllMouseListener(
this.settleTimeText));
this.settleTimeText.addFocusListener(new TextFocusListener(
this.settleTimeText));
Label triggerLabel = new Label(generalComposite, SWT.NONE);
triggerLabel.setText("Manual Trigger:");
triggerLabel.setLayoutData(new GridData());
Composite triggerCheckBoxes = new Composite(generalComposite,
SWT.NONE);
gridData = new GridData();
gridData.horizontalIndent = 7;
triggerCheckBoxes.setLayoutData(gridData);
triggerCheckBoxes.setLayout(new FillLayout(SWT.HORIZONTAL));
this.triggerConfirmAxisCheckBox = new Button(triggerCheckBoxes,
SWT.CHECK);
this.triggerConfirmAxisCheckBox.setText("Motors");
this.triggerConfirmChannelCheckBox = new Button(triggerCheckBoxes,
SWT.CHECK);
this.triggerConfirmChannelCheckBox.setText("Detectors");
this.triggerDelayText.addFocusListener(new TextFocusListener(
this.triggerDelayText));
}
private void createActionsComposite(Composite parent) {
this.actionsComposite = new Composite(parent, SWT.BORDER);
this.actionsComposite.setLayout(new GridLayout(2, false));
this.actionMaxIcon = new Label(this.actionsComposite, SWT.NONE);
this.actionMaxIcon.setImage(maximizeIcon);
this.actionMaxIcon.addMouseListener(new MouseAdapter() {
@Override
public void mouseUp(MouseEvent e) {
if (actionsCompositeMaximized) {
actionMaxIcon.setImage(maximizeIcon);
actionMaxIcon.getParent().layout();
actionEventSashForm.setMaximizedControl(null);
actionsCompositeMaximized = false;
} else {
actionMaxIcon.setImage(restoreIcon);
actionEventSashForm.setMaximizedControl(actionsComposite);
actionsCompositeMaximized = true;
}
}
});
Label actionsLabel = new Label(this.actionsComposite, SWT.NONE);
actionsLabel.setText("Actions:");
this.actionsTabFolder = new CTabFolder(this.actionsComposite, SWT.FLAT);
this.actionsTabFolder.setSimple(true);
this.actionsTabFolder.setBorderVisible(true);
GridData gridData = new GridData();
gridData.horizontalAlignment = GridData.FILL;
gridData.verticalAlignment = GridData.FILL;
gridData.grabExcessHorizontalSpace = true;
gridData.grabExcessVerticalSpace = true;
gridData.horizontalSpan = 2;
actionsTabFolder.setLayoutData(gridData);
this.motorAxisComposite = new MotorAxisComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.detectorChannelComposite = new DetectorChannelComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.prescanComposite = new PrescanComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.postscanComposite = new PostscanComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.positioningComposite = new PositioningComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.plotComposite = new PlotComposite(
this.parentView, actionsTabFolder, SWT.NONE);
this.motorAxisTab = new CTabItem(actionsTabFolder, SWT.FLAT);
this.motorAxisTab.setText(" Motor Axes ");
this.motorAxisTab.setToolTipText(
"Select motor axes to be used in this scan module");
this.motorAxisTab.setControl(motorAxisComposite);
this.detectorChannelTab = new CTabItem(this.actionsTabFolder, SWT.FLAT);
this.detectorChannelTab.setText(" Detector Channels ");
this.detectorChannelTab
.setToolTipText("Select detector channels to be used in this scan module");
this.detectorChannelTab.setControl(this.detectorChannelComposite);
this.prescanTab = new CTabItem(this.actionsTabFolder, SWT.FLAT);
this.prescanTab.setText(" Prescan ");
this.prescanTab
.setToolTipText("Action to do before scan module is started");
this.prescanTab.setControl(this.prescanComposite);
this.postscanTab = new CTabItem(this.actionsTabFolder, SWT.FLAT);
this.postscanTab.setText(" Postscan ");
this.postscanTab.setToolTipText("Action to do if scan module is done");
this.postscanTab.setControl(this.postscanComposite);
this.positioningTab = new CTabItem(this.actionsTabFolder, SWT.FLAT);
this.positioningTab.setText(" Positioning ");
this.positioningTab
.setToolTipText("Move motor to calculated position after scan module is done");
this.positioningTab.setControl(this.positioningComposite);
this.plotTab = new CTabItem(this.actionsTabFolder, SWT.FLAT);
this.plotTab.setText(" Plot ");
this.plotTab.setToolTipText("Plot settings for this scan module");
this.plotTab.setControl(this.plotComposite);
this.actionsCompositeMaximized = false;
}
private void createEventsComposite(Composite parent) {
this.eventsComposite = new Composite(parent, SWT.BORDER);
this.eventsComposite.setLayout(new GridLayout(2, false));
this.eventMaxIcon = new Label(eventsComposite, SWT.NONE);
this.eventMaxIcon.setImage(maximizeIcon);
this.eventMaxIcon.addMouseListener(new MouseAdapter() {
@Override
public void mouseUp(MouseEvent e) {
if (eventsCompositeMaximized) {
eventMaxIcon.setImage(maximizeIcon);
eventMaxIcon.getParent().layout();
actionEventSashForm.setMaximizedControl(null);
eventsCompositeMaximized = false;
} else {
eventMaxIcon.setImage(restoreIcon);
actionEventSashForm.setMaximizedControl(eventsComposite);
eventsCompositeMaximized = true;
}
}
});
Label eventLabel = new Label(eventsComposite, SWT.NONE);
eventLabel.setText("Events:");
this.eventsTabFolder = new CTabFolder(this.eventsComposite, SWT.NONE);
this.eventsTabFolder.setSimple(true);
this.eventsTabFolder.setBorderVisible(true);
eventsTabFolder.addSelectionListener(
new EventsTabFolderSelectionListener());
GridData gridData = new GridData();
gridData.horizontalAlignment = GridData.FILL;
gridData.verticalAlignment = GridData.FILL;
gridData.grabExcessHorizontalSpace = true;
gridData.grabExcessVerticalSpace = true;
gridData.horizontalSpan = 2;
this.eventsTabFolder.setLayoutData(gridData);
pauseEventComposite = new EventComposite(eventsTabFolder, SWT.NONE,
ControlEventTypes.PAUSE_EVENT, this.parentView);
redoEventComposite = new EventComposite(eventsTabFolder, SWT.NONE,
ControlEventTypes.CONTROL_EVENT, this.parentView);
breakEventComposite = new EventComposite(eventsTabFolder, SWT.NONE,
ControlEventTypes.CONTROL_EVENT, this.parentView);
triggerEventComposite = new EventComposite(eventsTabFolder, SWT.NONE,
ControlEventTypes.CONTROL_EVENT, this.parentView);
this.pauseEventsTabItem = new CTabItem(eventsTabFolder, SWT.NONE);
this.pauseEventsTabItem.setText(" Pause ");
this.pauseEventsTabItem
.setToolTipText("Configure event to pause and resume this scan module");
this.pauseEventsTabItem.setControl(pauseEventComposite);
this.redoEventsTabItem = new CTabItem(eventsTabFolder, SWT.NONE);
this.redoEventsTabItem.setText(" Redo ");
this.redoEventsTabItem
.setToolTipText("Repeat the last acquisition, if redo event occurs");
this.redoEventsTabItem.setControl(redoEventComposite);
this.breakEventsTabItem = new CTabItem(eventsTabFolder, SWT.NONE);
this.breakEventsTabItem.setText(" Skip ");
this.breakEventsTabItem
.setToolTipText("Finish this scan module and continue with next");
this.breakEventsTabItem.setControl(breakEventComposite);
this.triggerEventsTabItem = new CTabItem(eventsTabFolder, SWT.NONE);
this.triggerEventsTabItem.setText(" Trigger ");
this.triggerEventsTabItem
.setToolTipText("Wait for trigger event before moving to next position");
this.triggerEventsTabItem.setControl(triggerEventComposite);
this.eventsCompositeMaximized = false;
}
private void bindValues() {
DataBindingContext context = new DataBindingContext();
ISelectionProvider selectionProvider = new ScanModuleSelectionProvider(
ScanModuleTypes.CLASSIC);
IObservableValue selectionObservable = ViewersObservables
.observeSingleSelection(selectionProvider);
IObservableValue valueCountTargetObservable = SWTObservables
.observeText(this.valueCountText, SWT.Modify);
IObservableValue valueCountModelObservable = BeansObservables.observeDetailValue(
selectionObservable, ScanModule.class,
ScanModule.VALUE_COUNT_PROP, Integer.class);
UpdateValueStrategy valueCountTargetToModelStrategy =
new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE);
valueCountTargetToModelStrategy
.setAfterGetValidator(new ValueCountValidator());
valueCountTargetToModelStrategy.setConverter(new ValueCountConverter());
this.valueCountBinding = context.bindValue(valueCountTargetObservable,
valueCountModelObservable, valueCountTargetToModelStrategy,
new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE));
ControlDecorationSupport.create(this.valueCountBinding, SWT.LEFT);
IObservableValue triggerDelayTargetObservable = SWTObservables.observeText(
this.triggerDelayText, SWT.Modify);
IObservableValue triggerDelayModelObservable = BeansObservables.observeDetailValue(
selectionObservable, ScanModule.class,
ScanModule.TRIGGER_DELAY_PROP, Double.class);
UpdateValueStrategy triggerDelayTargetToModelStrategy =
new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE);
triggerDelayTargetToModelStrategy
.setAfterGetValidator(new TriggerDelaySettleTimeValidator("Settle Time"));
triggerDelayTargetToModelStrategy
.setConverter(new TriggerDelaySettleTimeConverter());
UpdateValueStrategy triggerDelayModelToTargetStrategy =
new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE);
triggerDelayModelToTargetStrategy
.setConverter(new ModelToTargetConverter());
this.triggerDelayBinding = context.bindValue(
triggerDelayTargetObservable, triggerDelayModelObservable,
triggerDelayTargetToModelStrategy,
triggerDelayModelToTargetStrategy);
ControlDecorationSupport.create(this.triggerDelayBinding, SWT.LEFT);
IObservableValue settleTimeTargetObservable = SWTObservables.observeText(
this.settleTimeText, SWT.Modify);
IObservableValue settleTimeModelObservable = BeansObservables.observeDetailValue(
selectionObservable, ScanModule.class,
ScanModule.SETTLE_TIME_PROP, Double.class);
UpdateValueStrategy settleTimeTargetToModelStrategy =
new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE);
UpdateValueStrategy settleTimeModelToTargetStrategy =
new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE);
settleTimeModelToTargetStrategy
.setConverter(new ModelToTargetConverter());
settleTimeTargetToModelStrategy
.setAfterGetValidator(new TriggerDelaySettleTimeValidator(
"Settle Time"));
settleTimeTargetToModelStrategy
.setConverter(new TriggerDelaySettleTimeConverter());
this.settleTimeBinding = context.bindValue(settleTimeTargetObservable,
settleTimeModelObservable, settleTimeTargetToModelStrategy,
settleTimeModelToTargetStrategy);
ControlDecorationSupport.create(this.settleTimeBinding, SWT.LEFT);
IObservableValue axisTriggerTargetObservable = SWTObservables
.observeSelection(this.triggerConfirmAxisCheckBox);
IObservableValue axisTriggerModelObservable = BeansObservables.observeDetailValue(
selectionObservable, ScanModule.class,
ScanModule.TRIGGER_CONFIRM_AXIS_PROP, Boolean.class);
Binding axisTriggerBinding = context.bindValue(
axisTriggerTargetObservable, axisTriggerModelObservable,
new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE),
new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE));
axisTriggerBinding.getClass();
IObservableValue channelTriggerTargetObservable = SWTObservables
.observeSelection(this.triggerConfirmChannelCheckBox);
IObservableValue channelTriggerModelObservable = BeansObservables
.observeDetailValue(selectionObservable, ScanModule.class,
ScanModule.TRIGGER_CONFIRM_CHANNEL_PROP, Boolean.class);
Binding channelTriggerBinding = context.bindValue(
channelTriggerTargetObservable, channelTriggerModelObservable,
new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE),
new UpdateValueStrategy(UpdateValueStrategy.POLICY_UPDATE));
channelTriggerBinding.getClass();
}
/**
* {@inheritDoc}
*/
@Override
protected ScanModuleTypes getType() {
return ScanModuleTypes.CLASSIC;
}
/**
* {@inheritDoc}
*/
@Override
protected void setScanModule(ScanModule scanModule) {
LOGGER.debug("ClassicComposite#setScanModule: " + scanModule);
// if there was a scan module shown before, stop listening to changes
if (this.currentScanModule != null) {
this.currentScanModule.removeModelUpdateListener(this);
}
// set the new scan module as the current one
this.currentScanModule = scanModule;
// tell the action composites about the change
this.motorAxisComposite.setScanModule(scanModule);
this.detectorChannelComposite.setScanModule(scanModule);
this.prescanComposite.setScanModule(scanModule);
this.postscanComposite.setScanModule(scanModule);
this.positioningComposite.setScanModule(scanModule);
this.plotComposite.setScanModule(scanModule);
if (this.currentScanModule != null) {
// new scan module
this.currentScanModule.addModelUpdateListener(this);
if (this.eventsTabFolder.getSelection() == null) {
this.eventsTabFolder.setSelection(0);
}
int selectionIndex = this.actionsTabFolder.getSelectionIndex();
if (selectionIndex == -1) {
this.actionsTabFolder.setSelection(0);
} else {
this.actionsTabFolder.setSelection(selectionIndex);
}
sc.setMinSize(this.top.getBounds().width +
this.top.getBounds().x,
this.top.getBounds().height +
this.top.getBounds().y);
} else {
// no scan module selected -> reset contents
this.parentView.setSelectionProvider(null);
}
updateEvent(null);
this.layout();
}
/**
* {@inheritDoc}
*/
@Override
public void updateEvent(ModelUpdateEvent modelUpdateEvent) {
if (this.currentScanModule != null) {
this.triggerEventComposite.setEvents(this.currentScanModule,
EventImpacts.TRIGGER);
this.breakEventComposite.setEvents(this.currentScanModule,
EventImpacts.BREAK);
this.redoEventComposite.setEvents(this.currentScanModule,
EventImpacts.REDO);
this.pauseEventComposite.setEvents(this.currentScanModule,
EventImpacts.PAUSE);
checkForErrors();
} else {
triggerEventComposite.setEvents(this.currentScanModule, null);
breakEventComposite.setEvents(this.currentScanModule, null);
redoEventComposite.setEvents(this.currentScanModule, null);
pauseEventComposite.setEvents(this.currentScanModule, null);
}
}
private void checkForErrors() {
// check errors in Actions Tab
this.motorAxisTab.setImage(null);
this.detectorChannelTab.setImage(null);
this.prescanTab.setImage(null);
this.postscanTab.setImage(null);
this.positioningTab.setImage(null);
this.plotTab.setImage(null);
boolean motorAxisErrors = false;
boolean detectorChannelErrors = false;
boolean prescanErrors = false;
boolean postscanErrors = false;
boolean positioningErrors = false;
boolean plotWindowErrors = false;
for (IModelError error : this.currentScanModule.getModelErrors()) {
if (error instanceof AxisError) {
motorAxisErrors = true;
} else if (error instanceof ChannelError) {
detectorChannelErrors = true;
} else if (error instanceof PrescanError) {
prescanErrors = true;
} else if (error instanceof PostscanError) {
postscanErrors = true;
} else if (error instanceof PositioningError) {
positioningErrors = true;
} else if (error instanceof PlotWindowError) {
plotWindowErrors = true;
}
}
if (motorAxisErrors) {
this.motorAxisTab.setImage(PlatformUI.getWorkbench()
.getSharedImages()
.getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
}
if (detectorChannelErrors) {
this.detectorChannelTab.setImage(PlatformUI.getWorkbench()
.getSharedImages()
.getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
}
if (prescanErrors) {
this.prescanTab.setImage(PlatformUI.getWorkbench()
.getSharedImages()
.getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
}
if (postscanErrors) {
this.postscanTab.setImage(PlatformUI.getWorkbench()
.getSharedImages()
.getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
}
if (positioningErrors) {
this.positioningTab.setImage(PlatformUI.getWorkbench()
.getSharedImages()
.getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
}
if (plotWindowErrors) {
this.plotTab.setImage(PlatformUI.getWorkbench().getSharedImages()
.getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
}
// check errors in Events Tab
this.pauseEventsTabItem.setImage(null);
this.redoEventsTabItem.setImage(null);
this.breakEventsTabItem.setImage(null);
this.triggerEventsTabItem.setImage(null);
for (ControlEvent event : this.currentScanModule.getPauseEvents()) {
if (!event.getModelErrors().isEmpty()) {
this.pauseEventsTabItem.setImage(PlatformUI.getWorkbench()
.getSharedImages()
.getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
}
}
for (ControlEvent event : this.currentScanModule.getRedoEvents()) {
if (!event.getModelErrors().isEmpty()) {
this.redoEventsTabItem.setImage(PlatformUI.getWorkbench()
.getSharedImages()
.getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
}
}
for (ControlEvent event : this.currentScanModule.getBreakEvents()) {
if (!event.getModelErrors().isEmpty()) {
this.breakEventsTabItem.setImage(PlatformUI.getWorkbench()
.getSharedImages()
.getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
}
}
for (ControlEvent event : this.currentScanModule.getTriggerEvents()) {
if (!event.getModelErrors().isEmpty()) {
this.triggerEventsTabItem.setImage(PlatformUI.getWorkbench()
.getSharedImages()
.getImage(ISharedImages.IMG_OBJS_ERROR_TSK));
}
}
}
/**
* {@inheritDoc}
*/
@Override
protected void saveState(IMemento memento) {
// remember maximized states of sash form
memento.putBoolean(MEMENTO_ACTIONS_COMPOSITE_MAXIMIZED,
this.actionsCompositeMaximized);
memento.putBoolean(MEMENTO_EVENTS_COMPOSITE_MAXIMIZED,
this.eventsCompositeMaximized);
// save composite heights
memento.putInteger(MEMENTO_ACTIONS_COMPOSITE_WEIGHT,
actionEventSashForm.getWeights()[0]);
memento.putInteger(MEMENTO_EVENTS_COMPOSITE_WEIGHT,
actionEventSashForm.getWeights()[1]);
// remember sort state of action composite viewers
memento.putInteger(MEMENTO_AXES_SORT_STATE,
this.motorAxisComposite.getSortState());
memento.putInteger(MEMENTO_CHANNEL_SORT_STATE,
this.detectorChannelComposite.getSortState());
// remember selected action tab
memento.putInteger(MEMENTO_ACTIONS_TAB_FOLDER_SELECTION_INDEX,
this.actionsTabFolder.getSelectionIndex());
memento.putInteger(MEMENTO_EVENTS_TAB_FOLDER_SELECTION_INDEX,
this.eventsTabFolder.getSelectionIndex());
}
/**
* {@inheritDoc}
*/
@Override
protected void restoreState(IMemento memento) {
// restore maximized states
this.actionsCompositeMaximized = (memento
.getBoolean(MEMENTO_ACTIONS_COMPOSITE_MAXIMIZED) == null)
? false
: memento.getBoolean(MEMENTO_ACTIONS_COMPOSITE_MAXIMIZED);
this.eventsCompositeMaximized = (memento
.getBoolean(MEMENTO_EVENTS_COMPOSITE_MAXIMIZED) == null)
? false
: memento.getBoolean(MEMENTO_EVENTS_COMPOSITE_MAXIMIZED);
if (this.actionsCompositeMaximized) {
this.actionMaxIcon.setImage(restoreIcon);
this.actionEventSashForm.setMaximizedControl(actionsComposite);
}
if (this.eventsCompositeMaximized) {
this.eventMaxIcon.setImage(restoreIcon);
this.actionEventSashForm.setMaximizedControl(eventsComposite);
}
// restore sash form weights
int[] weights = new int[2];
weights[0] = (memento.getInteger(MEMENTO_ACTIONS_COMPOSITE_WEIGHT) == null)
? 1 : memento.getInteger(MEMENTO_ACTIONS_COMPOSITE_WEIGHT);
weights[1] = (memento.getInteger(MEMENTO_EVENTS_COMPOSITE_WEIGHT) == null)
? 1 : memento.getInteger(MEMENTO_EVENTS_COMPOSITE_WEIGHT);
actionEventSashForm.setWeights(weights);
// restore sort state of action composite viewers
if (memento.getInteger(MEMENTO_AXES_SORT_STATE) != null) {
this.motorAxisComposite.setSortState(memento
.getInteger(MEMENTO_AXES_SORT_STATE));
}
if (memento.getInteger(MEMENTO_CHANNEL_SORT_STATE) != null) {
this.detectorChannelComposite.setSortState(memento
.getInteger(MEMENTO_CHANNEL_SORT_STATE));
}
// restore selected tabs of tab folders
if (memento.getInteger(MEMENTO_ACTIONS_TAB_FOLDER_SELECTION_INDEX) != null) {
this.actionsTabFolder.setSelection(memento.getInteger(
MEMENTO_ACTIONS_TAB_FOLDER_SELECTION_INDEX));
}
if (memento.getInteger(MEMENTO_EVENTS_TAB_FOLDER_SELECTION_INDEX) != null) {
this.eventsTabFolder.setSelection(memento.getInteger(
MEMENTO_EVENTS_TAB_FOLDER_SELECTION_INDEX));
}
}
/**
* For Legacy Compatibility of Code Smells from EventMenuContributionHelper
* which accessed this (public) attribute directly before (when the classic
* parts were contained directly in the view).
* @return the selection index of the events tab folder or -1
* @since 1.31
*/
public int getEventsTabFolderSelectionIndex() {
return this.eventsTabFolder.getSelectionIndex();
}
private class EventsTabFolderSelectionListener extends SelectionAdapter {
@Override
public void widgetSelected(SelectionEvent e) {
switch (eventsTabFolder.getSelectionIndex()) {
case 0:
parentView.setSelectionProvider(
pauseEventComposite.getTableViewer());
break;
case 1:
parentView.setSelectionProvider(
redoEventComposite.getTableViewer());
break;
case 2:
parentView.setSelectionProvider(
breakEventComposite.getTableViewer());
break;
case 3:
parentView.setSelectionProvider(
triggerEventComposite.getTableViewer());
break;
default:
break;
}
}
}
private class TextFocusListener extends FocusAdapter {
private Text widget;
public TextFocusListener(Text widget) {
this.widget = widget;
}
@Override
public void focusLost(FocusEvent e) {
if (this.widget == valueCountText) {
valueCountBinding.updateModelToTarget();
} else if (this.widget == triggerDelayText) {
triggerDelayBinding.updateModelToTarget();
} else if (this.widget == settleTimeText) {
settleTimeBinding.updateModelToTarget();
}
}
}
}
| adjusted trigger delay tooltip text (ScanModuleView) | bundles/de.ptb.epics.eve.editor/src/de/ptb/epics/eve/editor/views/scanmoduleview/classiccomposite/ClassicComposite.java | adjusted trigger delay tooltip text (ScanModuleView) |
|
Java | mpl-2.0 | 0bcb8aeab57991a1f937cf982431a98f5cdebb4d | 0 | dumptruckman/PluginBase,dumptruckman/PluginBase | package pluginbase.config.serializers;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
class CharacterSerializer implements Serializer<Character> {
@Nullable
@Override
public Object serialize(@Nullable Character object, @NotNull SerializerSet serializerSet) throws IllegalArgumentException {
if (object == null) {
return null;
}
return object;
}
@Nullable
@Override
public Character deserialize(@Nullable Object serialized, @NotNull Class wantedType, @NotNull SerializerSet serializerSet) throws IllegalArgumentException {
if (serialized == null) {
return null;
}
String s = serialized.toString();
if (s.isEmpty()) {
return null;
}
return s.charAt(0);
}
}
| pluginbase-core/serializable-config/src/main/java/pluginbase/config/serializers/CharacterSerializer.java | package pluginbase.config.serializers;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
class CharacterSerializer implements Serializer<Character> {
@Nullable
@Override
public Object serialize(@Nullable Character object, @NotNull SerializerSet serializerSet) throws IllegalArgumentException {
if (object == null) {
return null;
}
return object.toString();
}
@Nullable
@Override
public Character deserialize(@Nullable Object serialized, @NotNull Class wantedType, @NotNull SerializerSet serializerSet) throws IllegalArgumentException {
if (serialized == null) {
return null;
}
String s = serialized.toString();
if (s.isEmpty()) {
return null;
}
return s.charAt(0);
}
}
| Corrected error in CharacterSerializer.
| pluginbase-core/serializable-config/src/main/java/pluginbase/config/serializers/CharacterSerializer.java | Corrected error in CharacterSerializer. |
|
Java | agpl-3.0 | 1cc1b2ac8619ea6e56223ee09fd9a3e3140456a2 | 0 | Bram28/LEGUP,Bram28/LEGUP,Bram28/LEGUP | package edu.rpi.phil.legup.newgui;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Polygon;
import java.awt.Stroke;
import java.awt.Shape;
import java.awt.event.MouseEvent;
import java.awt.event.MouseWheelEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseAdapter;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Line2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import javax.swing.JComponent;
import javax.swing.JViewport;
import javax.swing.ViewportLayout;
import javax.swing.event.PopupMenuListener;
import javax.swing.ImageIcon;
import edu.rpi.phil.legup.BoardDrawingHelper;
import edu.rpi.phil.legup.BoardState;
import edu.rpi.phil.legup.CaseRule;
import edu.rpi.phil.legup.Legup;
import edu.rpi.phil.legup.PuzzleModule;
import edu.rpi.phil.legup.Selection;
import edu.rpi.phil.legup.Justification;
import edu.rpi.phil.legup.Contradiction;
public class TreePanel extends DynamicViewer implements TransitionChangeListener, TreeSelectionListener
{
// Serialization id required by the Swing/JComponent Serializable contract.
private static final long serialVersionUID = 2272172376353427845L;
// Fill color of an ordinary (unjustified) proof-tree node.
public static final Color nodeColor = new Color(255,255,155);
// Radius, in pixels, of a normal node circle.
public static final int NODE_RADIUS = 10;
// Radius used for the small triangles that stand in for a collapsed run.
private static final int SMALL_NODE_RADIUS = 7;
// Spacing used when drawing the collapsed-node triangle cluster.
private static final int COLLAPSED_DRAW_DELTA_X = 10;
private static final int COLLAPSED_DRAW_DELTA_Y = 10;
// Outline rectangles of the current selection, cleared on every paint.
private ArrayList <Rectangle> currentStateBoxes = new ArrayList <Rectangle>();
private Point selectionOffset = null;
// Point of the most recent press/drag, in tree coordinates.
private Point lastMovePoint = null;
private static final float floater[] = new float[] {(float)(5.0), (float)(10.0)}; // dashed setup
private static final float floater2[] = new float[] {(float)(2.0), (float)(3.0)}; // dotted setup
// Reusable strokes for the various edge and outline styles.
private static final Stroke dashed = new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL, 10, floater, 0);
private static final Stroke dotted = new BasicStroke(2, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL, 10, floater2, 0);
private static final Stroke medium = new BasicStroke(2);
private static final Stroke thin = new BasicStroke(1);
// Bounding box of the whole tree, maintained by updateTreeSize().
private Rectangle bounds = new Rectangle(0,0,0,0);
private int xOffset = 0;
private int yOffset = 0;
// Mouse position of the latest move event; used by drawMouseOver().
private Point mousePoint;
// Node/transition currently under the cursor (null when none); see getMouseOver().
private static Selection mouseOver;
// Collapse color cache keyed by (x + y) of the collapsed node's location;
// written by getCollapseColor(), read by drawCollapsedNode().
private Map<Integer, Color> collapseColorHash = new HashMap<Integer, Color>();
//Path for node images
//Currently only classic and smiley options exist
private static final String NodeImgs = "images/tree/smiley/";
/**
 * Builds the tree panel, registers it for transition-change and
 * tree-selection events, and sets its initial and preferred sizes.
 */
public TreePanel()
{
super();
// System.out.println("TreePanel created");
BoardState.addTransitionChangeListener(this);
Legup.getInstance().getSelections().addTreeSelectionListener(this);
//setDefaultPosition(-60,-80);
setSize(new Dimension(100, 200));
setPreferredSize(new Dimension(640, 160));
//zoomTo(1);
//System.out.println("scale is " + getZoom());
//zoom(0, new Point(-60, 80));
}
// Pass-through to the DynamicViewer(boolean) constructor; note that this
// variant registers no listeners and sets no sizes.
public TreePanel(boolean b) { super(b); }
/**
 * ActionListener callback; this panel has no actions to handle.
 * @param e the action event (ignored)
 */
public void actionPerformed(ActionEvent e)
{
    // Intentionally empty.
}
/**
 * Convenience overload: walks to the end of a collapsed run without
 * reporting how many transitions were skipped.
 * @param s the first state of the (possibly collapsed) run
 * @return the last state of the collapsed chain starting at {@code s}
 */
private BoardState getLastCollapsed(BoardState s)
{
    // Delegate to the two-argument overload, discarding the count.
    return this.getLastCollapsed(s, null);
}
/**
 * Follows a single-child chain of collapsed states to its end.
 * @param s the state to start from
 * @param outptrNumTransitions optional 1-element out-parameter; element 0
 *        receives 1 if this level descended into a collapsed child, else 0
 * @return the deepest state reachable through collapsed single children
 */
private BoardState getLastCollapsed(BoardState s, int[] outptrNumTransitions)
{
    BoardState result = s;
    int transitionCount = 0;
    Vector<BoardState> kids = s.getChildren();
    // Only a lone, collapsed child continues the run.
    if (kids.size() == 1)
    {
        BoardState onlyChild = kids.get(0);
        if (onlyChild.isCollapsed())
        {
            transitionCount++;
            result = getLastCollapsed(onlyChild);
        }
    }
    if (outptrNumTransitions != null)
    {
        outptrNumTransitions[0] = transitionCount;
    }
    return result;
}
/**
 * Recursively computes the bounding rectangle of the subtree rooted at
 * {@code state}, padded so that rule popups are not clipped at the
 * current zoom level.
 * @param state root of the subtree to measure
 * @return the union of the padded bounds of every node in the subtree
 */
private Rectangle getTreeBounds( BoardState state ){
    // Start from this node's own position, padded by two radii per side.
    Rectangle box = new Rectangle( state.getLocation() );
    box.grow( 2*NODE_RADIUS, 2*NODE_RADIUS );
    // Widen for the zoom-dependent popup margin.
    float scale = (100/(float)getZoom());
    box.setBounds((int)box.getX()-(int)(100*scale), (int)box.getY(),
            (int)box.getWidth()+(int)(400*scale), (int)box.getHeight()+(int)(200*scale));
    // A collapsed run is measured from the last collapsed state's children.
    Vector<BoardState> kids = state.isCollapsed()
            ? getLastCollapsed(state).getChildren()
            : state.getChildren();
    // Fold in every child subtree.
    for (BoardState child : kids)
    {
        box = box.union( getTreeBounds( child ) );
    }
    return box;
}
/**
 * Recomputes the cached bounding box of the whole proof tree, resizes the
 * panel to it, and keeps the tree anchored 60px from the top edge.
 * Fix: the initial board state (a singleton lookup) was fetched twice;
 * it is now fetched once and reused.
 */
public void updateTreeSize()
{
    BoardState root = Legup.getInstance().getInitialBoardState();
    bounds = getTreeBounds(root);
    setSize(bounds.getSize());
    // Shift the whole tree so its top edge sits at y == 60.
    if( bounds.y != 60 )
    {
        root.adjustOffset( new Point( 60-bounds.y, 0 ) );
    }
}
/**
 * Shifts the root state's offset so the cached tree bounds start at the
 * origin, then recomputes the tree size. No-op when already at (0,0).
 */
public void reset()
{
BoardState state = Legup.getInstance().getInitialBoardState();
if( bounds.x != 0 || bounds.y != 0 )
{
state.setOffset( new Point( state.getOffset().x-bounds.x, state.getOffset().y-bounds.y ) );
updateTreeSize();
}
}
/**
 * Paints the whole proof tree: every node and transition, the cached
 * selection boxes, and (when hovering) the justification popup.
 * @param g the graphics context supplied by DynamicViewer
 */
public void draw( Graphics2D g )
{
currentStateBoxes.clear();
BoardState state = Legup.getInstance().getInitialBoardState();
if(state != null)
{
setSize( bounds.getSize() );
g.setRenderingHint( RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON );
g.setRenderingHint( RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON );
drawTree(g,state);
drawCurrentStateBoxes(g);
// Popup is drawn last so it sits on top of the tree.
if (mouseOver != null) drawMouseOver(g);
}
}
/**
 * Zooms so the entire tree fits in the viewport, then scrolls to the
 * origin. The -200/-120 terms trim the popup padding that
 * getTreeBounds() adds.
 * NOTE(review): a tree smaller than that padding would make the divisor
 * non-positive here - presumably never happens in practice; unchecked.
 */
public void zoomFit()
{
// find the ideal width and height scale
zoomTo(1.0);
updateTreeSize();
double fitwidth = (viewport.getWidth()-8.0) / (getSize().width - 200);
double fitheight = (viewport.getHeight()-8.0) / (getSize().height - 120);
// choose the smaller of the two and zoom
zoomTo( (fitwidth < fitheight) ? fitwidth : fitheight );
viewport.setViewPosition(new Point(0,0));
}
/** Restores 100% zoom and scrolls back to the origin. */
public void zoomReset()
{
zoomTo(1.0);
viewport.setViewPosition(new Point(0,0));
}
/**
 * Get the boardstate / transition at a point in the tree.
 * Fix: removed the unused local constant MAX_CLICK_DISTANCE_SQ and a
 * large block of commented-out triangle hit-test code; behavior is
 * unchanged.
 * @param state the state to check now (starts at root)
 * @param where the point where the user clicked
 * @return the node or transition the user selected, or null if he or she missed
 */
private Selection getSelectionAtPoint(BoardState state, Point where)
{
    if(state == null)return null;
    Selection rv = null;
    Point loc = state.getLocation();
    boolean isCollapsed = state.isCollapsed();
    // Collapsed nodes are drawn larger, so they get a larger hit area.
    final int radius = isCollapsed ? (2 * NODE_RADIUS) : NODE_RADIUS;
    Point draw = new Point(loc.x - radius, loc.y - radius);
    Shape myBounds;
    if(state.isModifiable())
    {
        // Transitions (triangles) get a generous 3-radius circular hit area.
        draw.x -= radius/2;
        draw.y -= radius/2;
        myBounds = new Ellipse2D.Float(draw.x,draw.y,3*radius,3*radius);
    }
    else
    {
        myBounds = new Ellipse2D.Float(draw.x,draw.y,2 * radius,2 * radius);
    }
    boolean stateSelected = myBounds.contains(where);
    if (stateSelected && isCollapsed)
    {
        // Interior states of a collapsed run cannot be selected.
        Vector <BoardState> parents = state.getParents();
        if (parents.size() == 1 && parents.get(0).isCollapsed())
            stateSelected = false; // can't select a collapsed state
    }
    if (stateSelected)
    {
        rv = new Selection(state,false);
    }
    else
    {
        // Recurse into children; the last (deepest) hit wins.
        for(BoardState b : state.getChildren())
        {
            Selection s = getSelectionAtPoint(b,where);
            if(s != null)rv = s;
        }
    }
    return rv;
}
/**
 * Toggle a state in a selection (something was ctrl + clicked)
 * @param state the state to check now (starts at root)
 * @param where the point where the user ctrl + clicked
 */
private void toggleSelection(BoardState state, Point where)
{
// A miss yields null; Selections is handed it unchanged.
Selection s = getSelectionAtPoint(state, where);
Legup.getInstance().getSelections().toggleSelection(s);
}
/**
 * Select a new state or transition that the user clicked on
 * @param state the state we're at
 * @param where the point where the user clicked
 * @return the new Selection (null when the click hit nothing)
 */
private Selection newSelection(BoardState state, Point where)
{
Selection s = getSelectionAtPoint(state, where);
Legup.getInstance().getSelections().setSelection(s);
return s;
}
/**
 * Tracks the hover target used by drawMouseOver(). Repaints whenever a
 * hover target exists or existed, and refreshes the whole GUI only when
 * the hovered node actually changed.
 */
protected void mouseMovedAt(Point p, MouseEvent e)
{
Selection prev = mouseOver;
mouseOver = getSelectionAtPoint(Legup.getInstance().getInitialBoardState(), p);
mousePoint = p;
if( prev != null || mouseOver != null )
repaint();
// XOR: hover was gained or lost.
if( prev != null ^ mouseOver != null )
Legup.getInstance().refresh();
// Hover moved from one node to a different one.
if( prev != null && mouseOver != null )
if( !prev.equals(mouseOver) )
Legup.getInstance().refresh();
}
/** @return the node/transition currently under the mouse, or null */
public static Selection getMouseOver()
{
return mouseOver;
}
// Records the drag start point: only the first drag event after a release
// sets it; later events see a non-null lastMovePoint and do nothing.
protected void mouseDraggedAt(Point p, MouseEvent e) {
if (lastMovePoint == null)
lastMovePoint = new Point(p);
// repaint();
}
/**
 * If the click landed on a transition, highlights that transition's
 * justification rule in the justification frame.
 * @param p the click location in tree coordinates
 */
protected void highlightSelectedTransition(Point p)
{
Selection sel = getSelectionAtPoint(Legup.getInstance().getInitialBoardState(), p);
if(sel != null && sel.getState().isModifiable())
{
Legup.getInstance().getGui().getJustificationFrame().
setSelectionByJustification(sel.getState().getJustification());
}
}
/**
 * Left-button release: ctrl-click toggles membership in the selection;
 * a plain click replaces the selection and highlights its justification.
 * @param p release location in tree coordinates
 * @param e the originating mouse event
 */
public void mouseReleasedAt(Point p, MouseEvent e)
{
if( e.getButton() == MouseEvent.BUTTON1 )
{
lastMovePoint = new Point(p);
if ( e.isControlDown() ) {
// add to selection
toggleSelection( Legup.getInstance().getInitialBoardState(), p );
} else {
// make a new selection
newSelection( Legup.getInstance().getInitialBoardState(), p );
highlightSelectedTransition(p);
}
// right click
}
}
// Zoom changed: the popup padding in getTreeBounds() is zoom-dependent,
// so the cached tree bounds must be recomputed.
public void mouseWheelMovedAt( MouseWheelEvent e )
{
updateTreeSize();
}
/**
 * Finalizes the currently selected transition, if it has pending cell or
 * extra-data changes, by ending it into a new child state.
 * @param justification unused - NOTE(review): kept only for the callers'
 * signature; confirm before removing
 * @return the state that was selected when the call was made
 */
public BoardState addChildAtCurrentState(Object justification)
{
Selection selection = Legup.getInstance().getSelections().getFirstSelection();
BoardState cur = selection.getState();
// Only a modifiable (transition) state with actual changes is ended.
if((cur.getChangedCells().size() > 0)||(cur.extraDataChanged()))
{
if (cur.isModifiable() && selection.isState())
{
Legup.setCurrentState(cur.endTransition());
}
}
updateTreeSize();
return cur;
}
/**
 * Toggles collapsing of the single-child chain below the selected state.
 * Transitions themselves cannot be collapsed.
 */
public void collapseCurrentState()
{
Selection s = Legup.getInstance().getSelections().getFirstSelection();
BoardState state = s.getState();
//Don't collapse if the selected node is a transition
if (state.isModifiable())
return;
//collapse should hide information about transitions
if (state.getChildren().size() == 1)
state.getChildren().get(0).toggleCollapse();
// Must run before the run is hidden: caches the run's overall color.
getCollapseColor(state);
updateTreeSize();
repaint();
}
//This function must be called before the board collapsing takes place, otherwise transition data will be hidden
/**
 * Caches the display color of a collapsed run: green only when no
 * transition in the single-child chain is marked incorrect, red otherwise.
 * The cache key is the (x + y) of the run's starting location, matching
 * the lookup in drawCollapsedNode().
 * @param lastCollapsed the first state of the run being collapsed
 */
public void getCollapseColor(BoardState lastCollapsed)
{
    Point loc = lastCollapsed.getLocation();
    final int hashSum = (int) loc.getX() + (int) loc.getY();
    // Walk the single-child chain; one red transition makes the whole run red.
    boolean allCorrect = true;
    for (BoardState cursor = lastCollapsed; cursor.getChildren().size() == 1; cursor = cursor.getChildren().get(0))
    {
        int status = cursor.getStatus();
        if (status == BoardState.STATUS_RULE_INCORRECT || status == BoardState.STATUS_CONTRADICTION_INCORRECT)
        {
            allCorrect = false;
        }
    }
    // Different parts of the tree may collapse differently, so cache per position.
    this.collapseColorHash.put(hashSum, allCorrect ? Color.GREEN : Color.RED);
}
/**
 * Delete the current state and associated transition then fix the children
 */
public void delCurrentState()
{
Selection s = Legup.getInstance().getSelections().getFirstSelection();
BoardState currentState = s.getState();
// make sure we don't delete the initial board state
if (currentState.getParents().size() == 0)
return;
// choose the previous state and move the children from after state
BoardState parentState = null;
BoardState childState = null;
if (currentState.isModifiable) {
} if (currentState.isModifiable()) {
// selected a transition: unlink it from its parent state
parentState = currentState.getSingleParentState();
childState = currentState.endTransition();
parentState.getChildren().remove(currentState);
currentState.getParents().remove(parentState);
} else {
// selected a state: unlink its incoming transition from the grandparent
parentState = currentState.getSingleParentState().getSingleParentState();
childState = currentState;
parentState.getChildren().remove(currentState.getSingleParentState());
currentState.getSingleParentState().getParents().remove(parentState);
}
// re-attach everything below the removed span to the surviving parent
BoardState.reparentChildren(childState, parentState);
// delete the current state
if (currentState.isModifiable()) {
currentState.deleteState();
} else {
currentState.getSingleParentState().deleteState();
}
Legup.getInstance().getSelections().setSelection(new Selection(parentState, false));
updateTreeSize();
}
/**
 * Delete the child and child's subtree starting at the current state
 */
public void delChildAtCurrentState()
{
if(!Legup.getInstance().getGui().checkImmediateFeedback())BoardState.removeColorsFromTransitions();
Selection s = Legup.getInstance().getSelections().getFirstSelection();
BoardState state = s.getState();
if (s.isState())
{ // state
// make sure we don't delete the initial board state
Vector<BoardState> parentStates = state.getParents();
if (parentStates.size() == 0)
return;
// use to select the previous state
BoardState parent = parentStates.get(0);
state.deleteState();
Legup.getInstance().getSelections().setSelection(new Selection(parent, false));
}
else
{ // transition, delete all the things we're trasitioning from
// select current state
Legup.getInstance().getSelections().setSelection(new Selection(state, false));
// delete children states
Vector <BoardState> children = state.getChildren();
// NOTE(review): assumes deleteState() does not itself remove the child
// from this vector (remove(0) follows separately) - confirm.
while (children.size() > 0)
{
BoardState child = children.get(0);
child.deleteState();
children.remove(0);
}
}
updateTreeSize();
}
/**
 * Merge the two or more selected states
 * TODO: add elegant error handling
 */
public void mergeStates()
{
    ArrayList<Selection> selected = Legup.getInstance().getSelections().getCurrentSelection();
    if (selected.size() > 1)
    {
        // Merging is only defined over plain states: no transitions and
        // no modifiable states may be in the selection.
        boolean allStates = true;
        for (Selection candidate : selected)
        {
            if (candidate.isTransition() || candidate.getState().isModifiable())
            {
                allStates = false;
                break;
            }
        }
        if (allStates)
        {
            // Collect the states in selection order and merge them.
            ArrayList<BoardState> parents = new ArrayList<BoardState>();
            for (Selection candidate : selected)
                parents.add(candidate.getState());
            BoardState.merge(parents, false);
        }
        else
            System.out.println("not all states");
    }
    else
        System.out.println("< 2 selected");
    updateTreeSize();
}
// TransitionChangeListener callback: any transition change can move nodes,
// so the cached tree bounds must be recomputed.
public void transitionChanged()
{
updateTreeSize();
}
// TreeSelectionListener callback; the selection is re-read during paint,
// so nothing needs to happen here.
public void treeSelectionChanged(ArrayList <Selection> newSelection)
{
}
/**
 * Recursively renders the tree below <code>state</code>.
 * Passing in the root node will effectively draw the entire tree.
 * @param g the Graphics to draw on
 * @param state the state we're drawing
 */
private void drawTree(Graphics g, BoardState state)
{
// System.out.println("Board dimensions are " + state.getWidth() + "x" + state.getHeight());
Graphics2D g2D = (Graphics2D)g;
ArrayList <Selection> sel = Legup.getInstance().getSelections().getCurrentSelection();
boolean isCollapsed = state.isCollapsed();
// Immediate-feedback flag: true = live status colors, false = delayed/muted.
boolean flag = LEGUP_Gui.profFlag(LEGUP_Gui.IMD_FEEDBACK);
Vector <BoardState> children = null;
Point draw;
g.setColor(Color.black);
draw = (Point)state.getLocation().clone();
if (!isCollapsed)
children = state.getChildren();
else
{
// Collapsed run: draw this node midway between the run's two ends and
// continue recursion from the last collapsed state's children.
int[] ptrNumTransitions = new int[1];
BoardState lastCollapsed = getLastCollapsed(state, ptrNumTransitions);
Point nextPoint = (Point)lastCollapsed.getLocation().clone();
draw.x = (draw.x + nextPoint.x)/2;
children = lastCollapsed.getChildren();
// System.out.println("Display collapse state....");
// System.out.println("board state: " + lastCollapsed.getLocation());
// getCollapseColor(lastCollapsed);
}
for (int c = 0; c < children.size(); ++c)
{
BoardState b = children.get(c);
Point childPoint = (Point)b.getLocation().clone();
if(b.isCollapsed())
{
childPoint.x = (childPoint.x + getLastCollapsed(state).getLocation().x)/2;
}
if (children.size() == 1)
{
// Single child: color the edge by the transition's (possibly delayed) status.
int status = (flag ? b.getStatus() : b.getDelayStatus());
if (status == BoardState.STATUS_RULE_CORRECT || status == BoardState.STATUS_CONTRADICTION_CORRECT)
{
g.setColor(flag ? Color.green : new Color(0x80ff80));
g2D.setStroke(medium);
}
else if (status == BoardState.STATUS_RULE_INCORRECT || status == BoardState.STATUS_CONTRADICTION_INCORRECT)
{
g.setColor(flag ? Color.red : new Color(0xff8080));
g2D.setStroke(medium);
}
else
g.setColor(flag ? Color.black : Color.gray);
drawTransition(new Line2D.Float(draw.x, draw.y, childPoint.x-NODE_RADIUS, childPoint.y), g, state, b.isCollapsed());
//System.out.format("%d, %d, %d, %d\n", childPoint.x, childPoint.y, state.getLocation().x, state.getLocation().y);
g2D.setStroke(thin);
}
else
/*
* We might need to do a dotted transition type thing because green implies justified,
* while a case rule is not justified until all but one child lead to a contradiction
*/
{
// Case split: edge color reflects whether the split is justified/valid.
if (state.getCaseSplitJustification() == null)
g.setColor(flag ? Color.black : Color.gray);
else if (state.isJustifiedCaseSplit() != null) // invalid split
g.setColor(flag ? Color.red : new Color(0xff8080));
else
g.setColor(flag ? Color.green : new Color(0x80ff80));
// set the stroke depending on whether it leads to a contradiction or is the last state
if (state.getCaseSplitJustification() == null)
g2D.setStroke(thin);
else if (b.leadsToContradiction())
{
g2D.setStroke(medium);
}
else
{
// maybe all the other ones are contradictions (proof by contradiction)
boolean allOthersLeadToContradiction = true;
for (int index = 0; index < children.size(); ++index)
{
if (c == index) // skip ourselves
continue;
BoardState sibling = children.get(index);
if (!sibling.leadsToContradiction())
{
allOthersLeadToContradiction = false;
break;
}
}
if (allOthersLeadToContradiction)
g2D.setStroke(medium);
else
g2D.setStroke(dotted);
}
drawTransition(new Line2D.Float(draw.x, draw.y, childPoint.x-NODE_RADIUS, childPoint.y), g, state, b.isCollapsed());
g2D.setStroke(thin);
}
//**********************Source of node issue*************************//
//if (b.getChildren().size() > 0)
drawTree(g, b);
//drawTree(g, b.getChildren().get(0));
}
Selection theSelection = new Selection(state,false);
if (sel.contains(theSelection))
{ // handle updating the selection information
int deltaY = 0;
int yRad = 36;
if (isCollapsed)
{
deltaY = -2 * COLLAPSED_DRAW_DELTA_Y; // times 2 because draw.y is already adjusted
yRad += 2 * COLLAPSED_DRAW_DELTA_Y;
}
//currentStateBoxes.add(new Rectangle(draw.x - 18, draw.y - 18 + deltaY,36,yRad));
}
// Edges are drawn first, then the node itself on top.
if (!isCollapsed)
{
drawNode(g, draw.x, draw.y, state);
}
else
drawCollapsedNode(g, draw.x, draw.y);
// to prevent the drawing of contradictions from taking over the CPU
// NOTE(review): sleeping inside a paint routine stalls the painting
// thread for every node drawn - confirm this throttle is still needed.
try {
Thread.sleep(1);
} catch (Exception e) {
System.err.println("zzz...");
}
}
/**
 * Draw the current transition (will make it blue if it's part of the selection)
 * @param trans the line of the transition we're drawing, starting at the source
 * @param g the graphics to use
 * @param parent the parent board state of the transition we're drawing
 * @param collapsedChild is the child we're connecting to a collapsed state
 */
private void drawTransition(Line2D.Float trans, Graphics g,
BoardState parent, boolean collapsedChild)
{
Graphics2D g2d = (Graphics2D)g;
ArrayList <Selection> sel = Legup.getInstance().getSelections().getCurrentSelection();
Selection theSelection = new Selection(parent,true);
int nodeRadius = collapsedChild ? SMALL_NODE_RADIUS : NODE_RADIUS;
g2d.setStroke(medium);
g.setColor(((sel.contains(theSelection)) ? Color.blue : Color.gray));
g2d.draw(trans);
// we also want to draw the arrowhead
final int ARROW_SIZE = 8;
// find the tip of the arrow, the point NODE_RADIUS away from the destination endpoint
double theta = Math.atan2(trans.y2 - trans.y1, trans.x2 - trans.x1);
// NOTE(review): these nx/ny values are overwritten before use below; the
// node-radius back-off appears unused - confirm before removing.
double nx = nodeRadius * Math.cos(theta);
double ny = nodeRadius * Math.sin(theta);
int px = Math.round(trans.x2);
int py = Math.round(trans.y2);
Polygon arrowhead = new Polygon();
arrowhead.addPoint(px, py);
// Step back ARROW_SIZE along the line direction to find the arrow base.
nx = (ARROW_SIZE) * Math.cos(theta);
ny = (ARROW_SIZE) * Math.sin(theta);
px = (int)Math.round(trans.x2 - nx);
py = (int)Math.round(trans.y2 - ny);
// px and py are now the "base" of the arrowhead
// Rotate +/-90 degrees to place the two base corners perpendicular to the line.
theta += Math.PI / 2.0;
double dx = (ARROW_SIZE / 2) * Math.cos(theta);
double dy = (ARROW_SIZE / 2) * Math.sin(theta);
arrowhead.addPoint((int)Math.round(px + dx), (int)Math.round(py + dy));
theta -= Math.PI;
dx = (ARROW_SIZE / 2) * Math.cos(theta);
dy = (ARROW_SIZE / 2) * Math.sin(theta);
arrowhead.addPoint((int)Math.round(px + dx), (int)Math.round(py + dy));
g2d.fill(arrowhead);
}
/**
 * Creates a triangle with specified x, y, and radius.
 * @param x of center of triangle
 * @param y of center of triangle
 * @param radius of circumscribing circle of triangle
 * @return a Polygon with the points of the requested triangle
 **/
private Polygon makeTriangle(int x, int y, double radius)
{
    Polygon triangle = new Polygon();
    // Three vertices on the circumscribing circle, 120 degrees apart,
    // starting at angle 0 (pointing right).
    for (int corner = 0; corner < 3; corner++)
    {
        double angle = Math.toRadians(120.0 * corner);
        triangle.addPoint((int) (x + radius * Math.cos(angle)), (int) (y + radius * Math.sin(angle)));
    }
    return triangle;
}
/**
 * Draw a node at a given location.
 * Non-modifiable states are circles; modifiable states (transitions) are
 * triangles, with a red X appended when their justification is a
 * Contradiction. Selected nodes get a blue, medium-weight outline.
 * Fix: removed the dead trailing local {@code boolean flag = ...} whose
 * value was never used, and the redundant nested brace block.
 * @param g the graphics to draw it with
 * @param x the x location of the center of the node
 * @param y the y location of the center of the node
 * @param state the state to draw
 */
private void drawNode( Graphics g, int x, int y, BoardState state ){
    final int diam = NODE_RADIUS + NODE_RADIUS;
    Graphics2D g2D = (Graphics2D)g;
    g2D.setStroke(thin);
    Polygon triangle = makeTriangle(x, y, 1.5*NODE_RADIUS);
    Selection theSelection = new Selection(state,false);
    ArrayList <Selection> sel = Legup.getInstance().getSelections().getCurrentSelection();
    g.setColor(state.getColor());
    if(!state.isModifiable())
    {
        // Plain state: filled circle, outlined blue when selected.
        g.fillOval( x - NODE_RADIUS, y - NODE_RADIUS, diam, diam );
        g.setColor((sel.contains(theSelection)? Color.blue : Color.black));
        g2D.setStroke((sel.contains(theSelection)? medium : thin));
        g.drawOval( x - NODE_RADIUS, y - NODE_RADIUS, diam, diam );
    }
    else
    {
        // Transition: filled triangle, outlined blue when selected.
        g2D.fill(triangle);
        g.setColor((sel.contains(theSelection)? Color.blue : Color.black));
        g2D.setStroke((sel.contains(theSelection)? medium : thin));
        g.drawPolygon(triangle);
        if(state.getJustification() instanceof Contradiction)
        {
            // Red X drawn 3 radii to the right of the triangle.
            g.setColor(Color.red);
            g2D.drawLine(x-NODE_RADIUS+3*NODE_RADIUS,y-NODE_RADIUS,x+NODE_RADIUS+3*NODE_RADIUS,y+NODE_RADIUS);
            g2D.drawLine(x+NODE_RADIUS+3*NODE_RADIUS,y-NODE_RADIUS,x-NODE_RADIUS+3*NODE_RADIUS,y+NODE_RADIUS);
            g.setColor((sel.contains(theSelection)? Color.blue : Color.black));
        }
    }
}
/**
 * Draw a collapsed node at the current location
 * @param g the Graphics to draw with
 * @param x the x location to draw it on
 * @param y the y location to draw it on
 */
private void drawCollapsedNode(Graphics g, int x, int y)
{
final int rad = SMALL_NODE_RADIUS;
final int diam = 2 * rad;
final int deltaX = -COLLAPSED_DRAW_DELTA_X;
// NOTE(review): deltaY is declared but unused in this body - confirm.
final int deltaY = -COLLAPSED_DRAW_DELTA_Y;
// Must reproduce the key computed in getCollapseColor() for this node.
final int hashSum = (x+y - 6*NODE_RADIUS); //have to take collapse offset into account
Graphics2D g2D = (Graphics2D)g;
g2D.setStroke(thin);
g2D.setColor(Color.black);
// Three small triangles drawn side by side stand in for the hidden run.
// NOTE(review): a missed cache lookup passes null to setColor - confirm
// the key here always matches the one written by getCollapseColor().
for (int c = 0; c < 3; ++c)
{
Polygon tri = makeTriangle(x - rad + (c - 1) * deltaX, y, diam/2);
g.setColor(collapseColorHash.get(hashSum));
g.fillPolygon(tri);
g.setColor(Color.black);
g.drawPolygon(tri);
}
}
/**
 * Draw the current state boxes (the cached selection)
 * @param g the graphics to use to draw
 */
private void drawCurrentStateBoxes(Graphics g)
{
    if (currentStateBoxes != null)
    {
        // Selection outlines are blue and dashed.
        Graphics2D g2d = (Graphics2D) g;
        g.setColor(Color.blue);
        g2d.setStroke(dashed);
        for (Rectangle box : currentStateBoxes)
            g2d.draw(box);
    }
}
/**
 * Draws the hover popup for the node under the mouse: any justification
 * text, plus the icon of the rule / contradiction / case rule applied.
 * @param g the tree panel graphics to composite the popup onto
 */
private void drawMouseOver(Graphics2D g)
{
BoardState B = mouseOver.getState();
//J contains both basic rules and contradictions
Justification J = B.getJustification();
// NOTE(review): w, h and offset are computed but never used - confirm.
int w, h;
g.setStroke(thin);
// Scale keeps the popup a constant on-screen size regardless of zoom.
w = (int)(100 * (100/(float)getZoom()));
h = (int)(100 * (100/(float)getZoom()));
float scale = (100/(float)getZoom());
int offset = (int)(scale*30);
JViewport vp = getViewport();
// Compose the popup off-screen first, then blit it next to the cursor.
BufferedImage image = new BufferedImage(vp.getWidth(), vp.getHeight(), BufferedImage.TYPE_INT_ARGB);
Graphics2D g_tmp = image.createGraphics();
int v_offset = 0;
if((mouseOver.getState().getJustification() != null)||(mouseOver.getState().getCaseRuleJustification() != null))
{
if((mouseOver.getState().justificationText != null)&&(mouseOver.getState().getColor() != TreePanel.nodeColor))
{
// One text row per '\n'-separated line, 14px apart.
g_tmp.setColor(Color.black);
String[] tmp = mouseOver.getState().justificationText.split("\n");
v_offset = 10+tmp.length*14;
for(int c1=0;c1<tmp.length;c1++)
{
g_tmp.drawString(tmp[c1],0,(14*c1)+10);
}
}
g_tmp.setColor(Color.gray);
g_tmp.drawRect(0,v_offset,100,100);
}
if (J != null)
{
g_tmp.drawImage(J.getImageIcon().getImage(), 0, v_offset, null);
}
CaseRule CR = B.getCaseSplitJustification();
if (CR != null)
{
// NOTE(review): this return skips the drawImage below, so a case-rule
// popup is composed but never blitted to the screen - confirm intent.
g_tmp.drawImage(CR.getImageIcon().getImage(), 0, v_offset, null);
return;
}
g.drawImage(image, mousePoint.x+(int)(scale*30), mousePoint.y-(int)(scale*30), (int)(scale*vp.getWidth()), (int)(scale*vp.getHeight()), null);
}
}
| code/edu/rpi/phil/legup/newgui/TreePanel.java | package edu.rpi.phil.legup.newgui;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Polygon;
import java.awt.Stroke;
import java.awt.Shape;
import java.awt.event.MouseEvent;
import java.awt.event.MouseWheelEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseAdapter;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Line2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Vector;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import javax.swing.JComponent;
import javax.swing.JViewport;
import javax.swing.ViewportLayout;
import javax.swing.event.PopupMenuListener;
import javax.swing.ImageIcon;
import edu.rpi.phil.legup.BoardDrawingHelper;
import edu.rpi.phil.legup.BoardState;
import edu.rpi.phil.legup.CaseRule;
import edu.rpi.phil.legup.Legup;
import edu.rpi.phil.legup.PuzzleModule;
import edu.rpi.phil.legup.Selection;
import edu.rpi.phil.legup.Justification;
import edu.rpi.phil.legup.Contradiction;
public class TreePanel extends DynamicViewer implements TransitionChangeListener, TreeSelectionListener
{
private static final long serialVersionUID = 2272172376353427845L;
public static final Color nodeColor = new Color(255,255,155);
public static final int NODE_RADIUS = 10;
private static final int SMALL_NODE_RADIUS = 7;
private static final int COLLAPSED_DRAW_DELTA_X = 10;
private static final int COLLAPSED_DRAW_DELTA_Y = 10;
private ArrayList <Rectangle> currentStateBoxes = new ArrayList <Rectangle>();
private Point selectionOffset = null;
private Point lastMovePoint = null;
private static final float floater[] = new float[] {(float)(5.0), (float)(10.0)}; // dashed setup
private static final float floater2[] = new float[] {(float)(2.0), (float)(3.0)}; // dotted setup
private static final Stroke dashed = new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL, 10, floater, 0);
private static final Stroke dotted = new BasicStroke(2, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL, 10, floater2, 0);
private static final Stroke medium = new BasicStroke(2);
private static final Stroke thin = new BasicStroke(1);
private Rectangle bounds = new Rectangle(0,0,0,0);
private int xOffset = 0;
private int yOffset = 0;
private Point mousePoint;
private static Selection mouseOver;
//Path for node images
//Currently only classic and smiley options exist
private static final String NodeImgs = "images/tree/smiley/";
public TreePanel()
{
super();
// System.out.println("TreePanel created");
BoardState.addTransitionChangeListener(this);
Legup.getInstance().getSelections().addTreeSelectionListener(this);
//setDefaultPosition(-60,-80);
setSize(new Dimension(100, 200));
setPreferredSize(new Dimension(640, 160));
//zoomTo(1);
//System.out.println("scale is " + getZoom());
//zoom(0, new Point(-60, 80));
}
public TreePanel(boolean b) { super(b); }
public void actionPerformed(ActionEvent e)
{
// System.out.println("actionPerformed");
}
private BoardState getLastCollapsed(BoardState s)
{
return getLastCollapsed(s, null);
}
private BoardState getLastCollapsed(BoardState s, int[] outptrNumTransitions)
{
Vector <BoardState> children = s.getChildren();
BoardState rv = s;
int numTransitions = 0;
;
if (children.size() == 1)
{
BoardState child = children.get(0);
if (child.isCollapsed())
{
++numTransitions;
rv = getLastCollapsed(child);
}
}
if(outptrNumTransitions != null) { outptrNumTransitions[0] = numTransitions; }
return rv;
}
// recursively computes the bounding rectangle of the tree
private Rectangle getTreeBounds( BoardState state ){
// get the position of the current node and add padding
Rectangle b = new Rectangle( state.getLocation() );
b.grow( 2*NODE_RADIUS, 2*NODE_RADIUS );
// Adjust the rectangle so that rule popups aren't cut off
float scale = (100/(float)getZoom());
b.setBounds((int)b.getX()-(int)(100*scale), (int)b.getY(), (int)b.getWidth()+(int)(400*scale), (int)b.getHeight()+(int)(200*scale));
// get the relevant child nodes
Vector <BoardState> children = state.isCollapsed()
? getLastCollapsed(state).getChildren()
: state.getChildren();
// compute the union of the child bounding boxes recursively
for (int c = 0; c < children.size(); c++)
{
b = b.union( getTreeBounds( children.get(c) ) );
}
return b;
}
public void updateTreeSize()
{
bounds = getTreeBounds(Legup.getInstance().getInitialBoardState());
setSize(bounds.getSize());
BoardState state = Legup.getInstance().getInitialBoardState();
if( bounds.y != 60 )
{
state.adjustOffset( new Point( 60-bounds.y, 0 ) );
}
}
public void reset()
{
BoardState state = Legup.getInstance().getInitialBoardState();
if( bounds.x != 0 || bounds.y != 0 )
{
state.setOffset( new Point( state.getOffset().x-bounds.x, state.getOffset().y-bounds.y ) );
updateTreeSize();
}
}
public void draw( Graphics2D g )
{
currentStateBoxes.clear();
BoardState state = Legup.getInstance().getInitialBoardState();
if(state != null)
{
setSize( bounds.getSize() );
g.setRenderingHint( RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON );
g.setRenderingHint( RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON );
drawTree(g,state);
drawCurrentStateBoxes(g);
if (mouseOver != null) drawMouseOver(g);
}
}
public void zoomFit()
{
// find the ideal width and height scale
zoomTo(1.0);
updateTreeSize();
double fitwidth = (viewport.getWidth()-8.0) / (getSize().width - 200);
double fitheight = (viewport.getHeight()-8.0) / (getSize().height - 120);
// choose the smaller of the two and zoom
zoomTo( (fitwidth < fitheight) ? fitwidth : fitheight );
viewport.setViewPosition(new Point(0,0));
}
public void zoomReset()
{
zoomTo(1.0);
viewport.setViewPosition(new Point(0,0));
}
/**
* Get the boardstate / transition at a point in the tree
* @param state the state to check now (starts at root)
* @param where the point where the user clicked
* @return the node or transition the user selected, or null if he or she missed
*/
private Selection getSelectionAtPoint(BoardState state, Point where)
{
if(state == null)return null;
Selection rv = null;
Point loc = state.getLocation();
boolean isCollapsed = state.isCollapsed();
final int radius = isCollapsed ? (2 * NODE_RADIUS) : NODE_RADIUS;
Point draw = new Point(loc.x - radius, loc.y - radius);
// distance from a transition which is considered clicking on it, squared
final int MAX_CLICK_DISTANCE_SQ = 5*5;
Shape myBounds;
//System.out.println("getSelectionAtPoint called for (" + where.x + "," + where.y + ") on node at point (" + state.getLocation().x + "," + state.getLocation().y + ")");
if(state.isModifiable())
{
/*draw.x += 128;
int[] points_x = new int[3];
int[] points_y = new int[3];
for(int c1 = 0;c1 < 3;c1+=1)
{
points_x[c1] = (int)(draw.x+radius*Math.cos(Math.toRadians(c1*120)));
points_y[c1] = (int)(draw.y+radius*Math.sin(Math.toRadians(c1*120)));
}
myBounds = new Polygon(points_x,points_y,3);*/
draw.x -= radius/2;
draw.y -= radius/2;
myBounds = new Ellipse2D.Float(draw.x,draw.y,3*radius,3*radius);
}
else
{
myBounds = new Ellipse2D.Float(draw.x,draw.y,2 * radius,2 * radius);
}
boolean stateSelected = myBounds.contains(where);
if (stateSelected && isCollapsed)
{
Vector <BoardState> parents = state.getParents();
if (parents.size() == 1 && parents.get(0).isCollapsed())
stateSelected = false; // can't select a collapsed state
}
if (stateSelected)
{
rv = new Selection(state,false);
}
else
{
for(BoardState b : state.getChildren())
{
Selection s = getSelectionAtPoint(b,where);
if(s != null)rv = s;
}
}
return rv;
}
/**
* Toggle a state in a selection (something was ctrl + clicked)
* @param state the state to check now (starts at root)
* @param bounds the bounds of the state and all it's children
* @param where the point where the user ctrl + clicked
*/
private void toggleSelection(BoardState state, Point where)
{
Selection s = getSelectionAtPoint(state, where);
Legup.getInstance().getSelections().toggleSelection(s);
}
/**
 * Select a new state or transition that the user clicked on, replacing
 * the current selection.
 * @param state the state we're at
 * @param where the point where the user clicked
 * @return the new Selection, or null when nothing was under the point
 */
private Selection newSelection(BoardState state, Point where)
{
    Selection s = getSelectionAtPoint(state, where);
    Legup.getInstance().getSelections().setSelection(s);
    return s;
}
/**
 * Track the node under the cursor for hover feedback.
 * Repaints whenever a hover target exists before or after the move, and
 * refreshes the whole GUI when the hover target actually changed.
 * @param p the current mouse position
 * @param e the originating mouse event (unused)
 */
protected void mouseMovedAt(Point p, MouseEvent e)
{
    Selection previous = mouseOver;
    mouseOver = getSelectionAtPoint(Legup.getInstance().getInitialBoardState(), p);
    mousePoint = p;

    boolean hadTarget = (previous != null);
    boolean hasTarget = (mouseOver != null);

    if (hadTarget || hasTarget)
        repaint();
    // hover appeared or disappeared
    if (hadTarget ^ hasTarget)
        Legup.getInstance().refresh();
    // hover moved from one node to a different one
    if (hadTarget && hasTarget && !previous.equals(mouseOver))
        Legup.getInstance().refresh();
}
/**
 * @return the Selection currently under the mouse cursor, or null when the
 *         cursor is not over any node (static — shared hover state)
 */
public static Selection getMouseOver()
{
    return mouseOver;
}
/**
 * Record the anchor point of a drag. Only the first drag event sets
 * lastMovePoint; later events keep the original anchor.
 * @param p the current drag location
 * @param e the originating mouse event (unused)
 */
protected void mouseDraggedAt(Point p, MouseEvent e) {
    if (lastMovePoint == null)
        lastMovePoint = new Point(p);
    // repaint();
}
/**
 * If the point lies on a transition (modifiable state), select its
 * justification in the justification frame.
 * @param p the point to test against the tree
 */
protected void highlightSelectedTransition(Point p)
{
    Selection hit = getSelectionAtPoint(Legup.getInstance().getInitialBoardState(), p);
    if (hit == null)
        return;
    BoardState hitState = hit.getState();
    if (!hitState.isModifiable())
        return;
    Legup.getInstance().getGui().getJustificationFrame()
         .setSelectionByJustification(hitState.getJustification());
}
/**
 * Handle a mouse release: left button updates the selection.
 * Ctrl toggles the clicked node in the selection set; a plain click
 * replaces the selection and highlights the clicked transition's
 * justification.
 * @param p the release location
 * @param e the originating mouse event
 */
public void mouseReleasedAt(Point p, MouseEvent e)
{
    if (e.getButton() != MouseEvent.BUTTON1)
        return; // only the left button changes the selection

    lastMovePoint = new Point(p);
    if (e.isControlDown())
    {
        // ctrl-click: toggle membership in the current selection
        toggleSelection(Legup.getInstance().getInitialBoardState(), p);
    }
    else
    {
        // plain click: replace the selection
        newSelection(Legup.getInstance().getInitialBoardState(), p);
        highlightSelectedTransition(p);
    }
}
/**
 * Recompute the cached tree layout after a mouse wheel event.
 * @param e the wheel event (unused here)
 */
public void mouseWheelMovedAt( MouseWheelEvent e )
{
    updateTreeSize();
}
/**
 * Finish the transition at the currently selected state, if that state is a
 * modifiable transition with pending changes, and relayout the tree.
 * @param justification the justification for the new child (currently unused)
 * @return the state that was selected when the method was invoked
 */
public BoardState addChildAtCurrentState(Object justification)
{
    Selection first = Legup.getInstance().getSelections().getFirstSelection();
    BoardState selectedState = first.getState();

    // only act when something was actually changed in the selected state
    boolean hasChanges = !selectedState.getChangedCells().isEmpty()
                         || selectedState.extraDataChanged();
    if (hasChanges && selectedState.isModifiable() && first.isState())
    {
        Legup.setCurrentState(selectedState.endTransition());
    }
    updateTreeSize();
    return selectedState;
}
/**
 * Toggle collapsing of the subtree below the currently selected state.
 * Transitions (modifiable states) cannot themselves be collapsed; only a
 * state with exactly one child transition toggles.
 */
public void collapseCurrentState()
{
    BoardState selected = Legup.getInstance().getSelections().getFirstSelection().getState();

    // a transition node cannot be collapsed
    if (selected.isModifiable())
        return;

    // collapsing hides information about the single transition below
    Vector<BoardState> kids = selected.getChildren();
    if (kids.size() == 1)
        kids.get(0).toggleCollapse();

    updateTreeSize();
    repaint();
}
/**
 * Delete the current state and associated transition then fix the children.
 * Works for either half of a state/transition pair: whichever node is
 * selected, both the state and the transition leading into it are removed
 * and the orphaned subtree is reattached to the surviving ancestor.
 */
public void delCurrentState()
{
    Selection s = Legup.getInstance().getSelections().getFirstSelection();
    BoardState currentState = s.getState();

    // make sure we don't delete the initial board state
    if (currentState.getParents().size() == 0)
        return;

    // choose the previous state and move the children from after state
    BoardState parentState = null;
    BoardState childState = null;
    if (currentState.isModifiable()) {
        // selected node is a transition: unhook it from its parent state
        // and reattach everything below the state it produces
        parentState = currentState.getSingleParentState();
        childState = currentState.endTransition();
        parentState.getChildren().remove(currentState);
        currentState.getParents().remove(parentState);
    } else {
        // selected node is a plain state: remove the transition leading in
        // (grandparent state <- transition <- currentState)
        parentState = currentState.getSingleParentState().getSingleParentState();
        childState = currentState;
        parentState.getChildren().remove(currentState.getSingleParentState());
        currentState.getSingleParentState().getParents().remove(parentState);
    }
    // reattach the orphaned subtree BEFORE deleting — deleteState below
    // operates on the already-unlinked node
    BoardState.reparentChildren(childState, parentState);

    // delete the current state
    if (currentState.isModifiable()) {
        currentState.deleteState();
    } else {
        currentState.getSingleParentState().deleteState();
    }

    Legup.getInstance().getSelections().setSelection(new Selection(parentState, false));
    updateTreeSize();
}
/**
 * Delete the child and child's subtree starting at the current state.
 * When a plain state is selected, the state itself (and its subtree) is
 * deleted; when a transition is selected, all of its child states are
 * deleted instead.
 */
public void delChildAtCurrentState()
{
    // without immediate feedback, clear status coloring before mutating the tree
    if(!Legup.getInstance().getGui().checkImmediateFeedback())BoardState.removeColorsFromTransitions();
    Selection s = Legup.getInstance().getSelections().getFirstSelection();
    BoardState state = s.getState();
    if (s.isState())
    { // state
        // make sure we don't delete the initial board state
        Vector<BoardState> parentStates = state.getParents();
        if (parentStates.size() == 0)
            return;
        // use to select the previous state
        BoardState parent = parentStates.get(0);
        state.deleteState();
        Legup.getInstance().getSelections().setSelection(new Selection(parent, false));
    }
    else
    { // transition, delete all the things we're transitioning from
        // select current state
        Legup.getInstance().getSelections().setSelection(new Selection(state, false));
        // delete children states
        Vector <BoardState> children = state.getChildren();
        while (children.size() > 0)
        {
            BoardState child = children.get(0);
            child.deleteState();
            // NOTE(review): remove(0) after deleteState assumes deleteState does
            // not itself detach the child from this Vector — confirm, otherwise
            // this would skip every other child
            children.remove(0);
        }
    }
    updateTreeSize();
}
/**
 * Merge the two or more selected states into one.
 * Only plain (non-transition, non-modifiable) states can be merged; any
 * other selection is rejected with a console message.
 * TODO: add elegant error handling
 */
public void mergeStates()
{
    ArrayList<Selection> chosen = Legup.getInstance().getSelections().getCurrentSelection();
    if (chosen.size() > 1)
    {
        // every selected node must be a plain, non-modifiable state
        boolean onlyPlainStates = true;
        for (Selection candidate : chosen)
        {
            if (candidate.isTransition() || candidate.getState().isModifiable())
            {
                onlyPlainStates = false;
                break;
            }
        }
        if (onlyPlainStates)
        {
            ArrayList<BoardState> parents = new ArrayList<BoardState>();
            for (Selection candidate : chosen)
                parents.add(candidate.getState());
            BoardState.merge(parents, false);
        }
        else
            System.out.println("not all states");
    }
    else
        System.out.println("< 2 selected");
    updateTreeSize();
}
/**
 * Callback invoked when a transition changed; relayout the tree.
 */
public void transitionChanged()
{
    updateTreeSize();
}
/**
 * Listener callback for tree selection changes; intentionally a no-op in
 * this panel.
 * @param newSelection the new set of selections (ignored)
 */
public void treeSelectionChanged(ArrayList <Selection> newSelection)
{
}
/**
 * Recursively renders the tree below <code>state</code>.
 * Passing in the root node will effectively draw the entire tree.
 * Transition edges are colored by their (possibly delayed) status, case
 * splits get dotted/solid strokes depending on whether the siblings lead to
 * contradictions, and collapsed runs are drawn as a single midpoint node.
 * @param g the Graphics to draw on
 * @param state the state we're drawing
 */
private void drawTree(Graphics g, BoardState state)
{
    // System.out.println("Board dimensions are " + state.getWidth() + "x" + state.getHeight());
    Graphics2D g2D = (Graphics2D)g;
    ArrayList <Selection> sel = Legup.getInstance().getSelections().getCurrentSelection();
    boolean isCollapsed = state.isCollapsed();
    // immediate-feedback on: live status + saturated colors; off: delayed status + muted colors
    boolean flag = LEGUP_Gui.profFlag(LEGUP_Gui.IMD_FEEDBACK);
    Vector <BoardState> children = null;
    Point draw;
    g.setColor(Color.black);
    draw = (Point)state.getLocation().clone();
    if (!isCollapsed)
        children = state.getChildren();
    else
    {
        // collapsed run: draw midway between this node and the last collapsed
        // descendant, and connect directly to that descendant's children
        int[] ptrNumTransitions = new int[1];
        BoardState lastCollapsed = getLastCollapsed(state, ptrNumTransitions);
        Point nextPoint = (Point)lastCollapsed.getLocation().clone();
        draw.x = (draw.x + nextPoint.x)/2;
        children = lastCollapsed.getChildren();
    }
    for (int c = 0; c < children.size(); ++c)
    {
        BoardState b = children.get(c);
        Point childPoint = (Point)b.getLocation().clone();
        if(b.isCollapsed())
        {
            // aim the edge at the midpoint of the collapsed run
            childPoint.x = (childPoint.x + getLastCollapsed(state).getLocation().x)/2;
        }
        if (children.size() == 1)
        {
            // single child: plain transition, colored by its status
            int status = (flag ? b.getStatus() : b.getDelayStatus());
            if (status == BoardState.STATUS_RULE_CORRECT || status == BoardState.STATUS_CONTRADICTION_CORRECT)
            {
                g.setColor(flag ? Color.green : new Color(0x80ff80));
                g2D.setStroke(medium);
            }
            else if (status == BoardState.STATUS_RULE_INCORRECT || status == BoardState.STATUS_CONTRADICTION_INCORRECT)
            {
                g.setColor(flag ? Color.red : new Color(0xff8080));
                g2D.setStroke(medium);
            }
            else
                g.setColor(flag ? Color.black : Color.gray);
            drawTransition(new Line2D.Float(draw.x, draw.y, childPoint.x-NODE_RADIUS, childPoint.y), g, state, b.isCollapsed());
            //System.out.format("%d, %d, %d, %d\n", childPoint.x, childPoint.y, state.getLocation().x, state.getLocation().y);
            g2D.setStroke(thin);
        }
        else
        /*
         * We might need to do a dotted transition type thing because green implies justified,
         * while a case rule is not justified until all but one child lead to a contradiction
         */
        {
            // case split: color by the validity of the split's justification
            if (state.getCaseSplitJustification() == null)
                g.setColor(flag ? Color.black : Color.gray);
            else if (state.isJustifiedCaseSplit() != null) // invalid split
                g.setColor(flag ? Color.red : new Color(0xff8080));
            else
                g.setColor(flag ? Color.green : new Color(0x80ff80));
            // set the stroke depending on whether it leads to a contradiction or is the last state
            if (state.getCaseSplitJustification() == null)
                g2D.setStroke(thin);
            else if (b.leadsToContradiction())
            {
                g2D.setStroke(medium);
            }
            else
            {
                // maybe all the other ones are contradictions (proof by contradiction)
                boolean allOthersLeadToContradiction = true;
                for (int index = 0; index < children.size(); ++index)
                {
                    if (c == index) // skip ourselves
                        continue;
                    BoardState sibling = children.get(index);
                    if (!sibling.leadsToContradiction())
                    {
                        allOthersLeadToContradiction = false;
                        break;
                    }
                }
                if (allOthersLeadToContradiction)
                    g2D.setStroke(medium);
                else
                    g2D.setStroke(dotted);
            }
            drawTransition(new Line2D.Float(draw.x, draw.y, childPoint.x-NODE_RADIUS, childPoint.y), g, state, b.isCollapsed());
            g2D.setStroke(thin);
        }
        //**********************Source of node issue*************************//
        //if (b.getChildren().size() > 0)
        // recurse into the child subtree
        drawTree(g, b);
        //drawTree(g, b.getChildren().get(0));
    }
    Selection theSelection = new Selection(state,false);
    if (sel.contains(theSelection))
    { // handle updating the selection information
        int deltaY = 0;
        int yRad = 36;
        if (isCollapsed)
        {
            deltaY = -2 * COLLAPSED_DRAW_DELTA_Y; // times 2 because draw.y is already adjusted
            yRad += 2 * COLLAPSED_DRAW_DELTA_Y;
        }
        //currentStateBoxes.add(new Rectangle(draw.x - 18, draw.y - 18 + deltaY,36,yRad));
    }
    if (!isCollapsed)
    {
        drawNode(g,draw.x, draw.y,state);
    }
    else
        drawCollapsedNode(g,draw.x,draw.y);
    // to prevent the drawing of contradictions from taking over the CPU
    // NOTE(review): sleeping on the paint path throttles the EDT; deliberate
    // per the comment above, but worth revisiting
    try {
        Thread.sleep(1);
    } catch (Exception e) {
        System.err.println("zzz...");
    }
}
/**
 * Draw the current transition (will make it blue if it's part of the selection)
 * @param trans the line of the transition we're drawing, starting at the source
 * @param g the graphics to use
 * @param parent the parent board state of the transition we're drawing
 * @param collapsedChild is the child we're connecting to a collapsed state
 */
private void drawTransition(Line2D.Float trans, Graphics g,
        BoardState parent, boolean collapsedChild)
{
    Graphics2D g2d = (Graphics2D)g;
    ArrayList <Selection> sel = Legup.getInstance().getSelections().getCurrentSelection();
    Selection theSelection = new Selection(parent,true);
    int nodeRadius = collapsedChild ? SMALL_NODE_RADIUS : NODE_RADIUS;
    g2d.setStroke(medium);
    g.setColor(((sel.contains(theSelection)) ? Color.blue : Color.gray));
    g2d.draw(trans);
    // we also want to draw the arrowhead
    final int ARROW_SIZE = 8;
    // find the tip of the arrow, the point NODE_RADIUS away from the destination endpoint
    double theta = Math.atan2(trans.y2 - trans.y1, trans.x2 - trans.x1);
    // NOTE(review): nx/ny computed from nodeRadius here are immediately
    // overwritten below before use
    double nx = nodeRadius * Math.cos(theta);
    double ny = nodeRadius * Math.sin(theta);
    int px = Math.round(trans.x2);
    int py = Math.round(trans.y2);
    Polygon arrowhead = new Polygon();
    arrowhead.addPoint(px, py);
    // step back ARROW_SIZE along the edge direction to find the arrowhead base
    nx = (ARROW_SIZE) * Math.cos(theta);
    ny = (ARROW_SIZE) * Math.sin(theta);
    px = (int)Math.round(trans.x2 - nx);
    py = (int)Math.round(trans.y2 - ny);
    // px and py are now the "base" of the arrowhead
    // rotate +90 degrees to offset the two base corners perpendicular to the edge
    theta += Math.PI / 2.0;
    double dx = (ARROW_SIZE / 2) * Math.cos(theta);
    double dy = (ARROW_SIZE / 2) * Math.sin(theta);
    arrowhead.addPoint((int)Math.round(px + dx), (int)Math.round(py + dy));
    // and -90 degrees from the original heading for the opposite corner
    theta -= Math.PI;
    dx = (ARROW_SIZE / 2) * Math.cos(theta);
    dy = (ARROW_SIZE / 2) * Math.sin(theta);
    arrowhead.addPoint((int)Math.round(px + dx), (int)Math.round(py + dy));
    g2d.fill(arrowhead);
}
/**
 * Creates a triangle with the specified center and circumscribing radius.
 * The three vertices sit at 0, 120 and 240 degrees on the circle of the
 * given radius around (x, y).
 * @param x x coordinate of the triangle's center
 * @param y y coordinate of the triangle's center
 * @param radius radius of the circumscribing circle
 * @return a Polygon containing the three vertices
 **/
private Polygon makeTriangle(int x, int y, double radius)
{
    Polygon triangle = new Polygon();
    for (int vertex = 0; vertex < 3; ++vertex)
    {
        double angle = Math.toRadians(vertex * 120);
        triangle.addPoint((int) (x + radius * Math.cos(angle)),
                          (int) (y + radius * Math.sin(angle)));
    }
    return triangle;
}
/**
 * Draw a node at a given location.
 * Plain (non-modifiable) states render as circles; transitions (modifiable
 * states) render as triangles, with a red X offset 3*NODE_RADIUS to the
 * right when justified by a Contradiction. Nodes in the current selection
 * get a blue, thicker outline.
 * @param g the graphics to draw it with
 * @param x the x location of the center of the node
 * @param y the y location of the center of the node
 * @param state the state to draw
 */
private void drawNode( Graphics g, int x, int y, BoardState state ){
    final int diam = NODE_RADIUS + NODE_RADIUS;
    Graphics2D g2D = (Graphics2D)g;
    g2D.setStroke(thin);
    Polygon triangle = makeTriangle(x, y, 1.5*NODE_RADIUS);
    Selection theSelection = new Selection(state,false);
    ArrayList <Selection> sel = Legup.getInstance().getSelections().getCurrentSelection();
    // hoisted: the selection check was repeated four times
    boolean selected = sel.contains(theSelection);
    g.setColor(state.getColor());
    if(!state.isModifiable())
    {
        // plain state: filled circle with (possibly highlighted) outline
        g.fillOval( x - NODE_RADIUS, y - NODE_RADIUS, diam, diam );
        g.setColor(selected ? Color.blue : Color.black);
        g2D.setStroke(selected ? medium : thin);
        g.drawOval( x - NODE_RADIUS, y - NODE_RADIUS, diam, diam );
    }
    else
    {
        // transition: filled triangle with (possibly highlighted) outline
        g2D.fill(triangle);
        g.setColor(selected ? Color.blue : Color.black);
        g2D.setStroke(selected ? medium : thin);
        g.drawPolygon(triangle);
        if(state.getJustification() instanceof Contradiction)
        {
            g.setColor(Color.red);
            g2D.drawLine(x-NODE_RADIUS+3*NODE_RADIUS,y-NODE_RADIUS,x+NODE_RADIUS+3*NODE_RADIUS,y+NODE_RADIUS);
            g2D.drawLine(x+NODE_RADIUS+3*NODE_RADIUS,y-NODE_RADIUS,x-NODE_RADIUS+3*NODE_RADIUS,y+NODE_RADIUS);
            g.setColor(selected ? Color.blue : Color.black);
        }
    }
    // removed: an unused local that read LEGUP_Gui.profFlag(IMD_FEEDBACK)
    // and was never used
}
/**
 * Draw a collapsed node at the current location: three small triangles in
 * a horizontal row, centered around (x - SMALL_NODE_RADIUS, y).
 * @param g the Graphics to draw with
 * @param x the x location to draw it on
 * @param y the y location to draw it on
 */
private void drawCollapsedNode(Graphics g,int x, int y)
{
    final int rad = SMALL_NODE_RADIUS;
    final int deltaX = -COLLAPSED_DRAW_DELTA_X;
    Graphics2D g2D = (Graphics2D)g;
    g2D.setStroke(thin);
    g2D.setColor(Color.black);
    // one triangle each at offsets -deltaX, 0, +deltaX from x - rad
    for (int c = 0; c < 3; ++c)
    {
        // (was makeTriangle(..., diam/2) with diam == 2*rad — i.e. just rad)
        Polygon tri = makeTriangle(x - rad + (c - 1) * deltaX, y, rad);
        g.setColor(nodeColor);
        g.fillPolygon(tri);
        g.setColor(Color.black);
        g.drawPolygon(tri);
    }
}
/**
 * Draw the current state boxes (the cached selection) as dashed blue
 * outlines.
 * @param g the graphics to use to draw
 */
private void drawCurrentStateBoxes(Graphics g)
{
    if (currentStateBoxes == null)
        return;
    Graphics2D g2d = (Graphics2D)g;
    g.setColor(Color.blue);
    g2d.setStroke(dashed);
    for (int i = 0; i < currentStateBoxes.size(); ++i)
    {
        g2d.draw(currentStateBoxes.get(i));
    }
}
/**
 * Render a tooltip-style overlay for the node under the mouse: any
 * justification text plus the justification's icon, composed into an
 * off-screen image and blitted next to the cursor, scaled by the current
 * zoom.
 * @param g the graphics to draw the overlay onto
 */
private void drawMouseOver(Graphics2D g)
{
    BoardState B = mouseOver.getState();
    //J contains both basic rules and contradictions
    Justification J = B.getJustification();
    // NOTE(review): w and h are computed but never read below
    int w, h;
    g.setStroke(thin);
    w = (int)(100 * (100/(float)getZoom()));
    h = (int)(100 * (100/(float)getZoom()));
    // scale factor compensating for the current zoom level
    float scale = (100/(float)getZoom());
    int offset = (int)(scale*30);
    JViewport vp = getViewport();
    // compose the tooltip off-screen, sized to the viewport
    BufferedImage image = new BufferedImage(vp.getWidth(), vp.getHeight(), BufferedImage.TYPE_INT_ARGB);
    Graphics2D g_tmp = image.createGraphics();
    int v_offset = 0;
    if((mouseOver.getState().getJustification() != null)||(mouseOver.getState().getCaseRuleJustification() != null))
    {
        if((mouseOver.getState().justificationText != null)&&(mouseOver.getState().getColor() != TreePanel.nodeColor))
        {
            // render the justification text line by line, 14px apart
            g_tmp.setColor(Color.black);
            String[] tmp = mouseOver.getState().justificationText.split("\n");
            v_offset = 10+tmp.length*14;
            for(int c1=0;c1<tmp.length;c1++)
            {
                g_tmp.drawString(tmp[c1],0,(14*c1)+10);
            }
        }
        g_tmp.setColor(Color.gray);
        g_tmp.drawRect(0,v_offset,100,100);
    }
    if (J != null)
    {
        g_tmp.drawImage(J.getImageIcon().getImage(), 0, v_offset, null);
    }
    CaseRule CR = B.getCaseSplitJustification();
    if (CR != null)
    {
        g_tmp.drawImage(CR.getImageIcon().getImage(), 0, v_offset, null);
        // NOTE(review): this early return skips the g.drawImage below, so the
        // composed overlay is never shown for case-split nodes — confirm
        // whether that is intended
        return;
    }
    g.drawImage(image, mousePoint.x+(int)(scale*30), mousePoint.y-(int)(scale*30), (int)(scale*vp.getWidth()), (int)(scale*vp.getHeight()), null);
}
}
| Assign a color to collapsed nodes
The overall color is based on the color of the transition nodes.
| code/edu/rpi/phil/legup/newgui/TreePanel.java | Assign a color to collapsed nodes |
|
Java | agpl-3.0 | 5c1517441f49364a2953683865bf824c931e391d | 0 | ChiralBehaviors/Ultrastructure,ChiralBehaviors/Ultrastructure | /**
* Copyright (c) 2016 Chiral Behaviors, LLC, all rights reserved.
*
* This file is part of Ultrastructure.
*
* Ultrastructure is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ULtrastructure is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Ultrastructure. If not, see <http://www.gnu.org/licenses/>.
*/
package com.chiralbehaviors.CoRE.phantasm.graphql;
import static com.chiralbehaviors.CoRE.phantasm.graphql.PhantasmProcessing.object;
import static com.chiralbehaviors.CoRE.phantasm.graphql.PhantasmProcessing.objectBuilder;
import static graphql.Scalars.GraphQLFloat;
import static graphql.Scalars.GraphQLString;
import java.util.Collections;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import java.util.stream.Collectors;
import com.chiralbehaviors.CoRE.domain.Product;
import com.chiralbehaviors.CoRE.jooq.enums.Cardinality;
import com.chiralbehaviors.CoRE.jooq.enums.ReferenceType;
import com.chiralbehaviors.CoRE.jooq.enums.ValueType;
import com.chiralbehaviors.CoRE.jooq.tables.records.FacetRecord;
import com.chiralbehaviors.CoRE.kernel.phantasm.product.Plugin;
import com.chiralbehaviors.CoRE.kernel.phantasm.product.Workspace;
import com.chiralbehaviors.CoRE.meta.Model;
import com.chiralbehaviors.CoRE.meta.workspace.WorkspaceAccessor;
import com.chiralbehaviors.CoRE.meta.workspace.dsl.WorkspacePresentation;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.AttributeAuthorizationMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.ChildSequencingMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.ExistentialMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.FacetMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.JobMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.MetaProtocolMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.NetworkAttributeAuthorizationMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.NetworkAuthorizationMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.ParentSequencingMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.ProtocolMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.SelfSequencingMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.SiblingSequencingMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.StatusCodeSequencingMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.AttributeAuthorizationQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.ChildSequencingQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.ExistentialQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.FacetQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.JobChronologyQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.JobQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.MetaProtocolQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.NetworkAttributeAuthorizationQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.NetworkAuthorizationQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.ParentSequencingQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.ProtocolQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.SelfSequencingQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.SiblingSequencingQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.StatusCodeSequencingQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.AttributeAuthorization;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.ChildSequencing;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Agency;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Attribute;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Interval;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Location;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Relationship;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.StatusCode;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Unit;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Facet;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Job;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.JobChronology;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.MetaProtocol;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.NetworkAttributeAuthorization;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.NetworkAuthorization;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.ParentSequencing;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Protocol;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.SelfSequencing;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.SiblingSequencing;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.StatusCodeSequencing;
import com.chiralbehaviors.CoRE.phantasm.model.PhantasmCRUD;
import com.chiralbehaviors.CoRE.phantasm.model.PhantasmTraversal.Aspect;
import graphql.schema.DataFetchingEnvironment;
import graphql.schema.GraphQLFieldDefinition;
import graphql.schema.GraphQLInterfaceType;
import graphql.schema.GraphQLObjectType;
import graphql.schema.GraphQLObjectType.Builder;
import graphql.schema.GraphQLSchema;
import graphql.schema.GraphQLTypeReference;
/**
* @author hhildebrand
*
*/
/**
 * Builds the GraphQL schemas used by phantasm: a per-workspace schema whose
 * object types are derived from the workspace's facets (plus any plugins
 * declared for them), and a static "meta" schema exposing the full CoRE
 * model.
 *
 * @author hhildebrand
 */
public class WorkspaceSchema {

    /** Aggregated mutation surface of the meta schema. */
    public interface MetaMutations extends ExistentialMutations, FacetMutations,
            AttributeAuthorizationMutations, NetworkAuthorizationMutations,
            ChildSequencingMutations, ParentSequencingMutations,
            SelfSequencingMutations, SiblingSequencingMutations,
            ProtocolMutations, MetaProtocolMutations,
            StatusCodeSequencingMutations,
            NetworkAttributeAuthorizationMutations, JobMutations {
    }

    /** Aggregated query surface of the meta schema. */
    public interface MetaQueries extends ExistentialQueries, FacetQueries,
            AttributeAuthorizationQueries, NetworkAuthorizationQueries,
            ChildSequencingQueries, ParentSequencingQueries,
            SelfSequencingQueries, SiblingSequencingQueries, ProtocolQueries,
            MetaProtocolQueries, StatusCodeSequencingQueries,
            NetworkAttributeAuthorizationQueries, JobQueries,
            JobChronologyQueries {
    }

    /** Mutation surface exposed by a per-workspace schema. */
    public interface Mutations extends ExistentialMutations, JobMutations {
    }

    /** Query surface exposed by a per-workspace schema. */
    public interface Queries
            extends ExistentialQueries, JobQueries, JobChronologyQueries {
    }

    /**
     * Convenience accessor for the Model carried by the GraphQL execution
     * context (a PhantasmCRUD).
     *
     * @param env the data fetching environment
     * @return the model backing the current execution context
     */
    public static Model ctx(DataFetchingEnvironment env) {
        return ((PhantasmCRUD) env.getContext()).getModel();
    }

    // Shared type registry; registerTypes(...) must run before each build so
    // that every scalar and object type resolves consistently.
    private final WorkspaceTypeFunction typeFunction = new WorkspaceTypeFunction();

    public WorkspaceSchema() {
    }

    /**
     * Build the GraphQL schema of a workspace. All facets transitively
     * reachable from the workspace are resolved (applying any plugins whose
     * facet name matches), and each resolved facet contributes its queries
     * and mutations to the top level types.
     *
     * @param accessor       the workspace to build the schema for
     * @param model          the model
     * @param executionScope class loader used to resolve plugin classes
     * @return the constructed GraphQLSchema
     */
    public GraphQLSchema build(WorkspaceAccessor accessor, Model model,
                               ClassLoader executionScope) throws NoSuchMethodException,
                                                           InstantiationException,
                                                           IllegalAccessException {
        Deque<FacetRecord> unresolved = FacetFields.initialState(accessor,
                                                                 model);
        Map<FacetRecord, FacetFields> resolved = new HashMap<>();
        Product definingProduct = accessor.getDefiningProduct();
        Workspace workspace = model.wrap(Workspace.class, definingProduct);
        List<Plugin> plugins = workspace.getPlugins();
        // Transitive closure: resolving one facet may reveal references to
        // further, not yet resolved facets.
        while (!unresolved.isEmpty()) {
            FacetRecord facet = unresolved.pop();
            if (resolved.containsKey(facet)) {
                continue;
            }
            FacetFields type = new FacetFields(facet);
            resolved.put(facet, type);
            List<Plugin> facetPlugins = plugins.stream()
                                               .filter(plugin -> facet.getName()
                                                                      .equals(plugin.getFacetName()))
                                               .collect(Collectors.toList());
            type.resolve(facet, facetPlugins, model, executionScope)
                .stream()
                .filter(auth -> !resolved.containsKey(auth))
                .forEach(unresolved::add);
        }
        registerTypes(resolved);
        Builder topLevelQuery = objectBuilder(Queries.class, typeFunction,
                                              typeFunction);
        Builder topLevelMutation = objectBuilder(Mutations.class, typeFunction,
                                                 typeFunction);
        GraphQLSchema schema;
        resolved.entrySet()
                .forEach(e -> e.getValue()
                               .build(new Aspect(model.create(), e.getKey()),
                                      topLevelQuery, topLevelMutation));
        schema = GraphQLSchema.newSchema()
                              .query(topLevelQuery.build())
                              .mutation(topLevelMutation.build())
                              .build();
        return schema;
    }

    /**
     * Build the static meta schema exposing the full CoRE model. No facets
     * are involved, so types are registered against an empty resolution map.
     *
     * @return the meta GraphQLSchema
     */
    public GraphQLSchema buildMeta() throws Exception {
        registerTypes(Collections.emptyMap());
        return GraphQLSchema.newSchema()
                            .query(object(MetaQueries.class, typeFunction,
                                          typeFunction))
                            .mutation(object(MetaMutations.class, typeFunction,
                                             typeFunction))
                            .build();
    }

    /**
     * Add an "as&lt;Facet&gt;" field to an object type whose data fetcher
     * casts the sourced Existential to the given facet.
     */
    private void addPhantasmCast(Builder typeBuilder,
                                 Entry<FacetRecord, FacetFields> entry) {
        typeBuilder.field(GraphQLFieldDefinition.newFieldDefinition()
                                                .name(String.format("as%s",
                                                                    WorkspacePresentation.toTypeName(entry.getKey()
                                                                                                          .getName())))
                                                .description(String.format("Cast to the %s facet",
                                                                           entry.getKey()
                                                                                .getName()))
                                                .type(new GraphQLTypeReference(entry.getValue()
                                                                                    .getName()))
                                                .dataFetcher(env -> {
                                                    Existential existential = (Existential) env.getSource();
                                                    PhantasmCRUD crud = FacetFields.ctx(env);
                                                    crud.cast(existential.getRecord(),
                                                              new Aspect(crud.getModel()
                                                                             .create(),
                                                                         entry.getKey()));
                                                    return existential;
                                                })
                                                .build());
    }

    /**
     * Interface variant of the cast field: declares "as&lt;Facet&gt;" on the
     * Existential interface (no data fetcher — implementations supply one).
     */
    private void addPhantasmCast(graphql.schema.GraphQLInterfaceType.Builder builder,
                                 Entry<FacetRecord, FacetFields> entry) {
        builder.field(GraphQLFieldDefinition.newFieldDefinition()
                                            .name(String.format("as%s",
                                                                WorkspacePresentation.toTypeName(entry.getKey()
                                                                                                      .getName())))
                                            .description(String.format("Cast to the %s facet",
                                                                       entry.getKey()
                                                                            .getName()))
                                            .type(new GraphQLTypeReference(entry.getValue()
                                                                                .getName()))
                                            .build());
    }

    /**
     * Register every scalar and object type with the shared type function.
     * Existential object types additionally receive an "as&lt;Facet&gt;"
     * cast field for each resolved facet. Registration order is significant:
     * the Existential interface is built first so object types can refer to
     * it.
     */
    private void registerTypes(Map<FacetRecord, FacetFields> resolved) throws NoSuchMethodException,
                                                                       InstantiationException,
                                                                       IllegalAccessException {
        GraphQLInterfaceType et = existentialType(resolved);
        typeFunction.register(Existential.class, (u, t) -> et);
        // scalars: UUIDs and the jooq enums are exposed as plain strings
        typeFunction.register(Double.class, (u, t) -> GraphQLFloat);
        typeFunction.register(UUID.class, (u, t) -> GraphQLString);
        typeFunction.register(ValueType.class, (u, t) -> GraphQLString);
        typeFunction.register(Cardinality.class, (u, t) -> GraphQLString);
        typeFunction.register(ReferenceType.class, (u, t) -> GraphQLString);
        // existential domain types, each with facet cast fields
        GraphQLObjectType agencyType = phantasm(resolved,
                                                objectBuilder(Agency.class,
                                                              typeFunction,
                                                              typeFunction));
        typeFunction.register(Agency.class, (u, t) -> agencyType);
        GraphQLObjectType attrType = phantasm(resolved,
                                              objectBuilder(Attribute.class,
                                                            typeFunction,
                                                            typeFunction));
        typeFunction.register(Attribute.class, (u, t) -> attrType);
        GraphQLObjectType intervalType = phantasm(resolved,
                                                  objectBuilder(Interval.class,
                                                                typeFunction,
                                                                typeFunction));
        typeFunction.register(Interval.class, (u, t) -> intervalType);
        GraphQLObjectType locationType = phantasm(resolved,
                                                  objectBuilder(Location.class,
                                                                typeFunction,
                                                                typeFunction));
        typeFunction.register(Location.class, (u, t) -> locationType);
        GraphQLObjectType productType = phantasm(resolved,
                                                 objectBuilder(com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Product.class,
                                                               typeFunction,
                                                               typeFunction));
        typeFunction.register(com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Product.class,
                              (u, t) -> productType);
        GraphQLObjectType relationshipType = phantasm(resolved,
                                                      objectBuilder(Relationship.class,
                                                                    typeFunction,
                                                                    typeFunction));
        typeFunction.register(Relationship.class, (u, t) -> relationshipType);
        GraphQLObjectType statusCodeType = phantasm(resolved,
                                                    objectBuilder(StatusCode.class,
                                                                  typeFunction,
                                                                  typeFunction));
        typeFunction.register(StatusCode.class, (u, t) -> statusCodeType);
        GraphQLObjectType unitType = phantasm(resolved,
                                              objectBuilder(Unit.class,
                                                            typeFunction,
                                                            typeFunction));
        typeFunction.register(Unit.class, (u, t) -> unitType);
        // supporting types (no facet casts); lambda style normalized and the
        // "facteType" local renamed to facetType
        GraphQLObjectType facetType = object(Facet.class, typeFunction,
                                             typeFunction);
        typeFunction.register(Facet.class, (u, t) -> facetType);
        GraphQLObjectType attrAuthType = object(AttributeAuthorization.class,
                                                typeFunction, typeFunction);
        typeFunction.register(AttributeAuthorization.class,
                              (u, t) -> attrAuthType);
        GraphQLObjectType csType = object(ChildSequencing.class, typeFunction,
                                          typeFunction);
        typeFunction.register(ChildSequencing.class, (u, t) -> csType);
        GraphQLObjectType job = object(Job.class, typeFunction, typeFunction);
        typeFunction.register(Job.class, (u, t) -> job);
        GraphQLObjectType metaType = object(MetaProtocol.class, typeFunction,
                                            typeFunction);
        typeFunction.register(MetaProtocol.class, (u, t) -> metaType);
        GraphQLObjectType netAuthType = object(NetworkAuthorization.class,
                                               typeFunction, typeFunction);
        typeFunction.register(NetworkAuthorization.class,
                              (u, t) -> netAuthType);
        GraphQLObjectType psType = object(ParentSequencing.class, typeFunction,
                                          typeFunction);
        typeFunction.register(ParentSequencing.class, (u, t) -> psType);
        GraphQLObjectType protocolType = object(Protocol.class, typeFunction,
                                                typeFunction);
        typeFunction.register(Protocol.class, (u, t) -> protocolType);
        GraphQLObjectType ssType = object(SelfSequencing.class, typeFunction,
                                          typeFunction);
        typeFunction.register(SelfSequencing.class, (u, t) -> ssType);
        GraphQLObjectType sibSeqType = object(SiblingSequencing.class,
                                              typeFunction, typeFunction);
        typeFunction.register(SiblingSequencing.class, (u, t) -> sibSeqType);
        GraphQLObjectType scsType = object(StatusCodeSequencing.class,
                                           typeFunction, typeFunction);
        typeFunction.register(StatusCodeSequencing.class, (u, t) -> scsType);
        GraphQLObjectType netAttAuthType = object(NetworkAttributeAuthorization.class,
                                                  typeFunction, typeFunction);
        typeFunction.register(NetworkAttributeAuthorization.class,
                              (u, t) -> netAttAuthType);
        GraphQLObjectType chronType = object(JobChronology.class, typeFunction,
                                             typeFunction);
        typeFunction.register(JobChronology.class, (u, t) -> chronType);
    }

    /**
     * Finish an existential object type, adding a cast field per resolved
     * facet before building.
     */
    private GraphQLObjectType phantasm(Map<FacetRecord, FacetFields> resolved,
                                       Builder objectBuilder) {
        resolved.entrySet()
                .forEach(e -> addPhantasmCast(objectBuilder, e));
        return objectBuilder.build();
    }

    /**
     * Construct the Existential interface type: id, name, description and
     * updatedBy fields, plus a cast field per resolved facet.
     */
    private GraphQLInterfaceType existentialType(Map<FacetRecord, FacetFields> resolved) {
        graphql.schema.GraphQLInterfaceType.Builder builder = graphql.schema.GraphQLInterfaceType.newInterface();
        builder.name("Existential");
        builder.description("The Existential interface type");
        builder.field(GraphQLFieldDefinition.newFieldDefinition()
                                            .name("id")
                                            .description("Existential id")
                                            .type(GraphQLString)
                                            .build());
        builder.field(GraphQLFieldDefinition.newFieldDefinition()
                                            .name("name")
                                            .description("Existential name")
                                            .type(GraphQLString)
                                            .build());
        builder.field(GraphQLFieldDefinition.newFieldDefinition()
                                            .name("description")
                                            .description("Existential description")
                                            .type(GraphQLString)
                                            .build());
        builder.field(GraphQLFieldDefinition.newFieldDefinition()
                                            .name("updatedBy")
                                            .description("Agency that updated the Existential")
                                            .type(new GraphQLTypeReference("Agency"))
                                            .build());
        builder.typeResolver(typeFunction);
        resolved.entrySet()
                .forEach(e -> addPhantasmCast(builder, e));
        return builder.build();
    }
}
| phantasm-at-rest/src/main/java/com/chiralbehaviors/CoRE/phantasm/graphql/WorkspaceSchema.java | /**
* Copyright (c) 2016 Chiral Behaviors, LLC, all rights reserved.
*
* This file is part of Ultrastructure.
*
* Ultrastructure is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ULtrastructure is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Ultrastructure. If not, see <http://www.gnu.org/licenses/>.
*/
package com.chiralbehaviors.CoRE.phantasm.graphql;
import static com.chiralbehaviors.CoRE.phantasm.graphql.PhantasmProcessing.object;
import static com.chiralbehaviors.CoRE.phantasm.graphql.PhantasmProcessing.objectBuilder;
import static graphql.Scalars.GraphQLFloat;
import static graphql.Scalars.GraphQLString;
import java.util.Collections;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import java.util.stream.Collectors;
import com.chiralbehaviors.CoRE.domain.Product;
import com.chiralbehaviors.CoRE.jooq.enums.Cardinality;
import com.chiralbehaviors.CoRE.jooq.enums.ReferenceType;
import com.chiralbehaviors.CoRE.jooq.enums.ValueType;
import com.chiralbehaviors.CoRE.jooq.tables.records.FacetRecord;
import com.chiralbehaviors.CoRE.kernel.phantasm.product.Plugin;
import com.chiralbehaviors.CoRE.kernel.phantasm.product.Workspace;
import com.chiralbehaviors.CoRE.meta.Model;
import com.chiralbehaviors.CoRE.meta.workspace.WorkspaceAccessor;
import com.chiralbehaviors.CoRE.meta.workspace.dsl.WorkspacePresentation;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.AttributeAuthorizationMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.ChildSequencingMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.ExistentialMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.FacetMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.JobMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.MetaProtocolMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.NetworkAttributeAuthorizationMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.NetworkAuthorizationMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.ParentSequencingMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.ProtocolMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.SelfSequencingMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.SiblingSequencingMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.mutations.StatusCodeSequencingMutations;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.AttributeAuthorizationQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.ChildSequencingQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.ExistentialQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.FacetQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.JobChronologyQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.JobQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.MetaProtocolQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.NetworkAttributeAuthorizationQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.NetworkAuthorizationQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.ParentSequencingQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.ProtocolQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.SelfSequencingQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.SiblingSequencingQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.queries.StatusCodeSequencingQueries;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.AttributeAuthorization;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.ChildSequencing;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Agency;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Attribute;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Interval;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Location;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Relationship;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.StatusCode;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Unit;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Facet;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Job;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.JobChronology;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.MetaProtocol;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.NetworkAttributeAuthorization;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.NetworkAuthorization;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.ParentSequencing;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.Protocol;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.SelfSequencing;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.SiblingSequencing;
import com.chiralbehaviors.CoRE.phantasm.graphql.types.StatusCodeSequencing;
import com.chiralbehaviors.CoRE.phantasm.model.PhantasmCRUD;
import com.chiralbehaviors.CoRE.phantasm.model.PhantasmTraversal.Aspect;
import graphql.schema.DataFetchingEnvironment;
import graphql.schema.GraphQLFieldDefinition;
import graphql.schema.GraphQLInterfaceType;
import graphql.schema.GraphQLObjectType;
import graphql.schema.GraphQLObjectType.Builder;
import graphql.schema.GraphQLSchema;
import graphql.schema.GraphQLType;
import graphql.schema.GraphQLTypeReference;
import graphql.schema.TypeResolver;
/**
* @author hhildebrand
*
*/
public class WorkspaceSchema {
/**
 * Aggregate mutation surface for the *meta* schema: every mutation facade in
 * the system, including facet/authorization/sequencing administration.
 */
public interface MetaMutations extends ExistentialMutations, FacetMutations,
AttributeAuthorizationMutations, NetworkAuthorizationMutations,
ChildSequencingMutations, ParentSequencingMutations,
SelfSequencingMutations, SiblingSequencingMutations,
ProtocolMutations, MetaProtocolMutations,
StatusCodeSequencingMutations,
NetworkAttributeAuthorizationMutations, JobMutations {
}
/**
 * Aggregate query surface for the *meta* schema, mirroring MetaMutations.
 */
public interface MetaQueries extends ExistentialQueries, FacetQueries,
AttributeAuthorizationQueries, NetworkAuthorizationQueries,
ChildSequencingQueries, ParentSequencingQueries,
SelfSequencingQueries, SiblingSequencingQueries, ProtocolQueries,
MetaProtocolQueries, StatusCodeSequencingQueries,
NetworkAttributeAuthorizationQueries, JobQueries,
JobChronologyQueries {
}
/**
 * Reduced mutation surface exposed by a regular workspace schema.
 */
public interface Mutations extends ExistentialMutations, JobMutations {
}
/**
 * Reduced query surface exposed by a regular workspace schema.
 */
public interface Queries
extends ExistentialQueries, JobQueries, JobChronologyQueries {
}
/**
 * Convenience accessor: the GraphQL execution context of every workspace
 * query is a PhantasmCRUD; unwrap it to reach the underlying model.
 */
public static Model ctx(DataFetchingEnvironment env) {
    PhantasmCRUD crud = (PhantasmCRUD) env.getContext();
    return crud.getModel();
}
// Shared type function: maps Java types to GraphQL types and resolves which
// concrete object type implements the Existential interface at runtime.
private final WorkspaceTypeFunction typeFunction = new WorkspaceTypeFunction();
// Default constructor; all state is initialized inline or during build().
public WorkspaceSchema() {
}
/**
 * Builds the GraphQL schema for one workspace.
 * <p>
 * Resolves the workspace's facets with a worklist (facets discovered while
 * resolving one facet are pushed back onto the deque until a fixpoint is
 * reached), registers all GraphQL types, then lets each resolved facet
 * contribute its fields to the top-level query and mutation objects.
 *
 * @param accessor       the workspace whose facets define the schema
 * @param model          the model used to wrap and create records
 * @param executionScope class loader used to load facet plugins
 * @return the assembled schema
 * @throws NoSuchMethodException, InstantiationException, IllegalAccessException
 *         if reflective construction of query/mutation objects fails
 */
public GraphQLSchema build(WorkspaceAccessor accessor, Model model,
ClassLoader executionScope) throws NoSuchMethodException,
InstantiationException,
IllegalAccessException {
// Worklist of facets still to be resolved; seeded from the workspace.
Deque<FacetRecord> unresolved = FacetFields.initialState(accessor,
model);
Map<FacetRecord, FacetFields> resolved = new HashMap<>();
Product definingProduct = accessor.getDefiningProduct();
Workspace workspace = model.wrap(Workspace.class, definingProduct);
List<Plugin> plugins = workspace.getPlugins();
while (!unresolved.isEmpty()) {
FacetRecord facet = unresolved.pop();
// A facet may be queued more than once; resolve it only the first time.
if (resolved.containsKey(facet)) {
continue;
}
FacetFields type = new FacetFields(facet);
resolved.put(facet, type);
// Only plugins declared for this facet (matched by name) participate.
List<Plugin> facetPlugins = plugins.stream()
.filter(plugin -> facet.getName()
.equals(plugin.getFacetName()))
.collect(Collectors.toList());
// Resolution may reference further facets; queue any not yet resolved.
type.resolve(facet, facetPlugins, model, executionScope)
.stream()
.filter(auth -> !resolved.containsKey(auth))
.forEach(auth -> unresolved.add(auth));
}
// All facets known: register scalar/object/interface types first.
registerTypes(resolved);
Builder topLevelQuery = objectBuilder(Queries.class, typeFunction,
typeFunction);
Builder topLevelMutation = objectBuilder(Mutations.class, typeFunction,
typeFunction);
GraphQLSchema schema;
// Each facet adds its own query/mutation fields to the top-level builders.
resolved.entrySet()
.stream()
.forEach(e -> e.getValue()
.build(new Aspect(model.create(), e.getKey()),
topLevelQuery, topLevelMutation));
schema = GraphQLSchema.newSchema()
.query(topLevelQuery.build())
.mutation(topLevelMutation.build())
.build();
return schema;
}
/**
 * Builds the facet-free *meta* schema exposing the full administrative
 * query and mutation surface.
 */
public GraphQLSchema buildMeta() throws Exception {
    // No facets to resolve here; register only the built-in types.
    registerTypes(Collections.emptyMap());
    GraphQLObjectType metaQueries = object(MetaQueries.class, typeFunction,
                                           typeFunction);
    GraphQLObjectType metaMutations = object(MetaMutations.class, typeFunction,
                                             typeFunction);
    return GraphQLSchema.newSchema()
                        .query(metaQueries)
                        .mutation(metaMutations)
                        .build();
}
/**
 * Adds an "as<Facet>" cast field to an object type builder. Fetching the
 * field performs an actual (potentially authorizing) cast of the source
 * existential to the facet's aspect and returns the same existential.
 */
private void addPhantasmCast(Builder typeBuilder,
Entry<FacetRecord, FacetFields> entry) {
typeBuilder.field(GraphQLFieldDefinition.newFieldDefinition()
.name(String.format("as%s",
WorkspacePresentation.toTypeName(entry.getKey()
.getName())))
.description(String.format("Cast to the %s facet",
entry.getKey()
.getName()))
.type(new GraphQLTypeReference(entry.getValue()
.getName()))
.dataFetcher(env -> {
// The source of this field is the existential being cast.
Existential existential = (Existential) env.getSource();
PhantasmCRUD crud = FacetFields.ctx(env);
// cast() enforces that the record actually has the facet's aspect.
crud.cast(existential.getRecord(),
new Aspect(crud.getModel()
.create(),
entry.getKey()));
return existential;
})
.build());
}
/**
 * Adds the "as<Facet>" cast field to the Existential interface builder.
 * Unlike the object-type overload, no data fetcher is attached here — the
 * concrete implementations supply their own.
 */
private void addPhantasmCast(graphql.schema.GraphQLInterfaceType.Builder builder,
                             Entry<FacetRecord, FacetFields> entry) {
    String facetName = entry.getKey()
                            .getName();
    GraphQLFieldDefinition cast = GraphQLFieldDefinition.newFieldDefinition()
                                                        .name(String.format("as%s",
                                                                            WorkspacePresentation.toTypeName(facetName)))
                                                        .description(String.format("Cast to the %s facet",
                                                                                   facetName))
                                                        .type(new GraphQLTypeReference(entry.getValue()
                                                                                            .getName()))
                                                        .build();
    builder.field(cast);
}
/**
 * Registers every GraphQL type used by the workspace schema with the shared
 * type function, in dependency order: the Existential interface first, then
 * scalar mappings, then the existential object types (decorated with facet
 * cast fields), then the supporting record types.
 * <p>
 * Cleanup: the original mixed expression lambdas with block lambdas
 * ({@code (u, t) -> { return x; }}) and misspelled a local as "facteType";
 * both normalized here. Registration order is unchanged.
 *
 * @param resolved the facets whose cast fields decorate the existential types
 */
private void registerTypes(Map<FacetRecord, FacetFields> resolved) throws NoSuchMethodException,
                                                                   InstantiationException,
                                                                   IllegalAccessException {
    // The interface must exist before any implementation registers against it.
    GraphQLInterfaceType et = existentialType(resolved);
    typeFunction.register(Existential.class, (u, t) -> et);
    // Scalar mappings: ids and database enums are exposed as plain strings.
    typeFunction.register(Double.class, (u, t) -> GraphQLFloat);
    typeFunction.register(UUID.class, (u, t) -> GraphQLString);
    typeFunction.register(ValueType.class, (u, t) -> GraphQLString);
    typeFunction.register(Cardinality.class, (u, t) -> GraphQLString);
    typeFunction.register(ReferenceType.class, (u, t) -> GraphQLString);
    // Existential object types, each decorated with "as<Facet>" cast fields.
    GraphQLObjectType agencyType = phantasm(resolved,
                                            objectBuilder(Agency.class, typeFunction, typeFunction));
    typeFunction.register(Agency.class, (u, t) -> agencyType);
    GraphQLObjectType attrType = phantasm(resolved,
                                          objectBuilder(Attribute.class, typeFunction, typeFunction));
    typeFunction.register(Attribute.class, (u, t) -> attrType);
    GraphQLObjectType intervalType = phantasm(resolved,
                                              objectBuilder(Interval.class, typeFunction, typeFunction));
    typeFunction.register(Interval.class, (u, t) -> intervalType);
    GraphQLObjectType locationType = phantasm(resolved,
                                              objectBuilder(Location.class, typeFunction, typeFunction));
    typeFunction.register(Location.class, (u, t) -> locationType);
    // Fully qualified to avoid colliding with the imported domain Product.
    GraphQLObjectType productType = phantasm(resolved,
                                             objectBuilder(com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Product.class,
                                                           typeFunction, typeFunction));
    typeFunction.register(com.chiralbehaviors.CoRE.phantasm.graphql.types.Existential.Product.class,
                          (u, t) -> productType);
    GraphQLObjectType relationshipType = phantasm(resolved,
                                                  objectBuilder(Relationship.class, typeFunction, typeFunction));
    typeFunction.register(Relationship.class, (u, t) -> relationshipType);
    GraphQLObjectType statusCodeType = phantasm(resolved,
                                                objectBuilder(StatusCode.class, typeFunction, typeFunction));
    typeFunction.register(StatusCode.class, (u, t) -> statusCodeType);
    GraphQLObjectType unitType = phantasm(resolved,
                                          objectBuilder(Unit.class, typeFunction, typeFunction));
    typeFunction.register(Unit.class, (u, t) -> unitType);
    // Supporting (non-existential) record types.
    GraphQLObjectType facetType = objectTypeOf(Facet.class);
    typeFunction.register(Facet.class, (u, t) -> facetType);
    GraphQLObjectType attrAuthType = objectTypeOf(AttributeAuthorization.class);
    typeFunction.register(AttributeAuthorization.class, (u, t) -> attrAuthType);
    GraphQLObjectType csType = objectTypeOf(ChildSequencing.class);
    typeFunction.register(ChildSequencing.class, (u, t) -> csType);
    GraphQLObjectType jobType = objectTypeOf(Job.class);
    typeFunction.register(Job.class, (u, t) -> jobType);
    GraphQLObjectType metaType = objectTypeOf(MetaProtocol.class);
    typeFunction.register(MetaProtocol.class, (u, t) -> metaType);
    GraphQLObjectType netAuthType = objectTypeOf(NetworkAuthorization.class);
    typeFunction.register(NetworkAuthorization.class, (u, t) -> netAuthType);
    GraphQLObjectType psType = objectTypeOf(ParentSequencing.class);
    typeFunction.register(ParentSequencing.class, (u, t) -> psType);
    GraphQLObjectType protocolType = objectTypeOf(Protocol.class);
    typeFunction.register(Protocol.class, (u, t) -> protocolType);
    GraphQLObjectType ssType = objectTypeOf(SelfSequencing.class);
    typeFunction.register(SelfSequencing.class, (u, t) -> ssType);
    GraphQLObjectType sibSeqType = objectTypeOf(SiblingSequencing.class);
    typeFunction.register(SiblingSequencing.class, (u, t) -> sibSeqType);
    GraphQLObjectType scsType = objectTypeOf(StatusCodeSequencing.class);
    typeFunction.register(StatusCodeSequencing.class, (u, t) -> scsType);
    GraphQLObjectType netAttAuthType = objectTypeOf(NetworkAttributeAuthorization.class);
    typeFunction.register(NetworkAttributeAuthorization.class, (u, t) -> netAttAuthType);
    GraphQLObjectType chronType = objectTypeOf(JobChronology.class);
    typeFunction.register(JobChronology.class, (u, t) -> chronType);
}
/**
 * Returns the GraphQL object type for {@code clazz}: the one already known to
 * the type function if present, otherwise a freshly built one.
 * <p>
 * Fix: the original also created a {@code TypeProxy} and set its target, but
 * the proxy was never registered, returned, or otherwise referenced — dead
 * code, removed. NOTE(review): if a proxy registration was intended, it must
 * be added at the registration sites instead.
 */
private GraphQLObjectType objectTypeOf(Class<?> clazz) {
    GraphQLType type = typeFunction.getType(clazz);
    if (type != null) {
        return (GraphQLObjectType) type;
    }
    return object(clazz, typeFunction, typeFunction);
}
/**
 * Decorates the given object type builder with one "as<Facet>" cast field
 * per resolved facet, then builds it.
 */
private GraphQLObjectType phantasm(Map<FacetRecord, FacetFields> resolved,
                                   Builder objectBuilder) {
    for (Entry<FacetRecord, FacetFields> entry : resolved.entrySet()) {
        addPhantasmCast(objectBuilder, entry);
    }
    return objectBuilder.build();
}
/**
 * Builds the common "Existential" GraphQL interface type: id/name/description
 * scalar fields, the updatedBy Agency reference, plus one "as<Facet>" cast
 * field per resolved facet. Type resolution is delegated to typeFunction.
 */
private GraphQLInterfaceType existentialType(Map<FacetRecord, FacetFields> resolved) {
graphql.schema.GraphQLInterfaceType.Builder builder = graphql.schema.GraphQLInterfaceType.newInterface();
builder.name("Existential");
builder.description("The Existential interface type");
builder.field(GraphQLFieldDefinition.newFieldDefinition()
.name("id")
.description("Existential id")
.type(GraphQLString)
.build());
builder.field(GraphQLFieldDefinition.newFieldDefinition()
.name("name")
.description("Existential name")
.type(GraphQLString)
.build());
builder.field(GraphQLFieldDefinition.newFieldDefinition()
.name("description")
.description("Existential description")
.type(GraphQLString)
.build());
// Forward reference: the concrete Agency type is registered later.
builder.field(GraphQLFieldDefinition.newFieldDefinition()
.name("updatedBy")
.description("Agency that updated the Existential")
.type(new GraphQLTypeReference("Agency"))
.build());
builder.typeResolver(typeFunction);
resolved.entrySet()
.forEach(e -> addPhantasmCast(builder, e));
return builder.build();
}
}
| simplify | phantasm-at-rest/src/main/java/com/chiralbehaviors/CoRE/phantasm/graphql/WorkspaceSchema.java | simplify |
|
Java | lgpl-2.1 | 503b8a580d0b828737ea6e3c8b71171ce003dcd2 | 0 | cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl | package org.cytoscape.internal.view;
import static javax.swing.GroupLayout.DEFAULT_SIZE;
import static javax.swing.GroupLayout.PREFERRED_SIZE;
import static javax.swing.GroupLayout.Alignment.CENTER;
import static org.cytoscape.internal.util.ViewUtil.DEFAULT_PROVIDER_PROP_KEY;
import static org.cytoscape.internal.util.ViewUtil.getViewProperty;
import static org.cytoscape.internal.util.ViewUtil.hasVisibleOwnedWindows;
import static org.cytoscape.internal.util.ViewUtil.setViewProperty;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.MouseInfo;
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.event.WindowFocusListener;
import java.awt.font.FontRenderContext;
import java.awt.geom.AffineTransform;
import java.net.URL;
import java.text.Collator;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import javax.swing.AbstractAction;
import javax.swing.AbstractButton;
import javax.swing.ActionMap;
import javax.swing.BorderFactory;
import javax.swing.GroupLayout;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.InputMap;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.JToggleButton;
import javax.swing.KeyStroke;
import javax.swing.ListSelectionModel;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.ToolTipManager;
import javax.swing.UIManager;
import javax.swing.border.Border;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import org.cytoscape.application.swing.search.NetworkSearchTaskFactory;
import org.cytoscape.internal.util.RandomImage;
import org.cytoscape.service.util.CyServiceRegistrar;
import org.cytoscape.util.swing.IconManager;
import org.cytoscape.util.swing.LookAndFeelUtil;
import org.cytoscape.util.swing.OpenBrowser;
/*
* #%L
* Cytoscape Swing Application Impl (swing-application-impl)
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2006 - 2017 The Cytoscape Consortium
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
@SuppressWarnings("serial")
public class NetworkSearchBar extends JPanel {
// Fixed size (px) for provider icons; larger icons are scaled down.
private static final int ICON_SIZE = 32;
// Placeholder painted in the query field while it is empty.
private static final String DEF_SEARCH_TEXT = "Type your query here...";
// Lazily created UI components (see the corresponding getters).
private JButton providersButton;
private JButton providerSelectorButton;
private JPanel contentPane;
private JTextField searchTextField;
private JToggleButton optionsButton;
private JButton searchButton;
private JPopupMenu providersPopup;
private ProvidersPanel providersPanel;
private OptionsDialog optionsDialog;
// Fallback icon shown when no provider (or no provider icon) is available.
private final EmptyIcon emptyIcon = new EmptyIcon(ICON_SIZE, ICON_SIZE);
// Cached (possibly scaled) icon per registered provider.
private final Map<NetworkSearchTaskFactory, Icon> providerIcons = new HashMap<>();
// Registered providers, kept sorted by display name (see constructor).
private final TreeSet<NetworkSearchTaskFactory> providers;
/** This should only be set when the user explicitly selects a provider */
private NetworkSearchTaskFactory selectedProvider;
private final CyServiceRegistrar serviceRegistrar;
/**
 * Creates the search bar. Providers are kept in a TreeSet ordered by their
 * display name using locale-sensitive collation.
 */
public NetworkSearchBar(CyServiceRegistrar serviceRegistrar) {
    this.serviceRegistrar = serviceRegistrar;
    final Collator collator = Collator.getInstance();
    providers = new TreeSet<>((a, b) -> collator.compare(a.getName(), b.getName()));
    init();
}
/**
 * Persists the default provider id (a CyProperty). If the suggested provider
 * is null or no longer registered, falls back to the provider recorded in the
 * property, if any; only a non-null resolution is written back.
 */
private void setDefaultProvider(NetworkSearchTaskFactory suggestedProvider) {
if (suggestedProvider == null || !providers.contains(suggestedProvider)) {
// Check if there is a CyProperty for this
String id = getViewProperty(DEFAULT_PROVIDER_PROP_KEY, serviceRegistrar);
if (id != null)
suggestedProvider = getProvider(id);
}
if (suggestedProvider != null) // Update the CyProperty
setViewProperty(DEFAULT_PROVIDER_PROP_KEY, suggestedProvider.getId(), serviceRegistrar);
}
/**
 * Resolves the persisted default-provider id to a registered provider,
 * or null when the property is unset or the provider is gone.
 */
private NetworkSearchTaskFactory getDefaultProvider() {
    return getProvider(getViewProperty(DEFAULT_PROVIDER_PROP_KEY, serviceRegistrar));
}
/**
 * Records an explicit user selection, persists it as the default, and fires
 * a "selectedProvider" property change when the effective provider changed.
 */
public void setSelectedProvider(NetworkSearchTaskFactory newValue) {
if (newValue != selectedProvider) {
NetworkSearchTaskFactory oldValue = getSelectedProvider(); // Get the actual "current" provider
selectedProvider = newValue;
// Save the last selected provider now, so it can be selected by default when Cytoscape restarts
setDefaultProvider(newValue);
if (newValue != oldValue)
firePropertyChange("selectedProvider", oldValue, newValue);
}
}
/**
 * Returns the provider explicitly chosen by the user, falling back to the
 * persisted default provider, and finally to the first (alphabetically)
 * registered provider; null when none are registered.
 */
public NetworkSearchTaskFactory getSelectedProvider() {
    if (selectedProvider != null)
        return selectedProvider;
    NetworkSearchTaskFactory defProvider = getDefaultProvider();
    if (defProvider != null)
        return defProvider;
    return providers.isEmpty() ? null : providers.first();
}
/**
 * Looks up a registered provider by id; returns null when no provider
 * matches (including when {@code id} is null).
 */
public NetworkSearchTaskFactory getProvider(String id) {
    // Linear scan is fine: the set is small and sorted by name, not id.
    return providers.stream()
                    .filter(tf -> tf.getId().equals(id))
                    .findFirst()
                    .orElse(null);
}
@Override
public void setEnabled(boolean enabled) {
    super.setEnabled(enabled);
    // Propagate the enabled state to every child control, in the same order
    // the components appear in the bar.
    JComponent[] children = { getProvidersButton(), getProviderSelectorButton(),
                              getSearchTextField(), getOptionsButton(), getSearchButton() };
    for (JComponent c : children)
        c.setEnabled(enabled);
}
/**
 * Replaces the registered provider set (null is treated as empty), caching a
 * scaled icon — or a generated placeholder — for each provider, and fires a
 * "selectedProvider" property change if the effective provider changed.
 * <p>
 * Fix: the original called {@code newProviders.contains(...)} without a null
 * check, throwing an NPE when called with null while a provider was selected
 * (null is explicitly permitted by the earlier {@code if (newProviders != null)}).
 */
void update(Collection<NetworkSearchTaskFactory> newProviders) {
    NetworkSearchTaskFactory oldSelected = getSelectedProvider();
    providers.clear();
    providerIcons.clear();
    if (newProviders != null) {
        providers.addAll(newProviders);
        newProviders.forEach(p -> {
            Icon icon = p.getIcon();
            if (icon instanceof ImageIcon) {
                ImageIcon ii = (ImageIcon) icon;
                // Scale oversized icons down to the fixed cell size.
                if (ii.getIconWidth() > ICON_SIZE || ii.getIconHeight() > ICON_SIZE)
                    icon = new ImageIcon(ii.getImage().getScaledInstance(ICON_SIZE, ICON_SIZE, Image.SCALE_SMOOTH));
            }
            // Providers without an icon get a generated placeholder image.
            providerIcons.put(p, icon != null ? icon : new ImageIcon(new RandomImage(ICON_SIZE, ICON_SIZE)));
        });
    }
    // BUG FIX: guard against a null collection before calling contains().
    if (selectedProvider != null && (newProviders == null || !newProviders.contains(selectedProvider)))
        selectedProvider = null;
    // We are not changing the selectedProvider field here (only the user should do it),
    // but still need to let the widget know that the actual "current" provider has changed.
    // This is done this way to prevent a core provider from preventing another preferred one
    // (from third-party apps) from being pre-selected when Cytoscape restarts,
    // since the preferred one is auto-selected only when the user has not selected another provider yet.
    NetworkSearchTaskFactory newSelected = getSelectedProvider();
    if (newSelected != oldSelected)
        firePropertyChange("selectedProvider", oldSelected, newSelected);
}
/**
 * Syncs the provider button's icon/tooltip with the current provider and
 * enables the provider buttons only when at least one provider exists.
 */
void updateProvidersButton() {
NetworkSearchTaskFactory currentProvider = getSelectedProvider();
if (currentProvider != null) {
Icon icon = providerIcons.get(currentProvider);
getProvidersButton().setIcon(icon != null ? icon : emptyIcon);
getProvidersButton().setToolTipText(currentProvider.getName());
} else {
getProvidersButton().setIcon(emptyIcon);
getProvidersButton().setToolTipText("Please select a search provider...");
}
getProvidersButton().setEnabled(!providers.isEmpty());
getProviderSelectorButton().setEnabled(!providers.isEmpty());
}
/**
 * Enables the query field and options button only when a provider is
 * effectively selected, then refreshes the search button.
 */
void updateSearchEnabled() {
boolean enabled = getSelectedProvider() != null;
getSearchTextField().setEnabled(enabled);
getOptionsButton().setEnabled(enabled);
updateSearchButton();
}
/**
 * The search button is enabled only when the current provider reports
 * that its query is ready to run.
 */
void updateSearchButton() {
NetworkSearchTaskFactory tf = getSelectedProvider();
getSearchButton().setEnabled(tf != null && tf.isReady());
}
/**
 * Installs the provider-supplied query component (or the default text field
 * when null) next to the options button inside the content pane, rebuilding
 * the pane's GroupLayout from scratch.
 */
void updateSelectedSearchComponent(JComponent queryComp) {
getContentPane().removeAll();
if (queryComp == null)
queryComp = getSearchTextField();
final GroupLayout layout = new GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setAutoCreateContainerGaps(false);
layout.setAutoCreateGaps(false);
// Query component stretches; options button keeps its preferred width.
layout.setHorizontalGroup(layout.createSequentialGroup()
.addComponent(queryComp, DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(getOptionsButton(), PREFERRED_SIZE, DEFAULT_SIZE, PREFERRED_SIZE)
);
layout.setVerticalGroup(layout.createParallelGroup(CENTER, true)
.addComponent(queryComp, DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(getOptionsButton(), DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
);
}
/**
 * Shows the provider-chooser popup anchored under the providers button.
 * Any previous popup is disposed first so at most one exists at a time.
 */
private void showProvidersPopup() {
if (providers.isEmpty())
return;
if (providersPopup != null)
disposeProvidersPopup(false); // Just to make sure there will never be more than one dialog
providersPopup = new JPopupMenu();
providersPopup.setBackground(getBackground());
providersPopup.setBorder(BorderFactory.createEmptyBorder());
providersPopup.setLayout(new BorderLayout());
providersPopup.add(getProvidersPanel(), BorderLayout.CENTER);
// When the popup closes (for any reason), resync the toolbar state.
providersPopup.addPropertyChangeListener("visible", evt -> {
if (evt.getNewValue() == Boolean.FALSE) {
updateProvidersButton();
updateSearchEnabled();
}
});
getProvidersPanel().update();
providersPopup.pack();
providersPopup.show(getProvidersButton(), 0, getProvidersButton().getHeight());
providersPopup.requestFocus();
// Give the table keyboard focus so arrow keys navigate immediately.
getProvidersPanel().getTable().requestFocusInWindow();
}
/**
 * Closes and discards the popup; when {@code commit} is true, the row
 * highlighted in the panel becomes the selected provider.
 */
private void disposeProvidersPopup(boolean commit) {
if (providersPopup != null) {
if (commit && getProvidersPanel().getSelectedValue() != null)
setSelectedProvider(getProvidersPanel().getSelectedValue());
providersPopup.removeAll();
providersPopup.setVisible(false);
providersPopup = null;
}
}
/**
 * Shows the provider's options component in a dialog anchored under the
 * options button. A null component is a no-op.
 */
void showOptionsDialog(JComponent comp) {
if (comp == null)
return;
getOptionsDialog().update(comp);
final Point pt = getOptionsButton().getLocationOnScreen();
getOptionsDialog().setLocation(pt.x, pt.y + getOptionsButton().getHeight());
getOptionsDialog().pack();
getOptionsDialog().setVisible(true);
getOptionsDialog().requestFocus();
}
// Disposes the options dialog (lazily created by getOptionsDialog()).
private void disposeOptionsDialog() {
getOptionsDialog().dispose();
}
/**
 * Builds the bar's own layout: provider button, selector arrow, the content
 * pane (query component + options), and the search button, then starts with
 * an empty provider set.
 */
private void init() {
setBackground(UIManager.getColor("Table.background"));
setBorder(BorderFactory.createCompoundBorder(
BorderFactory.createLineBorder(UIManager.getColor("Separator.foreground")),
BorderFactory.createEmptyBorder(2, 1, 2, 1)
));
final GroupLayout layout = new GroupLayout(this);
setLayout(layout);
layout.setAutoCreateContainerGaps(false);
layout.setAutoCreateGaps(false);
// The content pane's preferred height fixes the bar's vertical size.
int maxHeight = getContentPane().getPreferredSize().height;
layout.setHorizontalGroup(layout.createSequentialGroup()
.addComponent(getProvidersButton(), PREFERRED_SIZE, DEFAULT_SIZE, PREFERRED_SIZE)
.addComponent(getProviderSelectorButton(), PREFERRED_SIZE, DEFAULT_SIZE, PREFERRED_SIZE)
.addComponent(getContentPane(), DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(getSearchButton(), PREFERRED_SIZE, DEFAULT_SIZE, PREFERRED_SIZE)
);
layout.setVerticalGroup(layout.createParallelGroup(CENTER, true)
.addComponent(getProvidersButton(), DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(getProviderSelectorButton(), DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(getContentPane(), maxHeight, maxHeight, maxHeight)
.addComponent(getSearchButton(), DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
);
update(Collections.emptyList());
}
// Lazily creates the icon button that shows the current provider and opens
// the provider-chooser popup when clicked.
JButton getProvidersButton() {
if (providersButton == null) {
providersButton = new JButton(emptyIcon);
styleButton(providersButton, ICON_SIZE, providersButton.getFont(), -1);
providersButton.addActionListener(evt -> {
showProvidersPopup();
});
updateProvidersButton();
}
return providersButton;
}
// Lazily creates the small arrow button; clicking it simply delegates to the
// providers button so both open the same popup.
JButton getProviderSelectorButton() {
if (providerSelectorButton == null) {
providerSelectorButton = new JButton(IconManager.ICON_SORT_DOWN);
providerSelectorButton.setToolTipText("Click to select a search provider...");
styleButton(providerSelectorButton, 12, serviceRegistrar.getService(IconManager.class).getIconFont(10.0f),
SwingConstants.RIGHT);
providerSelectorButton.addActionListener(evt -> {
getProvidersButton().doClick();
});
}
return providerSelectorButton;
}
// Lazily creates the pane holding the query component and the options button.
JPanel getContentPane() {
if (contentPane == null) {
contentPane = new JPanel();
contentPane.setBackground(getBackground());
updateSelectedSearchComponent(getSearchTextField());
}
return contentPane;
}
// Lazily creates the default query field. The custom paint() draws the
// placeholder text (DEF_SEARCH_TEXT) whenever the field is empty.
JTextField getSearchTextField() {
if (searchTextField == null) {
final Color msgColor = UIManager.getColor("Label.disabledForeground");
final int vgap = 1;
final int hgap = 5;
searchTextField = new JTextField() {
@Override
public void paint(Graphics g) {
super.paint(g);
if (getText() == null || getText().trim().isEmpty()) {
// Set antialiasing
Graphics2D g2 = (Graphics2D) g.create();
g2.setRenderingHints(
new RenderingHints(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON));
// Set the font
g2.setFont(getFont());
// Get the FontMetrics
FontMetrics metrics = g2.getFontMetrics(getFont());
// Determine the X coordinate for the text
int x = hgap;
// Determine the Y coordinate for the text (note we add the ascent, as in java 2d 0 is top of the screen)
int y = (metrics.getHeight() / 2) + metrics.getAscent() + vgap;
// Draw
g2.setColor(msgColor);
g2.drawString(DEF_SEARCH_TEXT, x, y);
g2.dispose();
}
}
};
searchTextField.setBackground(getBackground());
searchTextField.setMinimumSize(searchTextField.getPreferredSize());
searchTextField.setBorder(BorderFactory.createEmptyBorder(vgap, hgap, vgap, hgap));
searchTextField.setFont(searchTextField.getFont().deriveFont(LookAndFeelUtil.getSmallFontSize()));
}
return searchTextField;
}
// Lazily creates the "more options" toggle rendered with a font icon.
JToggleButton getOptionsButton() {
if (optionsButton == null) {
optionsButton = new JToggleButton(IconManager.ICON_BARS);
optionsButton.setToolTipText("More Options...");
styleButton(optionsButton, 32, serviceRegistrar.getService(IconManager.class).getIconFont(14.0f),
SwingConstants.LEFT);
}
return optionsButton;
}
// Lazily creates the search (magnifier) button; its enabled state is managed
// by updateSearchButton().
JButton getSearchButton() {
if (searchButton == null) {
searchButton = new JButton(IconManager.ICON_SEARCH);
styleButton(searchButton, 32, serviceRegistrar.getService(IconManager.class).getIconFont(16.0f),
SwingConstants.LEFT);
searchButton.setBorder(
BorderFactory.createMatteBorder(0, 1, 0, 0, UIManager.getColor("Separator.foreground")));
searchButton.setToolTipText("Search Network");
}
return searchButton;
}
// Lazily creates the provider list panel shown inside the popup.
ProvidersPanel getProvidersPanel() {
if (providersPanel == null) {
providersPanel = new ProvidersPanel();
}
return providersPanel;
}
// Lazily creates the dialog that hosts a provider's options component.
public OptionsDialog getOptionsDialog() {
if (optionsDialog == null) {
optionsDialog = new OptionsDialog();
}
return optionsDialog;
}
/**
 * Applies the bar's flat button style: given font, no content fill, a
 * one-pixel separator on the requested side (LEFT/RIGHT, anything else gets
 * a plain empty border), and a square-ish fixed size no shorter than the
 * query field.
 */
private void styleButton(AbstractButton btn, int width, Font font, int borderSide) {
btn.setFont(font);
btn.setContentAreaFilled(false);
if (borderSide == SwingConstants.LEFT)
btn.setBorder(BorderFactory.createCompoundBorder(
BorderFactory.createEmptyBorder(1, 0, 1, 1),
BorderFactory.createMatteBorder(0, 1, 0, 0, UIManager.getColor("Separator.foreground"))
));
else if (borderSide == SwingConstants.RIGHT)
btn.setBorder(BorderFactory.createCompoundBorder(
BorderFactory.createEmptyBorder(1, 1, 1, 0),
BorderFactory.createMatteBorder(0, 0, 0, 1, UIManager.getColor("Separator.foreground"))
));
else
btn.setBorder(BorderFactory.createEmptyBorder(1, 1, 1, 1));
Dimension d = new Dimension(width, Math.max(width, getSearchTextField().getPreferredSize().height));
btn.setMinimumSize(d);
btn.setPreferredSize(d);
}
class ProvidersPanel extends JPanel {
// Cap on rows shown before the popup scrolls.
private final static int MAX_VISIBLE_ROWS = 10;
// Table columns: provider icon, provider (name), website link.
private final static int COL_COUNT = 3;
final static int ICON_COL_IDX = 0;
final static int NAME_COL_IDX = 1;
final static int WEBSITE_COL_IDX = 2;
private JScrollPane scrollPane;
private JTable table;
// Builds the panel: a scrollable provider table with key bindings installed.
ProvidersPanel() {
setLayout(new BorderLayout());
add(getScrollPane(), BorderLayout.CENTER);
setKeyBindings(getTable());
}
// The provider object is stored in the NAME column of the model.
private NetworkSearchTaskFactory getProvider(int row) {
return (NetworkSearchTaskFactory) getTable().getModel().getValueAt(row, NAME_COL_IDX);
}
// Returns the provider of the selected row, or null when nothing is selected.
public NetworkSearchTaskFactory getSelectedValue() {
int row = getTable().getSelectedRow();
return row != -1 ? getProvider(row) : null;
}
// Lazily wraps the table in a vertical-only scroll pane.
JScrollPane getScrollPane() {
if (scrollPane == null) {
scrollPane = new JScrollPane(getTable());
scrollPane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED);
scrollPane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
}
return scrollPane;
}
JTable getTable() {
if (table == null) {
DefaultTableModel model = new DefaultTableModel() {
@Override
public boolean isCellEditable(int row, int column) {
return false;
}
};
model.setColumnCount(COL_COUNT);
table = new JTable(model);
table.setDefaultRenderer(Object.class, new ProvidersTableCellRenderer());
table.setTableHeader(null);
table.setIntercellSpacing(new Dimension(0, 0));
table.setShowGrid(false);
table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
table.setColumnSelectionAllowed(false);
table.setRowHeight(ICON_SIZE + 2);
table.addMouseMotionListener(new MouseMotionAdapter() {
@Override
public void mouseMoved(MouseEvent e) {
int row = getTable().rowAtPoint(e.getPoint());
if (row != -1)
setSelectedRow(row);
}
});
table.addMouseListener(new MouseAdapter() {
@Override
public void mousePressed(MouseEvent e) {
int col = getTable().columnAtPoint(e.getPoint());
int row = getTable().rowAtPoint(e.getPoint());
if (row != -1) {
NetworkSearchTaskFactory tf = getProvider(row);
if (col == WEBSITE_COL_IDX && tf != null && tf.getWebsite() != null) {
serviceRegistrar.getService(OpenBrowser.class).openURL(tf.getWebsite().toString());
} else {
getTable().repaint();
disposeProvidersPopup(true);
}
}
}
});
// Provider descriptions can be very long, so let's make the tooltip visible for a few minutes
// to give the user a chance to read them
table.addMouseListener(new DismissDelayMouseAdapter((int) TimeUnit.MINUTES.toMillis(5))); // 5 min
}
return table;
}
void update() {
Object[][] data = new Object[providers.size()][COL_COUNT];
int nameWidth = 100;
int selectedRow = -1;
int i = 0;
Font defFont = ((ProvidersTableCellRenderer) getTable().getDefaultRenderer(Object.class)).defFont;
AffineTransform af = new AffineTransform();
FontRenderContext frc = new FontRenderContext(af, true, true);
for (NetworkSearchTaskFactory tf : providers) {
data[i][ICON_COL_IDX] = tf;
data[i][NAME_COL_IDX] = tf;
data[i][WEBSITE_COL_IDX] = tf;
if (tf.equals(getSelectedProvider()))
selectedRow = i;
nameWidth = Math.max(nameWidth, (int) (defFont.getStringBounds(tf.getName(), frc).getWidth()));
i++;
}
nameWidth = Math.min(340, nameWidth);
DefaultTableModel model = (DefaultTableModel) getTable().getModel();
model.setDataVector(data, new String[COL_COUNT]);
getTable().getColumnModel().getColumn(ICON_COL_IDX).setMinWidth(ICON_SIZE);
getTable().getColumnModel().getColumn(ICON_COL_IDX).setMaxWidth(ICON_SIZE);
getTable().getColumnModel().getColumn(NAME_COL_IDX).setMinWidth(nameWidth + 10);
getTable().getColumnModel().getColumn(WEBSITE_COL_IDX).setMinWidth(32);
getTable().getColumnModel().getColumn(WEBSITE_COL_IDX).setMaxWidth(32);
setSelectedRow(selectedRow);
getTable().repaint();
int w = getTable().getColumnModel().getTotalColumnWidth() + 20;
int h = providers.size() <= MAX_VISIBLE_ROWS ?
getTable().getPreferredSize().height : getTable().getRowHeight() * MAX_VISIBLE_ROWS;
getScrollPane().getViewport().setPreferredSize(new Dimension(w, h));
}
void setSelectedRow(int row) {
if (row != -1)
getTable().setRowSelectionInterval(row, row);
}
private class ProvidersTableCellRenderer extends DefaultTableCellRenderer {
final IconManager iconManager = serviceRegistrar.getService(IconManager.class);
final Font defFont = getFont().deriveFont(LookAndFeelUtil.getSmallFontSize());
final Font iconFont = iconManager.getIconFont(12.0f);
final Border defBorder = BorderFactory.createCompoundBorder(
BorderFactory.createEmptyBorder(1, 0, 0, 0),
BorderFactory.createMatteBorder(0, 0, 1, 0, UIManager.getColor("Separator.foreground"))
);
final Border nameBorder = BorderFactory.createCompoundBorder(
defBorder,
BorderFactory.createEmptyBorder(0, 10, 0, 0)
);
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected,
boolean hasFocus, int row, int column) {
super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
setForeground(UIManager.getColor("Label.foreground"));
setBackground(UIManager.getColor(isSelected ? "Table.selectionBackground" : "Table.background"));
setHorizontalAlignment(CENTER);
setFont(defFont);
setText(null);
setIcon(null);
setBorder(defBorder);
if (value instanceof NetworkSearchTaskFactory) {
NetworkSearchTaskFactory tf = (NetworkSearchTaskFactory) value;
setToolTipText(tf.getDescription());
switch (column) {
case ICON_COL_IDX:
Icon icon = providerIcons.get(tf);
setIcon(icon != null ? icon : emptyIcon);
break;
case NAME_COL_IDX:
setText(tf.getName());
setHorizontalAlignment(LEFT);
setBorder(nameBorder);
break;
case WEBSITE_COL_IDX:
URL url = tf.getWebsite();
setText(url != null ? IconManager.ICON_EXTERNAL_LINK : "");
setFont(iconFont);
setForeground(UIManager.getColor("Table.focusCellBackground"));
setToolTipText(url != null ? "Visit Website..." : null);
break;
}
}
return this;
}
}
private void setKeyBindings(JComponent comp) {
final ActionMap actionMap = comp.getActionMap();
final InputMap inputMap = comp.getInputMap(WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), KeyAction.VK_ENTER);
inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0), KeyAction.VK_SPACE);
inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_TAB, 0), KeyAction.VK_TAB);
actionMap.put(KeyAction.VK_ENTER, new KeyAction(KeyAction.VK_ENTER));
actionMap.put(KeyAction.VK_SPACE, new KeyAction(KeyAction.VK_SPACE));
actionMap.put(KeyAction.VK_TAB, new KeyAction(KeyAction.VK_TAB));
}
private class KeyAction extends AbstractAction {
final static String VK_ENTER = "VK_ENTER";
final static String VK_SPACE = "VK_SPACE";
final static String VK_TAB = "VK_TAB";
KeyAction(final String actionCommand) {
putValue(ACTION_COMMAND_KEY, actionCommand);
}
@Override
public void actionPerformed(final ActionEvent e) {
final String cmd = e.getActionCommand();
if (cmd.equals(VK_ENTER) || cmd.equals(VK_SPACE) || cmd.equals(VK_TAB))
disposeProvidersPopup(true);
}
}
}
/**
 * Undecorated, modeless dialog that hosts a provider's custom options
 * component below the options button. It disposes itself when it loses focus,
 * unless it has opened a visible child window of its own.
 */
private class OptionsDialog extends JDialog {

	public OptionsDialog() {
		super(SwingUtilities.getWindowAncestor(NetworkSearchBar.this), ModalityType.MODELESS);
		// NOTE(review): this resolves to the dialog's own getBackground(), so it is
		// effectively a no-op; possibly the search bar's background was intended — confirm.
		setBackground(getBackground());
		setUndecorated(true);
		// FIX: JDialog has no setBorder(), so the original unqualified setBorder(...) call
		// resolved to the enclosing NetworkSearchBar's JComponent.setBorder() and silently
		// replaced the search bar's border. Put the line border on the dialog's root pane.
		getRootPane().setBorder(BorderFactory.createLineBorder(UIManager.getColor("Separator.foreground")));
		addWindowListener(new WindowAdapter() {
			@Override
			public void windowClosed(WindowEvent e) {
				// Re-evaluate the enabled state of the search widgets when the options close.
				updateSearchEnabled();
			}
		});
		addWindowFocusListener(new WindowFocusListener() {
			@Override
			public void windowLostFocus(WindowEvent e) {
				// If the a component in the Options popup opens another dialog, the Options one
				// loses focus, but we don't want it to be disposed.
				if (!hasVisibleOwnedWindows(OptionsDialog.this)) {
					if (isShowing()) {
						// If cursor is over the options button, set the toggle button to not-selected
						// to prevent it from opening the dialog again right after its disposed
						Point mouseLoc = MouseInfo.getPointerInfo().getLocation();
						Point buttonLoc = getOptionsButton().getLocationOnScreen();
						mouseLoc.x -= buttonLoc.x;
						mouseLoc.y -= buttonLoc.y;
						if (!getOptionsButton().contains(mouseLoc))
							getOptionsButton().setSelected(false);
					}
					// Dispose
					disposeOptionsDialog();
				}
			}
			@Override
			public void windowGainedFocus(WindowEvent e) {
				// No-op.
			}
		});
	}

	/** Installs the given component as the dialog's content and wires ESC to close it. */
	void update(JComponent comp) {
		setContentPane(comp);
		setKeyBindings(comp);
	}

	private void setKeyBindings(JComponent comp) {
		final ActionMap actionMap = comp.getActionMap();
		final InputMap inputMap = comp.getInputMap(WHEN_IN_FOCUSED_WINDOW);
		inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), KeyAction.VK_ESCAPE);
		actionMap.put(KeyAction.VK_ESCAPE, new KeyAction(KeyAction.VK_ESCAPE));
	}

	/** ESC closes the dialog and un-toggles the options button. */
	private class KeyAction extends AbstractAction {

		final static String VK_ESCAPE = "VK_ESCAPE";

		KeyAction(final String actionCommand) {
			putValue(ACTION_COMMAND_KEY, actionCommand);
		}

		@Override
		public void actionPerformed(final ActionEvent e) {
			final String cmd = e.getActionCommand();
			if (cmd.equals(VK_ESCAPE)) {
				disposeOptionsDialog();
				getOptionsButton().setSelected(false);
			}
		}
	}
}
/**
* Hack to prolong a tooltip’s visible delay
* Thanks to: http://tech.chitgoks.com/2010/05/31/disable-tooltip-delay-in-java-swing/
*/
private class DismissDelayMouseAdapter extends MouseAdapter {

	/** The Swing default dismiss delay, restored on mouse-exit. */
	final int defaultDismissTimeout = ToolTipManager.sharedInstance().getDismissDelay();
	/** Extended dismiss delay, in milliseconds (the original field was misleadingly named "...Minutes"). */
	final int dismissDelayMs;

	/**
	 * @param milliseconds how long tooltips should stay visible while the
	 *                     mouse is over the component this adapter is attached to
	 */
	public DismissDelayMouseAdapter(int milliseconds) {
		dismissDelayMs = milliseconds;
	}

	@Override
	public void mouseEntered(final MouseEvent e) {
		// ToolTipManager is a global singleton, so only extend the delay while hovering.
		ToolTipManager.sharedInstance().setDismissDelay(dismissDelayMs);
	}

	@Override
	public void mouseExited(final MouseEvent e) {
		ToolTipManager.sharedInstance().setDismissDelay(defaultDismissTimeout);
	}
}
/**
 * Placeholder icon (a faint "ban" glyph) painted when a provider has no icon.
 */
private class EmptyIcon implements Icon {

	private final String text = IconManager.ICON_BAN;
	private final Color fgColor;
	private final int width;
	private final int height;

	public EmptyIcon(int width, int height) {
		this.width = width;
		this.height = height;
		// Some Look&Feels may not define this UI key; fall back to a neutral
		// gray instead of throwing a NullPointerException.
		Color c = UIManager.getColor("Label.disabledForeground");
		if (c == null)
			c = Color.GRAY;
		fgColor = new Color(c.getRed(), c.getGreen(), c.getBlue(), 60); // mostly transparent
	}

	@Override
	public void paintIcon(Component c, Graphics g, int x, int y) {
		// Set antialiasing
		Graphics2D g2 = (Graphics2D) g;
		g2.setRenderingHints(
				new RenderingHints(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON));
		// Set the font
		Font font = serviceRegistrar.getService(IconManager.class).getIconFont(28.0f);
		g2.setFont(font);
		// Get the FontMetrics
		FontMetrics metrics = g2.getFontMetrics(font);
		// Determine the X coordinate for the text
		int sx = x + 2 + (width - metrics.stringWidth(text)) / 2;
		// Determine the Y coordinate for the text (note we add the ascent, as in java 2d 0 is top of the screen)
		int sy = y + 1 + ((height - metrics.getHeight()) / 2) + metrics.getAscent();
		// Draw
		g2.setColor(fgColor);
		g2.drawString(text, sx, sy);
	}

	@Override
	public int getIconWidth() {
		return width;
	}

	@Override
	public int getIconHeight() {
		return height;
	}
}
}
| swing-application-impl/src/main/java/org/cytoscape/internal/view/NetworkSearchBar.java | package org.cytoscape.internal.view;
import static javax.swing.GroupLayout.DEFAULT_SIZE;
import static javax.swing.GroupLayout.PREFERRED_SIZE;
import static javax.swing.GroupLayout.Alignment.CENTER;
import static org.cytoscape.internal.util.ViewUtil.DEFAULT_PROVIDER_PROP_KEY;
import static org.cytoscape.internal.util.ViewUtil.getViewProperty;
import static org.cytoscape.internal.util.ViewUtil.hasVisibleOwnedWindows;
import static org.cytoscape.internal.util.ViewUtil.setViewProperty;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.MouseInfo;
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.event.WindowFocusListener;
import java.awt.font.FontRenderContext;
import java.awt.geom.AffineTransform;
import java.net.URL;
import java.text.Collator;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import javax.swing.AbstractAction;
import javax.swing.AbstractButton;
import javax.swing.ActionMap;
import javax.swing.BorderFactory;
import javax.swing.GroupLayout;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.InputMap;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.JToggleButton;
import javax.swing.KeyStroke;
import javax.swing.ListSelectionModel;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.ToolTipManager;
import javax.swing.UIManager;
import javax.swing.border.Border;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import org.cytoscape.application.swing.search.NetworkSearchTaskFactory;
import org.cytoscape.internal.util.RandomImage;
import org.cytoscape.service.util.CyServiceRegistrar;
import org.cytoscape.util.swing.IconManager;
import org.cytoscape.util.swing.LookAndFeelUtil;
import org.cytoscape.util.swing.OpenBrowser;
/*
* #%L
* Cytoscape Swing Application Impl (swing-application-impl)
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2006 - 2017 The Cytoscape Consortium
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-2.1.html>.
* #L%
*/
@SuppressWarnings("serial")
public class NetworkSearchBar extends JPanel {
// Width/height used for provider icons and the square toolbar buttons.
private static final int ICON_SIZE = 32;
// Placeholder painted over the query field while it is empty.
private static final String DEF_SEARCH_TEXT = "Type your query here...";

// Lazily-created UI components:
private JButton providersButton;
private JButton providerSelectorButton;
private JPanel contentPane;
private JTextField searchTextField;
private JToggleButton optionsButton;
private JButton searchButton;
private JPopupMenu providersPopup;
private ProvidersPanel providersPanel;
private OptionsDialog optionsDialog;

// Fallback icon for providers that do not supply one.
private final EmptyIcon emptyIcon = new EmptyIcon(ICON_SIZE, ICON_SIZE);
// Icons per provider, pre-scaled to ICON_SIZE where necessary (see update()).
private final Map<NetworkSearchTaskFactory, Icon> providerIcons = new HashMap<>();
// All registered providers, kept sorted by display name (see constructor).
private final TreeSet<NetworkSearchTaskFactory> providers;

/** This should only be set when the user explicitly selects a provider */
private NetworkSearchTaskFactory selectedProvider;

private final CyServiceRegistrar serviceRegistrar;
/**
 * Creates the network search bar.
 *
 * @param serviceRegistrar used to look up Cytoscape services (icon fonts, browser, view properties)
 */
public NetworkSearchBar(CyServiceRegistrar serviceRegistrar) {
	this.serviceRegistrar = serviceRegistrar;
	// Keep the provider set sorted alphabetically by name, locale-aware.
	final Collator collator = Collator.getInstance();
	providers = new TreeSet<>((NetworkSearchTaskFactory o1, NetworkSearchTaskFactory o2) -> {
		return collator.compare(o1.getName(), o2.getName());
	});
	init();
}
/**
 * Persists a provider id as the default (via a view property) so it can be
 * pre-selected when Cytoscape restarts. If the suggested provider is null or
 * not currently registered, the previously saved default (if still resolvable)
 * is re-saved instead; otherwise the property is left untouched.
 */
private void setDefaultProvider(NetworkSearchTaskFactory suggestedProvider) {
	if (suggestedProvider == null || !providers.contains(suggestedProvider)) {
		// Check if there is a CyProperty for this
		String id = getViewProperty(DEFAULT_PROVIDER_PROP_KEY, serviceRegistrar);
		if (id != null)
			suggestedProvider = getProvider(id);
	}
	if (suggestedProvider != null) // Update the CyProperty
		setViewProperty(DEFAULT_PROVIDER_PROP_KEY, suggestedProvider.getId(), serviceRegistrar);
}
/** Returns the provider whose id is stored as the default view property, or null. */
private NetworkSearchTaskFactory getDefaultProvider() {
	final String savedId = getViewProperty(DEFAULT_PROVIDER_PROP_KEY, serviceRegistrar);
	return getProvider(savedId);
}
/**
 * Records the provider explicitly chosen by the user, persists it as the new
 * default, and fires a "selectedProvider" property change if the effective
 * provider (see {@link #getSelectedProvider()}) actually changed.
 */
public void setSelectedProvider(NetworkSearchTaskFactory newValue) {
	if (newValue != selectedProvider) {
		NetworkSearchTaskFactory oldValue = getSelectedProvider(); // Get the actual "current" provider
		selectedProvider = newValue;
		// Save the last selected provider now, so it can be selected by default when Cytoscape restarts
		setDefaultProvider(newValue);
		if (newValue != oldValue)
			firePropertyChange("selectedProvider", oldValue, newValue);
	}
}
/**
 * Returns the effective current provider: the one the user explicitly picked
 * if any, otherwise the persisted default, otherwise the first registered
 * provider, or null when none are registered.
 */
public NetworkSearchTaskFactory getSelectedProvider() {
	if (selectedProvider != null)
		return selectedProvider;
	final NetworkSearchTaskFactory preferred = getDefaultProvider();
	if (preferred != null)
		return preferred;
	return providers.isEmpty() ? null : providers.first();
}
/** Returns the registered provider with the given id, or null if none matches. */
public NetworkSearchTaskFactory getProvider(String id) {
	return providers.stream()
			.filter(tf -> tf.getId().equals(id))
			.findFirst()
			.orElse(null);
}
/**
 * Enables/disables the whole bar by propagating the state to every child widget.
 */
@Override
public void setEnabled(boolean enabled) {
	super.setEnabled(enabled);
	getProvidersButton().setEnabled(enabled);
	getProviderSelectorButton().setEnabled(enabled);
	getSearchTextField().setEnabled(enabled);
	getOptionsButton().setEnabled(enabled);
	getSearchButton().setEnabled(enabled);
}
/**
 * Replaces the registered provider set, (re)builds the icon cache (scaling
 * oversized ImageIcons down to ICON_SIZE, generating a random image for
 * providers without an icon), clears a stale user selection, and fires a
 * "selectedProvider" change if the effective provider changed.
 *
 * @param newProviders the new provider set; null or empty clears everything
 */
void update(Collection<NetworkSearchTaskFactory> newProviders) {
	NetworkSearchTaskFactory oldSelected = getSelectedProvider();
	providers.clear();
	providerIcons.clear();
	if (newProviders != null) {
		providers.addAll(newProviders);
		newProviders.forEach(p -> {
			Icon icon = p.getIcon();
			if (icon instanceof ImageIcon) {
				ImageIcon ii = (ImageIcon) icon;
				if (ii.getIconWidth() > ICON_SIZE || ii.getIconHeight() > ICON_SIZE)
					icon = new ImageIcon(ii.getImage().getScaledInstance(ICON_SIZE, ICON_SIZE, Image.SCALE_SMOOTH));
			}
			providerIcons.put(p, icon != null ? icon : new ImageIcon(new RandomImage(ICON_SIZE, ICON_SIZE)));
		});
	}
	// FIX: guard against null newProviders -- the previous code dereferenced it
	// here and would NPE, even though null is explicitly tolerated above.
	if (selectedProvider != null && (newProviders == null || !newProviders.contains(selectedProvider)))
		selectedProvider = null;
	// We are not changing the selectedProvider field here (only the user should do it),
	// but still need to let the widget know that the actual "current" provider has changed.
	// This is done this way to prevent a core provider from preventing another preferred one
	// (from third-party apps) from being pre-selected when Cytoscape restarts,
	// since the preferred one is auto-selected only when the user has not selected another provider yet.
	NetworkSearchTaskFactory newSelected = getSelectedProvider();
	if (newSelected != oldSelected)
		firePropertyChange("selectedProvider", oldSelected, newSelected);
}
/**
 * Refreshes the provider button's icon and tooltip from the current provider,
 * and disables both provider buttons when no providers are registered.
 */
void updateProvidersButton() {
	NetworkSearchTaskFactory currentProvider = getSelectedProvider();
	if (currentProvider != null) {
		Icon icon = providerIcons.get(currentProvider);
		getProvidersButton().setIcon(icon != null ? icon : emptyIcon);
		getProvidersButton().setToolTipText(currentProvider.getName());
	} else {
		getProvidersButton().setIcon(emptyIcon);
		getProvidersButton().setToolTipText("Please select a search provider...");
	}
	getProvidersButton().setEnabled(!providers.isEmpty());
	getProviderSelectorButton().setEnabled(!providers.isEmpty());
}
/** Enables the query widgets only when a provider is effectively selected. */
void updateSearchEnabled() {
	boolean enabled = getSelectedProvider() != null;
	getSearchTextField().setEnabled(enabled);
	getOptionsButton().setEnabled(enabled);
	updateSearchButton();
}
/** Enables the search button only when the current provider reports it is ready to run. */
void updateSearchButton() {
	NetworkSearchTaskFactory tf = getSelectedProvider();
	getSearchButton().setEnabled(tf != null && tf.isReady());
}
/**
 * Replaces the bar's query area with the given provider-supplied component
 * (falling back to the default text field when null), keeping the options
 * button to its right.
 */
void updateSelectedSearchComponent(JComponent queryComp) {
	getContentPane().removeAll();
	if (queryComp == null)
		queryComp = getSearchTextField();
	final GroupLayout layout = new GroupLayout(getContentPane());
	getContentPane().setLayout(layout);
	layout.setAutoCreateContainerGaps(false);
	layout.setAutoCreateGaps(false);
	layout.setHorizontalGroup(layout.createSequentialGroup()
			.addComponent(queryComp, DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
			.addComponent(getOptionsButton(), PREFERRED_SIZE, DEFAULT_SIZE, PREFERRED_SIZE)
	);
	layout.setVerticalGroup(layout.createParallelGroup(CENTER, true)
			.addComponent(queryComp, DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
			.addComponent(getOptionsButton(), DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
	);
}
/**
 * Opens the provider-selection popup below the providers button.
 * Does nothing when no providers are registered.
 */
private void showProvidersPopup() {
	if (providers.isEmpty())
		return;
	if (providersPopup != null)
		disposeProvidersPopup(false); // Just to make sure there will never be more than one dialog
	providersPopup = new JPopupMenu();
	providersPopup.setBackground(getBackground());
	providersPopup.setBorder(BorderFactory.createEmptyBorder());
	providersPopup.setLayout(new BorderLayout());
	providersPopup.add(getProvidersPanel(), BorderLayout.CENTER);
	// Refresh the button/search state whenever the popup is hidden, however that happens.
	providersPopup.addPropertyChangeListener("visible", evt -> {
		if (evt.getNewValue() == Boolean.FALSE) {
			updateProvidersButton();
			updateSearchEnabled();
		}
	});
	getProvidersPanel().update();
	providersPopup.pack();
	providersPopup.show(getProvidersButton(), 0, getProvidersButton().getHeight());
	providersPopup.requestFocus();
	// Give the table keyboard focus so ENTER/SPACE/TAB bindings work immediately.
	getProvidersPanel().getTable().requestFocusInWindow();
}
/**
 * Closes the providers popup.
 *
 * @param commit if true, the popup's selected row (if any) becomes the selected provider
 */
private void disposeProvidersPopup(boolean commit) {
	if (providersPopup != null) {
		if (commit && getProvidersPanel().getSelectedValue() != null)
			setSelectedProvider(getProvidersPanel().getSelectedValue());
		providersPopup.removeAll();
		providersPopup.setVisible(false);
		providersPopup = null;
	}
}
/**
 * Shows the given provider options component in a modeless dialog anchored
 * just below the options button. Does nothing when the component is null.
 */
void showOptionsDialog(JComponent comp) {
	if (comp == null)
		return;
	getOptionsDialog().update(comp);
	final Point pt = getOptionsButton().getLocationOnScreen();
	getOptionsDialog().setLocation(pt.x, pt.y + getOptionsButton().getHeight());
	getOptionsDialog().pack();
	getOptionsDialog().setVisible(true);
	getOptionsDialog().requestFocus();
}
/** Disposes the options dialog; its windowClosed handler refreshes the search state. */
private void disposeOptionsDialog() {
	getOptionsDialog().dispose();
}
/**
 * Builds the bar: [provider icon][selector arrow][query content][search button],
 * all sharing the content pane's preferred height.
 */
private void init() {
	setBackground(UIManager.getColor("Table.background"));
	setBorder(BorderFactory.createCompoundBorder(
			BorderFactory.createLineBorder(UIManager.getColor("Separator.foreground")),
			BorderFactory.createEmptyBorder(2, 1, 2, 1)
	));
	final GroupLayout layout = new GroupLayout(this);
	setLayout(layout);
	layout.setAutoCreateContainerGaps(false);
	layout.setAutoCreateGaps(false);
	// Fix the vertical size to the content pane's preferred height.
	int maxHeight = getContentPane().getPreferredSize().height;
	layout.setHorizontalGroup(layout.createSequentialGroup()
			.addComponent(getProvidersButton(), PREFERRED_SIZE, DEFAULT_SIZE, PREFERRED_SIZE)
			.addComponent(getProviderSelectorButton(), PREFERRED_SIZE, DEFAULT_SIZE, PREFERRED_SIZE)
			.addComponent(getContentPane(), DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
			.addComponent(getSearchButton(), PREFERRED_SIZE, DEFAULT_SIZE, PREFERRED_SIZE)
	);
	layout.setVerticalGroup(layout.createParallelGroup(CENTER, true)
			.addComponent(getProvidersButton(), DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
			.addComponent(getProviderSelectorButton(), DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
			.addComponent(getContentPane(), maxHeight, maxHeight, maxHeight)
			.addComponent(getSearchButton(), DEFAULT_SIZE, DEFAULT_SIZE, Short.MAX_VALUE)
	);
	// Start with an empty provider set; real providers are registered later.
	update(Collections.emptyList());
}
/**
 * Lazily creates the button showing the current provider's icon;
 * clicking it opens the provider-selection popup.
 */
JButton getProvidersButton() {
	if (providersButton == null) {
		providersButton = new JButton(emptyIcon);
		styleButton(providersButton, ICON_SIZE, providersButton.getFont(), -1);
		providersButton.addActionListener(evt -> {
			showProvidersPopup();
		});
		updateProvidersButton();
	}
	return providersButton;
}
/**
 * Lazily creates the small arrow button next to the provider icon;
 * it simply delegates the click to the providers button.
 */
JButton getProviderSelectorButton() {
	if (providerSelectorButton == null) {
		providerSelectorButton = new JButton(IconManager.ICON_SORT_DOWN);
		providerSelectorButton.setToolTipText("Click to select a search provider...");
		styleButton(providerSelectorButton, 12, serviceRegistrar.getService(IconManager.class).getIconFont(10.0f),
				SwingConstants.RIGHT);
		providerSelectorButton.addActionListener(evt -> {
			getProvidersButton().doClick();
		});
	}
	return providerSelectorButton;
}
/** Lazily creates the panel that hosts the provider's query component. */
JPanel getContentPane() {
	if (contentPane != null)
		return contentPane;
	contentPane = new JPanel();
	contentPane.setBackground(getBackground());
	// Start with the default query text field.
	updateSelectedSearchComponent(getSearchTextField());
	return contentPane;
}
/**
 * Lazily creates the default query field, which paints a gray placeholder
 * message on top of itself while it is empty.
 */
JTextField getSearchTextField() {
	if (searchTextField == null) {
		final Color msgColor = UIManager.getColor("Label.disabledForeground");
		final int vgap = 1;
		final int hgap = 5;
		searchTextField = new JTextField() {
			@Override
			public void paint(Graphics g) {
				super.paint(g);
				// Overlay the placeholder only when the field has no text.
				if (getText() == null || getText().trim().isEmpty()) {
					// Set antialiasing
					Graphics2D g2 = (Graphics2D) g.create();
					g2.setRenderingHints(
							new RenderingHints(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON));
					// Set the font
					g2.setFont(getFont());
					// Get the FontMetrics
					FontMetrics metrics = g2.getFontMetrics(getFont());
					// Determine the X coordinate for the text
					int x = hgap;
					// Determine the Y coordinate for the text (note we add the ascent, as in java 2d 0 is top of the screen)
					int y = (metrics.getHeight() / 2) + metrics.getAscent() + vgap;
					// Draw
					g2.setColor(msgColor);
					g2.drawString(DEF_SEARCH_TEXT, x, y);
					g2.dispose();
				}
			}
		};
		searchTextField.setBackground(getBackground());
		searchTextField.setMinimumSize(searchTextField.getPreferredSize());
		searchTextField.setBorder(BorderFactory.createEmptyBorder(vgap, hgap, vgap, hgap));
		searchTextField.setFont(searchTextField.getFont().deriveFont(LookAndFeelUtil.getSmallFontSize()));
	}
	return searchTextField;
}
/** Lazily creates the toggle button that opens the provider's options dialog. */
JToggleButton getOptionsButton() {
	if (optionsButton == null) {
		optionsButton = new JToggleButton(IconManager.ICON_BARS);
		optionsButton.setToolTipText("More Options...");
		styleButton(optionsButton, 32, serviceRegistrar.getService(IconManager.class).getIconFont(14.0f),
				SwingConstants.LEFT);
	}
	return optionsButton;
}
/** Lazily creates the search (magnifier) button that triggers the query. */
JButton getSearchButton() {
	if (searchButton == null) {
		searchButton = new JButton(IconManager.ICON_SEARCH);
		styleButton(searchButton, 32, serviceRegistrar.getService(IconManager.class).getIconFont(16.0f),
				SwingConstants.LEFT);
		// NOTE(review): this replaces the compound border just installed by styleButton()
		// with a plain left separator -- presumably intentional; confirm.
		searchButton.setBorder(
				BorderFactory.createMatteBorder(0, 1, 0, 0, UIManager.getColor("Separator.foreground")));
		searchButton.setToolTipText("Search Network");
	}
	return searchButton;
}
/**
 * Lazily creates and returns the panel listing the available search providers.
 */
ProvidersPanel getProvidersPanel() {
	if (providersPanel != null)
		return providersPanel;
	providersPanel = new ProvidersPanel();
	return providersPanel;
}
/**
 * Lazily creates and returns the dialog that hosts a provider's options component.
 */
public OptionsDialog getOptionsDialog() {
	if (optionsDialog != null)
		return optionsDialog;
	optionsDialog = new OptionsDialog();
	return optionsDialog;
}
/**
 * Applies the shared toolbar-button look: the given font, a transparent
 * background, a separator border on the requested side, and a fixed width.
 *
 * @param btn        the button to style
 * @param width      fixed width, in pixels
 * @param font       font used for the button's text/icon glyph
 * @param borderSide {@code SwingConstants.LEFT} or {@code SwingConstants.RIGHT}
 *                   for the separator side; any other value means no separator
 */
private void styleButton(AbstractButton btn, int width, Font font, int borderSide) {
	btn.setFont(font);
	btn.setContentAreaFilled(false);
	final Color sepColor = UIManager.getColor("Separator.foreground");
	final Border border;
	switch (borderSide) {
		case SwingConstants.LEFT:
			border = BorderFactory.createCompoundBorder(
					BorderFactory.createEmptyBorder(1, 0, 1, 1),
					BorderFactory.createMatteBorder(0, 1, 0, 0, sepColor));
			break;
		case SwingConstants.RIGHT:
			border = BorderFactory.createCompoundBorder(
					BorderFactory.createEmptyBorder(1, 1, 1, 0),
					BorderFactory.createMatteBorder(0, 0, 0, 1, sepColor));
			break;
		default:
			border = BorderFactory.createEmptyBorder(1, 1, 1, 1);
			break;
	}
	btn.setBorder(border);
	// Height never smaller than the search text field's preferred height.
	final int height = Math.max(width, getSearchTextField().getPreferredSize().height);
	final Dimension size = new Dimension(width, height);
	btn.setMinimumSize(size);
	btn.setPreferredSize(size);
}
class ProvidersPanel extends JPanel {
private final static int MAX_VISIBLE_ROWS = 10;
private final static int COL_COUNT = 3;
final static int ICON_COL_IDX = 0;
final static int NAME_COL_IDX = 1;
final static int WEBSITE_COL_IDX = 2;
private JScrollPane scrollPane;
private JTable table;
// Table inside a scroll pane; key bindings on the table commit the selection.
ProvidersPanel() {
	setLayout(new BorderLayout());
	add(getScrollPane(), BorderLayout.CENTER);
	setKeyBindings(getTable());
}
/** Returns the provider displayed at the given row (all columns hold the same object). */
private NetworkSearchTaskFactory getProvider(int row) {
	return (NetworkSearchTaskFactory) getTable().getModel().getValueAt(row, NAME_COL_IDX);
}
/** Returns the provider of the selected row, or null if nothing is selected. */
public NetworkSearchTaskFactory getSelectedValue() {
	int row = getTable().getSelectedRow();
	return row != -1 ? getProvider(row) : null;
}
/** Lazily creates the scroll pane that hosts the providers table. */
JScrollPane getScrollPane() {
	if (scrollPane == null) {
		scrollPane = new JScrollPane(getTable());
		scrollPane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED);
		scrollPane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
	}
	return scrollPane;
}
/**
 * Lazily creates the read-only, single-selection providers table and wires
 * its mouse handling: hovering selects a row (menu-style), a press either
 * opens the provider's website (link column) or commits the selection.
 */
JTable getTable() {
	if (table == null) {
		DefaultTableModel model = new DefaultTableModel() {
			@Override
			public boolean isCellEditable(int row, int column) {
				return false; // display/selection only
			}
		};
		model.setColumnCount(COL_COUNT);
		table = new JTable(model);
		table.setDefaultRenderer(Object.class, new ProvidersTableCellRenderer());
		table.setTableHeader(null);
		table.setIntercellSpacing(new Dimension(0, 0));
		table.setShowGrid(false);
		table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
		table.setColumnSelectionAllowed(false);
		table.setRowHeight(ICON_SIZE + 2);
		// Hovering over a row selects it, like a menu.
		table.addMouseMotionListener(new MouseMotionAdapter() {
			@Override
			public void mouseMoved(MouseEvent e) {
				int row = getTable().rowAtPoint(e.getPoint());
				if (row != -1)
					setSelectedRow(row);
			}
		});
		table.addMouseListener(new MouseAdapter() {
			@Override
			public void mousePressed(MouseEvent e) {
				int col = getTable().columnAtPoint(e.getPoint());
				int row = getTable().rowAtPoint(e.getPoint());
				if (row != -1) {
					NetworkSearchTaskFactory tf = getProvider(row);
					if (col == WEBSITE_COL_IDX && tf != null && tf.getWebsite() != null) {
						// Link column: open the website and keep the popup open.
						serviceRegistrar.getService(OpenBrowser.class).openURL(tf.getWebsite().toString());
					} else {
						// Any other column: commit this row as the selected provider.
						getTable().repaint();
						disposeProvidersPopup(true);
					}
				}
			}
		});
		// Provider descriptions can be very long, so let's make the tooltip visible for a few minutes
		// to give the user a chance to read them
		table.addMouseListener(new DismissDelayMouseAdapter((int) TimeUnit.MINUTES.toMillis(5))); // 5 min
	}
	return table;
}
/**
 * Rebuilds the table from the current provider set, sizes the name column to
 * the longest name (capped at 340px), re-selects the current provider, and
 * limits the visible height to MAX_VISIBLE_ROWS rows.
 */
void update() {
	Object[][] data = new Object[providers.size()][COL_COUNT];
	int nameWidth = 100; // minimum name-column width, in pixels
	int selectedRow = -1;
	int i = 0;
	Font defFont = ((ProvidersTableCellRenderer) getTable().getDefaultRenderer(Object.class)).defFont;
	AffineTransform af = new AffineTransform();
	FontRenderContext frc = new FontRenderContext(af, true, true);
	for (NetworkSearchTaskFactory tf : providers) {
		// The same factory backs all three columns; the renderer decides what to draw.
		data[i][ICON_COL_IDX] = tf;
		data[i][NAME_COL_IDX] = tf;
		data[i][WEBSITE_COL_IDX] = tf;
		if (tf.equals(getSelectedProvider()))
			selectedRow = i;
		nameWidth = Math.max(nameWidth, (int) (defFont.getStringBounds(tf.getName(), frc).getWidth()));
		i++;
	}
	nameWidth = Math.min(340, nameWidth); // cap so one long name can't blow up the popup
	DefaultTableModel model = (DefaultTableModel) getTable().getModel();
	model.setDataVector(data, new String[COL_COUNT]);
	getTable().getColumnModel().getColumn(ICON_COL_IDX).setMinWidth(ICON_SIZE);
	getTable().getColumnModel().getColumn(ICON_COL_IDX).setMaxWidth(ICON_SIZE);
	getTable().getColumnModel().getColumn(NAME_COL_IDX).setMinWidth(nameWidth + 10);
	getTable().getColumnModel().getColumn(WEBSITE_COL_IDX).setMinWidth(32);
	getTable().getColumnModel().getColumn(WEBSITE_COL_IDX).setMaxWidth(32);
	setSelectedRow(selectedRow);
	getTable().repaint();
	int w = getTable().getColumnModel().getTotalColumnWidth() + 20;
	int h = providers.size() <= MAX_VISIBLE_ROWS ?
			getTable().getPreferredSize().height : getTable().getRowHeight() * MAX_VISIBLE_ROWS;
	getScrollPane().getViewport().setPreferredSize(new Dimension(w, h));
}
/** Selects the given row; -1 is ignored (selection left unchanged). */
void setSelectedRow(int row) {
	if (row != -1)
		getTable().setRowSelectionInterval(row, row);
}
/**
 * Renders all three columns from the same NetworkSearchTaskFactory value:
 * provider icon, provider name, and an external-link glyph when the provider
 * declares a website.
 */
private class ProvidersTableCellRenderer extends DefaultTableCellRenderer {

	final IconManager iconManager = serviceRegistrar.getService(IconManager.class);
	// defFont is also read by ProvidersPanel.update() to measure name widths.
	final Font defFont = getFont().deriveFont(LookAndFeelUtil.getSmallFontSize());
	final Font iconFont = iconManager.getIconFont(12.0f);
	final Border defBorder = BorderFactory.createCompoundBorder(
			BorderFactory.createEmptyBorder(1, 0, 0, 0),
			BorderFactory.createMatteBorder(0, 0, 1, 0, UIManager.getColor("Separator.foreground"))
	);
	final Border nameBorder = BorderFactory.createCompoundBorder(
			defBorder,
			BorderFactory.createEmptyBorder(0, 10, 0, 0)
	);

	@Override
	public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected,
			boolean hasFocus, int row, int column) {
		super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
		// Reset shared renderer state before applying per-column settings.
		setForeground(UIManager.getColor("Label.foreground"));
		setBackground(UIManager.getColor(isSelected ? "Table.selectionBackground" : "Table.background"));
		setHorizontalAlignment(CENTER);
		setFont(defFont);
		setText(null);
		setIcon(null);
		setBorder(defBorder);
		if (value instanceof NetworkSearchTaskFactory) {
			NetworkSearchTaskFactory tf = (NetworkSearchTaskFactory) value;
			setToolTipText(tf.getDescription());
			switch (column) {
				case ICON_COL_IDX:
					Icon icon = providerIcons.get(tf);
					setIcon(icon != null ? icon : emptyIcon);
					break;
				case NAME_COL_IDX:
					setText(tf.getName());
					setHorizontalAlignment(LEFT);
					setBorder(nameBorder);
					break;
				case WEBSITE_COL_IDX:
					URL url = tf.getWebsite();
					setText(url != null ? IconManager.ICON_EXTERNAL_LINK : "");
					setFont(iconFont);
					setForeground(UIManager.getColor("Table.focusCellBackground"));
					setToolTipText(url != null ? "Visit Website..." : null);
					break;
			}
		}
		return this;
	}
}
/**
 * Binds Enter, Space and Tab (active while comp is an ancestor of the focused
 * component) to KeyActions carrying the matching command string.
 */
private void setKeyBindings(JComponent comp) {
	final ActionMap actionMap = comp.getActionMap();
	final InputMap inputMap = comp.getInputMap(WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
	final int[] keyCodes = { KeyEvent.VK_ENTER, KeyEvent.VK_SPACE, KeyEvent.VK_TAB };
	final String[] commands = { KeyAction.VK_ENTER, KeyAction.VK_SPACE, KeyAction.VK_TAB };
	for (int i = 0; i < keyCodes.length; i++) {
		inputMap.put(KeyStroke.getKeyStroke(keyCodes[i], 0), commands[i]);
		actionMap.put(commands[i], new KeyAction(commands[i]));
	}
}
/**
 * Action bound to Enter, Space and Tab; any of these commits the current
 * provider selection by disposing the providers popup.
 */
private class KeyAction extends AbstractAction {
	static final String VK_ENTER = "VK_ENTER";
	static final String VK_SPACE = "VK_SPACE";
	static final String VK_TAB = "VK_TAB";
	KeyAction(final String actionCommand) {
		putValue(ACTION_COMMAND_KEY, actionCommand);
	}
	@Override
	public void actionPerformed(final ActionEvent e) {
		switch (e.getActionCommand()) {
			case VK_ENTER:
			case VK_SPACE:
			case VK_TAB:
				disposeProvidersPopup(true);
				break;
			default:
				// Unrecognized commands are ignored, as in the original equals() chain.
				break;
		}
	}
}
}
/**
 * Undecorated, modeless dialog that hosts a provider's options component.
 * It disposes itself when it loses focus to anything other than one of its
 * own child windows, and re-evaluates the search-enabled state when closed.
 */
private class OptionsDialog extends JDialog {
	public OptionsDialog() {
		super(SwingUtilities.getWindowAncestor(NetworkSearchBar.this), ModalityType.MODELESS);
		// NOTE(review): this reads and writes this dialog's own background, which
		// appears to be a no-op; possibly NetworkSearchBar.this.getBackground()
		// was intended -- confirm.
		setBackground(getBackground());
		setUndecorated(true);
		// NOTE(review): JDialog declares no setBorder, so this call resolves to the
		// enclosing NetworkSearchBar's JComponent.setBorder; if the intent was to
		// outline the dialog, getRootPane().setBorder(...) may have been meant -- confirm.
		setBorder(BorderFactory.createLineBorder(UIManager.getColor("Separator.foreground")));
		addWindowListener(new WindowAdapter() {
			@Override
			public void windowClosed(WindowEvent e) {
				updateSearchEnabled();
			}
		});
		addWindowFocusListener(new WindowFocusListener() {
			@Override
			public void windowLostFocus(WindowEvent e) {
				// If a component in the Options popup opens another dialog, the Options one
				// loses focus, but we don't want it to be disposed.
				if (!hasVisibleOwnedWindows(OptionsDialog.this)) {
					// If cursor is over the options button, set the toggle button to not-selected
					// to prevent it from opening the dialog again right after its disposed
					Point mouseLoc = MouseInfo.getPointerInfo().getLocation();
					Point buttonLoc = getOptionsButton().getLocationOnScreen();
					mouseLoc.x -= buttonLoc.x;
					mouseLoc.y -= buttonLoc.y;
					if (!getOptionsButton().contains(mouseLoc))
						getOptionsButton().setSelected(false);
					// Dispose
					disposeOptionsDialog();
				}
			}
			@Override
			public void windowGainedFocus(WindowEvent e) {
			}
		});
	}
	/** Installs the given component as the dialog content and wires its key bindings. */
	void update(JComponent comp) {
		setContentPane(comp);
		setKeyBindings(comp);
	}
	/** Binds Escape (window-wide) to close the dialog and deselect the options button. */
	private void setKeyBindings(JComponent comp) {
		final ActionMap actionMap = comp.getActionMap();
		final InputMap inputMap = comp.getInputMap(WHEN_IN_FOCUSED_WINDOW);
		inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), KeyAction.VK_ESCAPE);
		actionMap.put(KeyAction.VK_ESCAPE, new KeyAction(KeyAction.VK_ESCAPE));
	}
	/** Action for the Escape binding: dispose the dialog and reset the toggle button. */
	private class KeyAction extends AbstractAction {
		final static String VK_ESCAPE = "VK_ESCAPE";
		KeyAction(final String actionCommand) {
			putValue(ACTION_COMMAND_KEY, actionCommand);
		}
		@Override
		public void actionPerformed(final ActionEvent e) {
			final String cmd = e.getActionCommand();
			if (cmd.equals(VK_ESCAPE)) {
				disposeOptionsDialog();
				getOptionsButton().setSelected(false);
			}
		}
	}
}
/**
 * Hack to prolong a tooltip's visible delay while the mouse hovers a component.
 * On mouse-enter the global ToolTipManager dismiss delay is raised to the
 * configured value; on mouse-exit the original default is restored.
 * Thanks to: http://tech.chitgoks.com/2010/05/31/disable-tooltip-delay-in-java-swing/
 */
private class DismissDelayMouseAdapter extends MouseAdapter {
	/** Swing's default dismiss delay, captured at construction and restored on exit. */
	final int defaultDismissTimeout = ToolTipManager.sharedInstance().getDismissDelay();
	/** Desired dismiss delay, in milliseconds (field was misleadingly named "dismissDelayMinutes"). */
	final int dismissDelayMs;
	/**
	 * @param milliseconds how long tooltips should stay visible while hovering
	 */
	public DismissDelayMouseAdapter(int milliseconds) {
		dismissDelayMs = milliseconds;
	}
	@Override
	public void mouseEntered(final MouseEvent e) {
		ToolTipManager.sharedInstance().setDismissDelay(dismissDelayMs);
	}
	@Override
	public void mouseExited(final MouseEvent e) {
		ToolTipManager.sharedInstance().setDismissDelay(defaultDismissTimeout);
	}
}
/**
 * Placeholder icon used when a provider supplies no icon of its own:
 * a translucent "ban" glyph centered inside a fixed width/height box.
 */
private class EmptyIcon implements Icon {
	private final String text = IconManager.ICON_BAN;
	private final Color fgColor;
	private final int width;
	private final int height;
	public EmptyIcon(int width, int height) {
		this.width = width;
		this.height = height;
		// Use the disabled-label color with low alpha (60/255) so the placeholder stays subtle.
		Color c = UIManager.getColor("Label.disabledForeground");
		fgColor = new Color(c.getRed(), c.getGreen(), c.getBlue(), 60);
	}
	@Override
	public void paintIcon(Component c, Graphics g, int x, int y) {
		// Set antialiasing
		Graphics2D g2 = (Graphics2D) g;
		g2.setRenderingHints(
				new RenderingHints(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON));
		// Set the font
		Font font = serviceRegistrar.getService(IconManager.class).getIconFont(28.0f);
		g2.setFont(font);
		// Get the FontMetrics
		FontMetrics metrics = g2.getFontMetrics(font);
		// Determine the X coordinate for the text (+2 nudges the glyph toward optical center)
		int sx = x + 2 + (width - metrics.stringWidth(text)) / 2;
		// Determine the Y coordinate for the text (note we add the ascent, as in java 2d 0 is top of the screen)
		int sy = y + 1 + ((height - metrics.getHeight()) / 2) + metrics.getAscent();
		// Draw
		g2.setColor(fgColor);
		g2.drawString(text, sx, sy);
	}
	@Override
	public int getIconWidth() {
		return width;
	}
	@Override
	public int getIconHeight() {
		return height;
	}
}
}
| Fixes a potential Exception.
| swing-application-impl/src/main/java/org/cytoscape/internal/view/NetworkSearchBar.java | Fixes a potential Exception. |
|
Java | lgpl-2.1 | d25ae926963172cfce689bbb37f2499271f944e5 | 0 | magsilva/jazzy,reckart/jazzy | /*
* put your module comment here
* formatted with JxBeauty (c) [email protected]
*/
package com.swabunga.spell.swing;
import com.swabunga.spell.event.*;
import javax.swing.*;
import javax.swing.text.*;
import javax.swing.event.*;
import java.io.File;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
/** Implementation of a spell check form.
 * <p>This still needs to be laid out correctly, but for the most part it works.</p>
 * <p>The form shows the misspelled word in an editable text area, a list of
 * suggested replacements, and buttons for the standard spell-check actions
 * (ignore, replace, add to dictionary, cancel). Each button press is applied to
 * the current {@link SpellCheckEvent} and then re-fired to any registered
 * {@link ActionListener}s.</p>
 *
 * @author Jason Height ([email protected])
 */
public class JSpellForm extends JPanel
    implements ActionListener, ListSelectionListener {
  /** The Ignore button click action command*/
  public static final String IGNORE_CMD = "IGNORE";
  /** The Ignore All button click action command*/
  public static final String IGNOREALL_CMD = "IGNOREALL";
  /** The Add button click action command*/
  public static final String ADD_CMD = "ADD";
  /** The Replace button click action command*/
  public static final String REPLACE_CMD = "REPLACE";
  /** The Replace All button click action command*/
  public static final String REPLACEALL_CMD = "REPLACEALL";
  /** The Cancel button click action command*/
  public static final String CANCEL_CMD = "CANCEL";
  /** The resource for the Suggestions label*/
  private static final String SUGGESTIONS_RES = "SUGGESTIONS";
  /** The resource for the invalid-word label*/
  private static final String INVALIDWORD_RES = "INVALIDWORD";
  /* Accessible GUI Components */
  protected JList suggestList;
  protected JTextArea checkText;
  /* The current spell check event */
  protected SpellCheckEvent spellEvent;
  /** The listener list (holds actionlisteners) */
  protected EventListenerList listenerList = new EventListenerList();
  /** Localized button/label texts, loaded for the default locale. */
  protected ResourceBundle messages;
  /** Panel constructor */
  public JSpellForm () {
    messages = ResourceBundle.getBundle("com.swabunga.spell.swing.messages", Locale.getDefault());
    initialiseGUI();
  }
  /** Helper method to create a JButton with a command, a text label and a listener*/
  private static final JButton createButton (String command, String text, ActionListener listener) {
    JButton btn = new JButton(text);
    btn.setActionCommand(command);
    btn.addActionListener(listener);
    return btn;
  }
  /** Creates the buttons on the left hand side of the panel*/
  protected JPanel makeEastPanel () {
    JPanel jPanel1 = new JPanel();
    BoxLayout layout = new BoxLayout(jPanel1, BoxLayout.Y_AXIS);
    jPanel1.setLayout(layout);
    // Each button's maximum size is unbounded so BoxLayout stretches it to the column width.
    JButton ignoreBtn = createButton(IGNORE_CMD, messages.getString(IGNORE_CMD), this);
    ignoreBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(ignoreBtn);
    JButton ignoreAllBtn = createButton(IGNOREALL_CMD, messages.getString(IGNOREALL_CMD), this);
    ignoreAllBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(ignoreAllBtn);
    JButton addBtn = createButton(ADD_CMD, messages.getString(ADD_CMD), this);
    addBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(addBtn);
    JButton changeBtn = createButton(REPLACE_CMD, messages.getString(REPLACE_CMD), this);
    changeBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(changeBtn);
    JButton changeAllBtn = createButton(REPLACEALL_CMD, messages.getString(REPLACEALL_CMD), this);
    changeAllBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(changeAllBtn);
    JButton cancelBtn = createButton(CANCEL_CMD, messages.getString(CANCEL_CMD), this);
    cancelBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(cancelBtn);
    return jPanel1;
  }
  /** Creates the centre panel: the invalid-word text area above the suggestion list. */
  protected JPanel makeCentrePanel () {
    JPanel jPanel2 = new JPanel();
    jPanel2.setLayout(new BoxLayout(jPanel2, BoxLayout.Y_AXIS));
    JLabel lbl1 = new JLabel(messages.getString(INVALIDWORD_RES));
    jPanel2.add(lbl1);
    checkText = new JTextArea();
    jPanel2.add(new JScrollPane(checkText));
    JLabel lbl2 = new JLabel(messages.getString(SUGGESTIONS_RES));
    jPanel2.add(lbl2);
    suggestList = new JList();
    suggestList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    jPanel2.add(new JScrollPane(suggestList));
    suggestList.addListSelectionListener(this);
    return jPanel2;
  }
  /** Called by the constructor to initialise the GUI*/
  protected void initialiseGUI () {
    setLayout(new BorderLayout());
    this.add(makeEastPanel(), BorderLayout.EAST);
    this.add(makeCentrePanel(), BorderLayout.CENTER);
  }
  /** Register an action listener */
  public void addActionListener (ActionListener l) {
    listenerList.add(ActionListener.class, l);
  }
  /** Deregister an action listener*/
  public void removeActionListener (ActionListener l) {
    listenerList.remove(ActionListener.class, l);
  }
  /** Forwards the given event to every registered ActionListener. */
  protected void fireActionEvent (ActionEvent e) {
    // Guaranteed to return a non-null array
    Object[] listeners = listenerList.getListenerList();
    // Process the listeners last to first, notifying
    // those that are interested in this event
    for (int i = listeners.length - 2; i >= 0; i -= 2) {
      if (listeners[i] == ActionListener.class) {
        ((ActionListener)listeners[i + 1]).actionPerformed(e);
      }
    }
  }
  /** Sets the current spell check event that is being shown to the user*/
  public void setSpellEvent (SpellCheckEvent event) {
    spellEvent = event;
    DefaultListModel m = new DefaultListModel();
    java.util.List suggestions = event.getSuggestions();
    for (int i = 0; i < suggestions.size(); i++) {
      m.addElement(suggestions.get(i));
    }
    suggestList.setModel(m);
    // Pre-select the first suggestion so the user can accept it immediately.
    // The selection callback briefly copies it into checkText, but the text is
    // reset to the invalid word on the line below.
    if (m.size()>0) {
      suggestList.setSelectedIndex(0);
    }
    checkText.setText(event.getInvalidWord());
  }
  /** Fired when a value in the list is selected*/
  public void valueChanged(ListSelectionEvent e) {
    if (!e.getValueIsAdjusting()) {
      // Copy the chosen suggestion into the editable text area.
      Object selectedValue = suggestList.getSelectedValue();
      if (selectedValue != null)
        checkText.setText(selectedValue.toString());
    }
  }
  /** Fired when a button is selected */
  public void actionPerformed (ActionEvent e) {
    if (IGNORE_CMD.equals(e.getActionCommand())) {
      spellEvent.ignoreWord(false);
    }
    else if (IGNOREALL_CMD.equals(e.getActionCommand())) {
      spellEvent.ignoreWord(true);
    }
    else if (REPLACE_CMD.equals(e.getActionCommand())) {
      spellEvent.replaceWord(checkText.getText(), false);
    }
    else if (REPLACEALL_CMD.equals(e.getActionCommand())) {
      spellEvent.replaceWord(checkText.getText(), true);
    }
    else if (ADD_CMD.equals(e.getActionCommand())) {
      spellEvent.addToDictionary(checkText.getText());
    }
    else if (CANCEL_CMD.equals(e.getActionCommand())) {
      spellEvent.cancel();
    }
    // Re-fire so external listeners (e.g. the dialog driving the check) can react.
    fireActionEvent(e);
  }
  /** Small manual test harness: shows the form in a frame. */
  public static void main (String[] args) {
    try {
      JSpellForm pane = new JSpellForm();
      JFrame frm = new JFrame("Spelling");
      frm.getContentPane().add(pane);
      frm.setSize(300, 300);
      frm.setVisible(true);
      frm.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    } catch (Exception ex) {
      ex.printStackTrace();
    }
  }
}
| src/com/swabunga/spell/swing/JSpellForm.java | /*
* put your module comment here
* formatted with JxBeauty (c) [email protected]
*/
package com.swabunga.spell.swing;
import com.swabunga.spell.event.*;
import javax.swing.*;
import javax.swing.text.*;
import javax.swing.event.*;
import java.io.File;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
/** Implementation of a spell check form.
 * <p>This still needs to be laid out correctly, but for the most part it works.</p>
 * <p>The form shows the misspelled word in an editable text area, a list of
 * suggested replacements, and buttons for the standard spell-check actions.
 * Each button press is applied to the current {@link SpellCheckEvent} and then
 * re-fired to any registered {@link ActionListener}s.</p>
 *
 * @author Jason Height ([email protected])
 */
public class JSpellForm extends JPanel
    implements ActionListener, ListSelectionListener {
  /** The Ignore button click action command*/
  public static final String IGNORE_CMD = "IGNORE";
  /** The Ignore All button click action command*/
  public static final String IGNOREALL_CMD = "IGNOREALL";
  /** The Add button click action command*/
  public static final String ADD_CMD = "ADD";
  /** The Replace button click action command*/
  public static final String REPLACE_CMD = "REPLACE";
  /** The Replace All button click action command*/
  public static final String REPLACEALL_CMD = "REPLACEALL";
  /** The Cancel button click action command*/
  public static final String CANCEL_CMD = "CANCEL";
  /** The resource for the Suggestions label*/
  private static final String SUGGESTIONS_RES = "SUGGESTIONS";
  /** The resource for the invalid-word label*/
  private static final String INVALIDWORD_RES = "INVALIDWORD";
  /* Accessible GUI Components */
  protected JList suggestList;
  protected JTextArea checkText;
  /* The current spell check event */
  protected SpellCheckEvent spellEvent;
  /** The listener list (holds actionlisteners) */
  protected EventListenerList listenerList = new EventListenerList();
  /** Localized button/label texts, loaded for the default locale. */
  protected ResourceBundle messages;
  /** Panel constructor */
  public JSpellForm () {
    messages = ResourceBundle.getBundle("com.swabunga.spell.swing.messages", Locale.getDefault());
    initialiseGUI();
  }
  /** Helper method to create a JButton with a command, a text label and a listener*/
  private static final JButton createButton (String command, String text, ActionListener listener) {
    JButton btn = new JButton(text);
    btn.setActionCommand(command);
    btn.addActionListener(listener);
    return btn;
  }
  /** Creates the buttons on the left hand side of the panel*/
  protected JPanel makeEastPanel () {
    JPanel jPanel1 = new JPanel();
    BoxLayout layout = new BoxLayout(jPanel1, BoxLayout.Y_AXIS);
    jPanel1.setLayout(layout);
    // Each button's maximum size is unbounded so BoxLayout stretches it to the column width.
    JButton ignoreBtn = createButton(IGNORE_CMD, messages.getString(IGNORE_CMD), this);
    ignoreBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(ignoreBtn);
    JButton ignoreAllBtn = createButton(IGNOREALL_CMD, messages.getString(IGNOREALL_CMD), this);
    ignoreAllBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(ignoreAllBtn);
    JButton addBtn = createButton(ADD_CMD, messages.getString(ADD_CMD), this);
    addBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(addBtn);
    JButton changeBtn = createButton(REPLACE_CMD, messages.getString(REPLACE_CMD), this);
    changeBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(changeBtn);
    JButton changeAllBtn = createButton(REPLACEALL_CMD, messages.getString(REPLACEALL_CMD), this);
    changeAllBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(changeAllBtn);
    JButton cancelBtn = createButton(CANCEL_CMD, messages.getString(CANCEL_CMD), this);
    cancelBtn.setMaximumSize( new Dimension(Short.MAX_VALUE, Short.MAX_VALUE ));
    jPanel1.add(cancelBtn);
    return jPanel1;
  }
  /** Creates the centre panel: the invalid-word text area above the suggestion list. */
  protected JPanel makeCentrePanel () {
    JPanel jPanel2 = new JPanel();
    jPanel2.setLayout(new BoxLayout(jPanel2, BoxLayout.Y_AXIS));
    JLabel lbl1 = new JLabel(messages.getString(INVALIDWORD_RES));
    jPanel2.add(lbl1);
    checkText = new JTextArea();
    jPanel2.add(new JScrollPane(checkText));
    JLabel lbl2 = new JLabel(messages.getString(SUGGESTIONS_RES));
    jPanel2.add(lbl2);
    suggestList = new JList();
    suggestList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    jPanel2.add(new JScrollPane(suggestList));
    suggestList.addListSelectionListener(this);
    return jPanel2;
  }
  /** Called by the constructor to initialise the GUI*/
  protected void initialiseGUI () {
    setLayout(new BorderLayout());
    this.add(makeEastPanel(), BorderLayout.EAST);
    this.add(makeCentrePanel(), BorderLayout.CENTER);
  }
  /** Register an action listener */
  public void addActionListener (ActionListener l) {
    listenerList.add(ActionListener.class, l);
  }
  /** Deregister an action listener*/
  public void removeActionListener (ActionListener l) {
    listenerList.remove(ActionListener.class, l);
  }
  /** Forwards the given event to every registered ActionListener. */
  protected void fireActionEvent (ActionEvent e) {
    // Guaranteed to return a non-null array
    Object[] listeners = listenerList.getListenerList();
    // Process the listeners last to first, notifying
    // those that are interested in this event
    for (int i = listeners.length - 2; i >= 0; i -= 2) {
      if (listeners[i] == ActionListener.class) {
        ((ActionListener)listeners[i + 1]).actionPerformed(e);
      }
    }
  }
  /** Sets the current spell check event that is being shown to the user*/
  public void setSpellEvent (SpellCheckEvent event) {
    spellEvent = event;
    DefaultListModel m = new DefaultListModel();
    java.util.List suggestions = event.getSuggestions();
    for (int i = 0; i < suggestions.size(); i++) {
      m.addElement(suggestions.get(i));
    }
    suggestList.setModel(m);
    // Fix: pre-select the first suggestion (if any) so the user can accept it
    // immediately instead of having to click the list first. The selection
    // callback briefly copies it into checkText, but the text is reset to the
    // invalid word on the line below.
    if (m.size() > 0) {
      suggestList.setSelectedIndex(0);
    }
    checkText.setText(event.getInvalidWord());
  }
  /** Fired when a value in the list is selected*/
  public void valueChanged(ListSelectionEvent e) {
    if (!e.getValueIsAdjusting()) {
      Object selectedValue = suggestList.getSelectedValue();
      if (selectedValue != null)
        checkText.setText(selectedValue.toString());
    }
  }
  /** Fired when a button is selected */
  public void actionPerformed (ActionEvent e) {
    if (IGNORE_CMD.equals(e.getActionCommand())) {
      spellEvent.ignoreWord(false);
    }
    else if (IGNOREALL_CMD.equals(e.getActionCommand())) {
      spellEvent.ignoreWord(true);
    }
    else if (REPLACE_CMD.equals(e.getActionCommand())) {
      spellEvent.replaceWord(checkText.getText(), false);
    }
    else if (REPLACEALL_CMD.equals(e.getActionCommand())) {
      spellEvent.replaceWord(checkText.getText(), true);
    }
    else if (ADD_CMD.equals(e.getActionCommand())) {
      spellEvent.addToDictionary(checkText.getText());
    }
    else if (CANCEL_CMD.equals(e.getActionCommand())) {
      spellEvent.cancel();
    }
    // Re-fire so external listeners (e.g. the dialog driving the check) can react.
    fireActionEvent(e);
  }
  /** Small manual test harness: shows the form in a frame. */
  public static void main (String[] args) {
    try {
      JSpellForm pane = new JSpellForm();
      JFrame frm = new JFrame("Spelling");
      frm.getContentPane().add(pane);
      frm.setSize(300, 300);
      frm.setVisible(true);
      frm.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    } catch (Exception ex) {
      ex.printStackTrace();
    }
  }
}
| Made first item in seggestion list selected by default
| src/com/swabunga/spell/swing/JSpellForm.java | Made first item in seggestion list selected by default |
|
Java | lgpl-2.1 | 4006694b59712d257970eb0579f72c70d60818bd | 0 | cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl,cytoscape/cytoscape-impl | package org.cytoscape.ding.debug;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import javax.swing.GroupLayout;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.Timer;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableModel;
import org.cytoscape.ding.impl.canvas.AnnotationSelectionCanvas;
import org.cytoscape.util.swing.BasicCollapsiblePanel;
import org.cytoscape.util.swing.LookAndFeelUtil;
/**
 * Debug panel that shows the frame rate of the "fast" main render path,
 * averaged over a sliding 5-second window, plus a per-task breakdown table
 * of where the render time was spent.
 */
@SuppressWarnings("serial")
public class FrameRatePanel extends BasicCollapsiblePanel {

	private JLabel frameRateLabel;
	private JTable table;
	/** Frames received so far; pruned down to the sliding window. Guarded by itself. */
	private LinkedList<DebugRootFrameInfo> frames = new LinkedList<>();
	private Timer timer;
	/** Size of the sliding averaging window, in milliseconds. */
	private final long window = 5000; // five seconds

	public FrameRatePanel() {
		super("Frame Rate");
		createContents();
		// Recompute the stats once per second while frames are coming in.
		timer = new Timer(1000, e -> updateFrameRate());
		timer.setRepeats(true);
	}

	/** Builds the label + table UI inside the collapsible content pane. */
	private void createContents() {
		frameRateLabel = new JLabel("Frame Rate: ");
		LookAndFeelUtil.makeSmall(frameRateLabel);
		table = new JTable();
		table.setShowGrid(true);
		JScrollPane scrollPane = new JScrollPane(table);
		scrollPane.setPreferredSize(new Dimension(450, 200));
		JPanel panel = new JPanel();
		panel.setOpaque(false);
		GroupLayout layout = new GroupLayout(panel);
		panel.setLayout(layout);
		layout.setAutoCreateContainerGaps(true);
		layout.setAutoCreateGaps(true);
		layout.setHorizontalGroup(layout.createParallelGroup()
				.addComponent(frameRateLabel)
				.addComponent(scrollPane)
		);
		layout.setVerticalGroup(layout.createSequentialGroup()
				.addComponent(frameRateLabel)
				.addComponent(scrollPane)
		);
		JPanel content = getContentPane();
		content.setLayout(new BorderLayout());
		content.add(BorderLayout.WEST, panel);
	}

	/** Updates the label with frames-per-second computed from count and total time (ms). */
	private void updateFrameRateLabel(int frameCount, long frameTime) {
		double framesPerSecondRaw = (double) frameCount / ((double) frameTime / 1000.0);
		frameRateLabel.setText(String.format("Frame Rate: %.2f per sec", framesPerSecondRaw));
	}

	/**
	 * Records a completed render frame and starts the refresh timer on first use.
	 * Only completed fast main-path frames are counted.
	 */
	public void addFrame(DebugRootFrameInfo frame) {
		synchronized (frames) {
			// MKTODO maybe there's a better way to handle cancelled frames
			if (frame.getType() == DebugFrameType.MAIN_FAST && !frame.isCancelled()) {
				frames.addLast(frame);
			}
		}
		if (!timer.isRunning()) {
			timer.start();
		}
	}

	/** Recomputes the window statistics and rebuilds the breakdown table. */
	private void updateFrameRate() {
		long frameTime = 0;
		int frameCount = 0;
		List<DebugFrameInfo> windowFrames = new ArrayList<>();
		synchronized (frames) {
			if (frames.isEmpty())
				return;
			long endOfWindow = frames.getLast().getEndTime();
			long startOfWindow = endOfWindow - window;
			// MKTODO what if the last frame is larger than the window
			// Walk backwards collecting the frames that fall inside the window...
			ListIterator<DebugRootFrameInfo> listIterator = frames.listIterator(frames.size());
			while (listIterator.hasPrevious()) {
				var frame = listIterator.previous();
				if (frame.getStartTime() < startOfWindow) {
					break;
				}
				windowFrames.add(frame);
				frameTime += frame.getTime();
				frameCount++;
			}
			// ...then drop everything older than the window so the list stays bounded.
			while (listIterator.hasPrevious()) {
				listIterator.previous();
				listIterator.remove();
			}
		}
		updateFrameRateLabel(frameCount, frameTime);
		DebugFrameInfo root = DebugFrameInfo.merge(windowFrames);
		TableModel model = createTableModel(root);
		table.setModel(model);
		table.getColumnModel().getColumn(0).setPreferredWidth(150);
		table.getColumnModel().getColumn(1).setPreferredWidth(100);
		table.getColumnModel().getColumn(2).setPreferredWidth(100);
		table.getColumnModel().getColumn(3).setPreferredWidth(100);
	}

	// Renamed from "createTabelModel" (typo); private, only called above.
	private TableModel createTableModel(DebugFrameInfo frame) {
		String[] columnNames = {"Render", "Time/" + window + "ms", "% rel", "% frame"};
		Object[][] data = computeFrameData(frame);
		DefaultTableModel model = new DefaultTableModel(data, columnNames);
		return model;
	}

	/** Flattens the merged frame tree into table rows, prepending an "Idle/Overhead" row. */
	private Object[][] computeFrameData(DebugFrameInfo root) {
		int rows = DebugUtil.countNodesInTree((DebugFrameInfo) root, t -> t.getSubFrames());
		Object[][] data = new Object[rows + 1 - 1][]; // :) add a row for "idle", and remove annotation selection row
		double percent = ((double) (window - root.getTime()) / (double) window) * 100.0;
		String percentText = String.format("%.1f", percent);
		data[0] = new Object[] { "Idle/Overhead", window - root.getTime(), percentText, ""};
		flattenAndExtract(1, 0, root.getTime(), data, null, root);
		return data;
	}

	/**
	 * Depth-first flattening of the frame tree into data rows, skipping the
	 * annotation-selection canvas. Returns the index of the last row written.
	 */
	private int flattenAndExtract(int i, int depth, long frameTot, Object[][] data, DebugFrameInfo parent, DebugFrameInfo frame) {
		data[i] = getDataForRow(parent, frame, depth, frameTot);
		for (DebugFrameInfo child : frame.getSubFrames()) {
			if (!AnnotationSelectionCanvas.DEBUG_NAME.equals(child.getTask())) {
				i = flattenAndExtract(++i, depth + 1, frameTot, data, frame, child);
			}
		}
		return i;
	}

	/**
	 * Produces one table row: task name, time, % relative to the parent (or to the
	 * window for the root), and % of the total frame time. Indentation encodes depth.
	 */
	private Object[] getDataForRow(DebugFrameInfo parent, DebugFrameInfo frame, int depth, long frameTot) {
		String indent = " ".repeat(depth);
		String name = indent + frame.getTask();
		String time = indent + frame.getTime();
		double percentFrame = ((double) frame.getTime() / (double) frameTot) * 100.0;
		double percentRel;
		if (parent == null) { // root frame: relative to the whole window
			percentRel = ((double) frame.getTime() / (double) window) * 100.0; // fixed stray ";;"
		} else {
			percentRel = ((double) frame.getTime() / (double) parent.getTime()) * 100.0;
		}
		String percentRelText = indent + String.format("%.1f", percentRel);
		String percentFrameText = indent + String.format("%.1f", percentFrame);
		return new Object[] { name, time, percentRelText, percentFrameText };
	}
}
| ding-impl/ding-presentation-impl/src/main/java/org/cytoscape/ding/debug/FrameRatePanel.java | package org.cytoscape.ding.debug;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import javax.swing.GroupLayout;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.Timer;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableModel;
import org.cytoscape.ding.impl.canvas.AnnotationSelectionCanvas;
import org.cytoscape.util.swing.BasicCollapsiblePanel;
import org.cytoscape.util.swing.LookAndFeelUtil;
/**
 * Debug panel that shows the frame rate of the "fast" main render path,
 * averaged over a sliding 5-second window, plus a per-task breakdown table
 * of where the render time was spent.
 */
@SuppressWarnings("serial")
public class FrameRatePanel extends BasicCollapsiblePanel {

	private JLabel frameRateLabel;
	private JTable table;
	/** Frames received so far; pruned down to the sliding window. Guarded by itself. */
	private LinkedList<DebugRootFrameInfo> frames = new LinkedList<>();
	private Timer timer;
	/** Size of the sliding averaging window, in milliseconds. */
	private final long window = 5000; // five seconds

	public FrameRatePanel() {
		super("Frame Rate");
		createContents();
		// Recompute the stats once per second while frames are coming in.
		timer = new Timer(1000, e -> updateFrameRate());
		timer.setRepeats(true);
	}

	/** Builds the label + table UI inside the collapsible content pane. */
	private void createContents() {
		frameRateLabel = new JLabel("Frame Rate: ");
		LookAndFeelUtil.makeSmall(frameRateLabel);
		table = new JTable();
		table.setShowGrid(true);
		JScrollPane scrollPane = new JScrollPane(table);
		scrollPane.setPreferredSize(new Dimension(400, 200));
		JPanel panel = new JPanel();
		panel.setOpaque(false);
		GroupLayout layout = new GroupLayout(panel);
		panel.setLayout(layout);
		layout.setAutoCreateContainerGaps(true);
		layout.setAutoCreateGaps(true);
		layout.setHorizontalGroup(layout.createParallelGroup()
				.addComponent(frameRateLabel)
				.addComponent(scrollPane)
		);
		layout.setVerticalGroup(layout.createSequentialGroup()
				.addComponent(frameRateLabel)
				.addComponent(scrollPane)
		);
		JPanel content = getContentPane();
		content.setLayout(new BorderLayout());
		content.add(BorderLayout.WEST, panel);
	}

	/** Updates the label with frames-per-second computed from count and total time (ms). */
	private void updateFrameRateLabel(int frameCount, long frameTime) {
		double framesPerSecondRaw = (double) frameCount / ((double) frameTime / 1000.0);
		frameRateLabel.setText(String.format("Frame Rate: %.2f per sec", framesPerSecondRaw));
	}

	/**
	 * Records a completed render frame and starts the refresh timer on first use.
	 * Only completed fast main-path frames are counted.
	 */
	public void addFrame(DebugRootFrameInfo frame) {
		synchronized (frames) {
			// MKTODO maybe there's a better way to handle cancelled frames
			if (frame.getType() == DebugFrameType.MAIN_FAST && !frame.isCancelled()) {
				frames.addLast(frame);
			}
		}
		if (!timer.isRunning()) {
			timer.start();
		}
	}

	/** Recomputes the window statistics and rebuilds the breakdown table. */
	private void updateFrameRate() {
		long frameTime = 0;
		int frameCount = 0;
		List<DebugFrameInfo> windowFrames = new ArrayList<>();
		synchronized (frames) {
			if (frames.isEmpty())
				return;
			long endOfWindow = frames.getLast().getEndTime();
			long startOfWindow = endOfWindow - window;
			// MKTODO what if the last frame is larger than the window
			// Walk backwards collecting the frames that fall inside the window...
			ListIterator<DebugRootFrameInfo> listIterator = frames.listIterator(frames.size());
			while (listIterator.hasPrevious()) {
				var frame = listIterator.previous();
				if (frame.getStartTime() < startOfWindow) {
					break;
				}
				windowFrames.add(frame);
				frameTime += frame.getTime();
				frameCount++;
			}
			// ...then drop everything older than the window so the list stays bounded.
			while (listIterator.hasPrevious()) {
				listIterator.previous();
				listIterator.remove();
			}
		}
		updateFrameRateLabel(frameCount, frameTime);
		DebugFrameInfo root = DebugFrameInfo.merge(windowFrames);
		TableModel model = createTableModel(root);
		table.setModel(model);
		table.getColumnModel().getColumn(0).setPreferredWidth(200);
		table.getColumnModel().getColumn(1).setPreferredWidth(100);
		table.getColumnModel().getColumn(2).setPreferredWidth(100);
		// Fix: the model has four columns; configure the "% tot" column too.
		table.getColumnModel().getColumn(3).setPreferredWidth(100);
	}

	// Renamed from "createTabelModel" (typo); private, only called above.
	private TableModel createTableModel(DebugFrameInfo frame) {
		String[] columnNames = {"Render", "Time/" + window + "ms", "% rel", "% tot"};
		Object[][] data = computeFrameData(frame);
		DefaultTableModel model = new DefaultTableModel(data, columnNames);
		return model;
	}

	/** Flattens the merged frame tree into table rows, prepending an "Idle/Overhead" row. */
	private Object[][] computeFrameData(DebugFrameInfo root) {
		int rows = DebugUtil.countNodesInTree((DebugFrameInfo) root, t -> t.getSubFrames());
		Object[][] data = new Object[rows + 1 - 1][]; // :) add a row for "idle", and remove annotation selection row
		double percent = ((double) (window - root.getTime()) / (double) window) * 100.0;
		String percentText = String.format("%.1f", percent);
		data[0] = new Object[] { "Idle/Overhead", window - root.getTime(), percentText, percentText};
		flattenAndExtract(1, 0, data, null, root);
		return data;
	}

	/**
	 * Depth-first flattening of the frame tree into data rows, skipping the
	 * annotation-selection canvas. Returns the index of the last row written.
	 */
	private int flattenAndExtract(int i, int depth, Object[][] data, DebugFrameInfo parent, DebugFrameInfo frame) {
		data[i] = getDataForRow(parent, frame, depth);
		for (DebugFrameInfo child : frame.getSubFrames()) {
			if (!AnnotationSelectionCanvas.DEBUG_NAME.equals(child.getTask())) {
				i = flattenAndExtract(++i, depth + 1, data, frame, child);
			}
		}
		return i;
	}

	/**
	 * Produces one table row: task name, time, % relative to the parent (or to the
	 * window for the root), and % of the whole window. Indentation encodes depth.
	 * Fix: previously this returned only three cells for a four-column model, so
	 * the "% tot" column always rendered blank (DefaultTableModel pads short rows
	 * with null).
	 */
	private Object[] getDataForRow(DebugFrameInfo parent, DebugFrameInfo frame, int depth) {
		String indent = " ".repeat(depth);
		String name = indent + frame.getTask();
		String time = indent + frame.getTime();
		double percentRel;
		if (parent == null) { // root frame
			percentRel = ((double) frame.getTime() / (double) window) * 100.0;
		} else {
			percentRel = ((double) frame.getTime() / (double) parent.getTime()) * 100.0;
		}
		// "% tot": this frame's share of the whole sliding window.
		double percentTot = ((double) frame.getTime() / (double) window) * 100.0;
		String percentRelText = indent + String.format("%.1f", percentRel);
		String percentTotText = indent + String.format("%.1f", percentTot);
		return new Object[] { name, time, percentRelText, percentTotText };
	}
}
| debug panel add percent total of frame column | ding-impl/ding-presentation-impl/src/main/java/org/cytoscape/ding/debug/FrameRatePanel.java | debug panel add percent total of frame column |
|
Java | lgpl-2.1 | 14654e99dc965162321458d2f48dcdf057a3133a | 0 | yersan/wildfly-core,darranl/wildfly-core,aloubyansky/wildfly-core,yersan/wildfly-core,soul2zimate/wildfly-core,aloubyansky/wildfly-core,luck3y/wildfly-core,ivassile/wildfly-core,luck3y/wildfly-core,ivassile/wildfly-core,soul2zimate/wildfly-core,yersan/wildfly-core,JiriOndrusek/wildfly-core,bstansberry/wildfly-core,darranl/wildfly-core,ivassile/wildfly-core,jfdenise/wildfly-core,jfdenise/wildfly-core,jamezp/wildfly-core,JiriOndrusek/wildfly-core,bstansberry/wildfly-core,jfdenise/wildfly-core,jamezp/wildfly-core,soul2zimate/wildfly-core,bstansberry/wildfly-core,jamezp/wildfly-core,JiriOndrusek/wildfly-core,darranl/wildfly-core,aloubyansky/wildfly-core,luck3y/wildfly-core | package org.jboss.as.test.integration.management.cli;
import org.jboss.as.cli.CommandContext;
import org.jboss.as.test.integration.management.util.CLIWrapper;
import org.jboss.as.test.shared.TestSuiteEnvironment;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.jboss.shrinkwrap.impl.base.exporter.zip.ZipExporterImpl;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.core.testrunner.WildflyTestRunner;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.Assert.assertEquals;
@RunWith(WildflyTestRunner.class)
@Ignore("WFCORE-1821")
public class ModuleOpsCompletionTestCase {

    private static final String MODULE_NAME = "org.jboss.test.cli.climoduletest";

    private static CLIWrapper cli;
    private static File jarFile;

    /** Starts an interactive CLI and builds the dummy jar used as a module resource. */
    @BeforeClass
    public static void before() throws Exception {
        cli = new CLIWrapper(true, null, System.in);

        final JavaArchive jar = ShrinkWrap.create(JavaArchive.class, "Dummy.jar");
        jar.addClass(ModuleTestCase.class);
        jarFile = new File(TestSuiteEnvironment.getTmpDir() + File.separator + "Dummy.jar");
        new ZipExporterImpl(jar).exportTo(jarFile, true);
    }

    @AfterClass
    public static void after() throws Exception {
        cli.close();
    }

    /** The name of a "module add" should suggest all top-level folders, not only valid module names. */
    @Test
    public void testModuleAddCompletionSuggestions() throws Exception {
        final CommandContext ctx = cli.getCommandContext();
        final Stream<String> allTopLevelDirs = listTopLevelModuleDirs().map(File::getName);

        testSuggestion(ctx, allTopLevelDirs, "module add --name=", false);
        testSuggestion(ctx, Arrays.asList("org"), "module add --name=o", false);
        testSuggestion(ctx, Arrays.asList("org", "org."), "module add --name=org", false);

        try {
            // a folder without a module descriptor must still be suggested for "add"
            new File(getModulePath(), "foo").mkdir();
            testSuggestion(ctx, Arrays.asList("foo"), "module add --name=f", false);
        } finally {
            new File(getModulePath(), "foo").delete();
        }
    }

    /** The name of a "module remove" should only suggest valid module names from the modules root. */
    @Test
    public void testModuleRemoveCompletionSuggestions() throws Exception {
        final CommandContext ctx = cli.getCommandContext();
        try {
            testAdd("main");

            testSuggestion(ctx, Arrays.asList("org."), "module remove --name=", false);
            testSuggestion(ctx, Arrays.asList("org."), "module remove --name=org", false);
        } finally {
            testRemove("main");
        }
    }

    /** The --dependencies option should suggest all possible modules and complete partial names. */
    @Test
    public void testModuleDependenciesCompletionSuggestions() throws Exception {
        final CommandContext ctx = cli.getCommandContext();
        final Stream<String> topLevelDirs = listTopLevelModuleDirs()
                .filter(this::isModuleTree)
                .map(f -> f.getName() + ".");

        testSuggestion(ctx, topLevelDirs, "module add --name=foo --dependencies=", true);
        testSuggestion(ctx, Arrays.asList("org."), "module add --name=foo --dependencies=o", true);
        testSuggestion(ctx, Arrays.asList("org."), "module add --name=foo --dependencies=bar,o", true);
    }

    /** Creates the test module in the given slot. */
    private void testAdd(String slotName) throws Exception {
        cli.sendLine("module add --name=" + MODULE_NAME
                + ("main".equals(slotName) ? "" : " --slot=" + slotName)
                + " --resources=" + jarFile.getAbsolutePath()
        );
    }

    /** Removes the test module from the given slot. */
    private void testRemove(String slotName) throws Exception {
        cli.sendLine("module remove --name=" + MODULE_NAME
                + ("main".equals(slotName) ? "" : " --slot=" + slotName)
        );
    }

    /**
     * Lists the top-level module directories: everything directly under the module
     * path (except "system") plus the roots of each layer and add-on under system/.
     */
    private Stream<File> listTopLevelModuleDirs() {
        ArrayList<File> res = new ArrayList<>();
        addAll(res, getModulePath().listFiles(f -> !f.getName().equals("system")));
        addGroupedDirs(res, new File(getModulePath(), "system/layers/"));
        addGroupedDirs(res, new File(getModulePath(), "system/add-ons/"));
        return res.stream().sorted();
    }

    /** Adds the children of every direct subdirectory of {@code root}, if it exists. */
    private static void addGroupedDirs(List<File> res, File root) {
        if (root.exists()) {
            for (File group : nullSafe(root.listFiles())) {
                addAll(res, group.listFiles());
            }
        }
    }

    private static void addAll(List<File> res, File[] files) {
        res.addAll(Arrays.asList(nullSafe(files)));
    }

    /** File#listFiles returns null on I/O error; normalize that to an empty array. */
    private static File[] nullSafe(File[] files) {
        return files == null ? new File[0] : files;
    }

    /** Returns true if the directory tree rooted at {@code f} contains at least one module.xml. */
    private boolean isModuleTree(File f) {
        try {
            return Files.find(f.toPath(), Integer.MAX_VALUE,
                    (p, attr) -> p.getFileName().toString().equals("module.xml"),
                    FileVisitOption.FOLLOW_LINKS).count() > 0;
        } catch (IOException e) {
            return false;
        }
    }

    private void testSuggestion(CommandContext ctx, Stream<String> expected, String buffer, boolean allowMultipleValues) {
        testSuggestion(ctx, expected.collect(Collectors.toList()), buffer, allowMultipleValues);
    }

    /**
     * Asserts that completing {@code buffer} yields exactly {@code expected} candidates and
     * that the completion offset points just after the last '=' (or ',' for multi-valued options).
     */
    private void testSuggestion(CommandContext ctx, List<String> expected, String buffer, boolean allowMultipleValues) {
        List<String> candidates = new ArrayList<>();
        final int offset = ctx.getDefaultCommandCompleter().complete(ctx, buffer, buffer.length(), candidates);
        assertEquals(expected, candidates);
        final int expectedIndex;
        if (allowMultipleValues && buffer.lastIndexOf('=') < buffer.lastIndexOf(',')) {
            expectedIndex = buffer.lastIndexOf(',') + 1;
        } else {
            expectedIndex = buffer.lastIndexOf('=') + 1;
        }
        assertEquals(expectedIndex, offset);
    }

    /** Resolves the module directory from -Dmodule.path or -Djboss.dist, validating that it exists. */
    private File getModulePath() {
        String modulePath = TestSuiteEnvironment.getSystemProperty("module.path", null);
        if (modulePath == null) {
            String jbossHome = TestSuiteEnvironment.getSystemProperty("jboss.dist", null);
            if (jbossHome == null) {
                throw new IllegalStateException(
                        "Neither -Dmodule.path nor -Djboss.home were set");
            }
            modulePath = jbossHome + File.separatorChar + "modules";
        } else {
            modulePath = modulePath.split(File.pathSeparator)[0];
        }
        File moduleDir = new File(modulePath);
        if (!moduleDir.exists()) {
            throw new IllegalStateException(
                    "Determined module path does not exist");
        }
        if (!moduleDir.isDirectory()) {
            throw new IllegalStateException(
                    "Determined module path is not a dir");
        }
        return moduleDir;
    }
}
| testsuite/standalone/src/test/java/org/jboss/as/test/integration/management/cli/ModuleOpsCompletionTestCase.java | package org.jboss.as.test.integration.management.cli;
import org.jboss.as.cli.CommandContext;
import org.jboss.as.test.integration.management.util.CLIWrapper;
import org.jboss.as.test.shared.TestSuiteEnvironment;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.jboss.shrinkwrap.impl.base.exporter.zip.ZipExporterImpl;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.core.testrunner.WildflyTestRunner;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.Assert.assertEquals;
@RunWith(WildflyTestRunner.class)
public class ModuleOpsCompletionTestCase {

    private static final String MODULE_NAME = "org.jboss.test.cli.climoduletest";

    private static CLIWrapper cli;
    private static File jarFile;

    /** Starts an interactive CLI and builds the dummy jar used as a module resource. */
    @BeforeClass
    public static void before() throws Exception {
        cli = new CLIWrapper(true, null, System.in);

        final JavaArchive jar = ShrinkWrap.create(JavaArchive.class, "Dummy.jar");
        jar.addClass(ModuleTestCase.class);
        jarFile = new File(TestSuiteEnvironment.getTmpDir() + File.separator + "Dummy.jar");
        new ZipExporterImpl(jar).exportTo(jarFile, true);
    }

    @AfterClass
    public static void after() throws Exception {
        cli.close();
    }

    /** The name of a "module add" should suggest all top-level folders, not only valid module names. */
    @Test
    public void testModuleAddCompletionSuggestions() throws Exception {
        final CommandContext ctx = cli.getCommandContext();
        final Stream<String> allTopLevelDirs = listTopLevelModuleDirs().map(File::getName);

        testSuggestion(ctx, allTopLevelDirs, "module add --name=", false);
        testSuggestion(ctx, Arrays.asList("org"), "module add --name=o", false);
        testSuggestion(ctx, Arrays.asList("org", "org."), "module add --name=org", false);

        try {
            // a folder without a module descriptor must still be suggested for "add"
            new File(getModulePath(), "foo").mkdir();
            testSuggestion(ctx, Arrays.asList("foo"), "module add --name=f", false);
        } finally {
            new File(getModulePath(), "foo").delete();
        }
    }

    /** The name of a "module remove" should only suggest valid module names from the modules root. */
    @Test
    public void testModuleRemoveCompletionSuggestions() throws Exception {
        final CommandContext ctx = cli.getCommandContext();
        try {
            testAdd("main");

            testSuggestion(ctx, Arrays.asList("org."), "module remove --name=", false);
            testSuggestion(ctx, Arrays.asList("org."), "module remove --name=org", false);
        } finally {
            testRemove("main");
        }
    }

    /** The --dependencies option should suggest all possible modules and complete partial names. */
    @Test
    public void testModuleDependenciesCompletionSuggestions() throws Exception {
        final CommandContext ctx = cli.getCommandContext();
        final Stream<String> topLevelDirs = listTopLevelModuleDirs()
                .filter(this::isModuleTree)
                .map(f -> f.getName() + ".");

        testSuggestion(ctx, topLevelDirs, "module add --name=foo --dependencies=", true);
        testSuggestion(ctx, Arrays.asList("org."), "module add --name=foo --dependencies=o", true);
        testSuggestion(ctx, Arrays.asList("org."), "module add --name=foo --dependencies=bar,o", true);
    }

    /** Creates the test module in the given slot. */
    private void testAdd(String slotName) throws Exception {
        cli.sendLine("module add --name=" + MODULE_NAME
                + ("main".equals(slotName) ? "" : " --slot=" + slotName)
                + " --resources=" + jarFile.getAbsolutePath()
        );
    }

    /** Removes the test module from the given slot. */
    private void testRemove(String slotName) throws Exception {
        cli.sendLine("module remove --name=" + MODULE_NAME
                + ("main".equals(slotName) ? "" : " --slot=" + slotName)
        );
    }

    /**
     * Lists the top-level module directories: everything directly under the module
     * path (except "system") plus the roots of each layer and add-on under system/.
     */
    private Stream<File> listTopLevelModuleDirs() {
        ArrayList<File> res = new ArrayList<>();
        addAll(res, getModulePath().listFiles(f -> !f.getName().equals("system")));
        addGroupedDirs(res, new File(getModulePath(), "system/layers/"));
        addGroupedDirs(res, new File(getModulePath(), "system/add-ons/"));
        return res.stream().sorted();
    }

    /** Adds the children of every direct subdirectory of {@code root}, if it exists. */
    private static void addGroupedDirs(List<File> res, File root) {
        if (root.exists()) {
            for (File group : nullSafe(root.listFiles())) {
                addAll(res, group.listFiles());
            }
        }
    }

    private static void addAll(List<File> res, File[] files) {
        res.addAll(Arrays.asList(nullSafe(files)));
    }

    /** File#listFiles returns null on I/O error; normalize that to an empty array. */
    private static File[] nullSafe(File[] files) {
        return files == null ? new File[0] : files;
    }

    /** Returns true if the directory tree rooted at {@code f} contains at least one module.xml. */
    private boolean isModuleTree(File f) {
        try {
            return Files.find(f.toPath(), Integer.MAX_VALUE,
                    (p, attr) -> p.getFileName().toString().equals("module.xml"),
                    FileVisitOption.FOLLOW_LINKS).count() > 0;
        } catch (IOException e) {
            return false;
        }
    }

    private void testSuggestion(CommandContext ctx, Stream<String> expected, String buffer, boolean allowMultipleValues) {
        testSuggestion(ctx, expected.collect(Collectors.toList()), buffer, allowMultipleValues);
    }

    /**
     * Asserts that completing {@code buffer} yields exactly {@code expected} candidates and
     * that the completion offset points just after the last '=' (or ',' for multi-valued options).
     */
    private void testSuggestion(CommandContext ctx, List<String> expected, String buffer, boolean allowMultipleValues) {
        List<String> candidates = new ArrayList<>();
        final int offset = ctx.getDefaultCommandCompleter().complete(ctx, buffer, buffer.length(), candidates);
        assertEquals(expected, candidates);
        final int expectedIndex;
        if (allowMultipleValues && buffer.lastIndexOf('=') < buffer.lastIndexOf(',')) {
            expectedIndex = buffer.lastIndexOf(',') + 1;
        } else {
            expectedIndex = buffer.lastIndexOf('=') + 1;
        }
        assertEquals(expectedIndex, offset);
    }

    /** Resolves the module directory from -Dmodule.path or -Djboss.dist, validating that it exists. */
    private File getModulePath() {
        String modulePath = TestSuiteEnvironment.getSystemProperty("module.path", null);
        if (modulePath == null) {
            String jbossHome = TestSuiteEnvironment.getSystemProperty("jboss.dist", null);
            if (jbossHome == null) {
                throw new IllegalStateException(
                        "Neither -Dmodule.path nor -Djboss.home were set");
            }
            modulePath = jbossHome + File.separatorChar + "modules";
        } else {
            modulePath = modulePath.split(File.pathSeparator)[0];
        }
        File moduleDir = new File(modulePath);
        if (!moduleDir.exists()) {
            throw new IllegalStateException(
                    "Determined module path does not exist");
        }
        if (!moduleDir.isDirectory()) {
            throw new IllegalStateException(
                    "Determined module path is not a dir");
        }
        return moduleDir;
    }
}
| @Ignore failing test
| testsuite/standalone/src/test/java/org/jboss/as/test/integration/management/cli/ModuleOpsCompletionTestCase.java | @Ignore failing test |
|
Java | unlicense | ee8b6f7c6d69113f93574c7734d05fbf1dae25f4 | 0 | elvman/ouzel,elnormous/ouzel,elnormous/ouzel,elvman/ouzel,elnormous/ouzel | // Copyright (C) 2017 Elviss Strazdins
// This file is part of the Ouzel engine.
package org.ouzelengine;
import android.content.Context;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.SurfaceView;
/**
 * Surface view that forwards Android touch and key input to the native Ouzel
 * engine through {@link OuzelLibJNIWrapper}.
 */
public class View extends SurfaceView
{
    private final InputManager inputManager;

    public View(Context context)
    {
        super(context);
        inputManager = new InputManager();
        // required so the view can receive key events
        setFocusableInTouchMode(true);
    }

    @Override public boolean onTouchEvent(MotionEvent event)
    {
        switch (event.getAction() & MotionEvent.ACTION_MASK)
        {
            case MotionEvent.ACTION_POINTER_DOWN:
            {
                // secondary pointers carry their index in the upper bits of the action
                final int index = event.getAction() >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
                OuzelLibJNIWrapper.handleActionDown(event.getPointerId(index),
                                                    event.getX(index), event.getY(index));
                return true;
            }
            case MotionEvent.ACTION_DOWN:
            {
                OuzelLibJNIWrapper.handleActionDown(event.getPointerId(0),
                                                    event.getX(0), event.getY(0));
                return true;
            }
            // NOTE(review): hover events are normally delivered to onGenericMotionEvent,
            // not onTouchEvent — confirm ACTION_HOVER_MOVE is actually reachable here.
            case MotionEvent.ACTION_HOVER_MOVE:
            case MotionEvent.ACTION_MOVE:
            {
                OuzelLibJNIWrapper.handleActionMove(event.getPointerId(0),
                                                    event.getX(0), event.getY(0));
                return true;
            }
            case MotionEvent.ACTION_POINTER_UP:
            {
                final int index = event.getAction() >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
                OuzelLibJNIWrapper.handleActionUp(event.getPointerId(index),
                                                  event.getX(index), event.getY(index));
                return true;
            }
            case MotionEvent.ACTION_UP:
            {
                OuzelLibJNIWrapper.handleActionUp(event.getPointerId(0),
                                                  event.getX(0), event.getY(0));
                return true;
            }
            case MotionEvent.ACTION_CANCEL:
            {
                OuzelLibJNIWrapper.handleActionCancel(event.getPointerId(0),
                                                      event.getX(0), event.getY(0));
                return true;
            }
        }

        return super.onTouchEvent(event);
    }

    @Override public boolean onKeyDown(int keyCode, KeyEvent event)
    {
        inputManager.onInputEvent(event);
        OuzelLibJNIWrapper.handleKeyDown(keyCode);
        return super.onKeyDown(keyCode, event);
    }

    @Override public boolean onKeyUp(int keyCode, KeyEvent event)
    {
        inputManager.onInputEvent(event);
        OuzelLibJNIWrapper.handleKeyUp(keyCode);
        return super.onKeyUp(keyCode, event);
    }
}
| samples/android/src/org/ouzelengine/View.java | // Copyright (C) 2017 Elviss Strazdins
// This file is part of the Ouzel engine.
package org.ouzelengine;
import android.content.Context;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.SurfaceView;
/**
 * Surface view that forwards Android touch and key input to the native Ouzel
 * engine through {@link OuzelLibJNIWrapper}.
 */
public class View extends SurfaceView
{
    private final InputManager inputManager;

    public View(Context context)
    {
        super(context);
        inputManager = new InputManager();
        // required so the view can receive key events
        setFocusableInTouchMode(true);
    }

    @Override public boolean onTouchEvent(MotionEvent event)
    {
        switch (event.getAction() & MotionEvent.ACTION_MASK)
        {
            case MotionEvent.ACTION_POINTER_DOWN:
            {
                // secondary pointers carry their index in the upper bits of the action
                final int index = event.getAction() >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
                OuzelLibJNIWrapper.handleActionDown(event.getPointerId(index),
                                                    event.getX(index), event.getY(index));
                return true;
            }
            case MotionEvent.ACTION_DOWN:
            {
                OuzelLibJNIWrapper.handleActionDown(event.getPointerId(0),
                                                    event.getX(0), event.getY(0));
                return true;
            }
            case MotionEvent.ACTION_MOVE:
            {
                OuzelLibJNIWrapper.handleActionMove(event.getPointerId(0),
                                                    event.getX(0), event.getY(0));
                return true;
            }
            case MotionEvent.ACTION_POINTER_UP:
            {
                final int index = event.getAction() >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
                OuzelLibJNIWrapper.handleActionUp(event.getPointerId(index),
                                                  event.getX(index), event.getY(index));
                return true;
            }
            case MotionEvent.ACTION_UP:
            {
                OuzelLibJNIWrapper.handleActionUp(event.getPointerId(0),
                                                  event.getX(0), event.getY(0));
                return true;
            }
            case MotionEvent.ACTION_CANCEL:
            {
                OuzelLibJNIWrapper.handleActionCancel(event.getPointerId(0),
                                                      event.getX(0), event.getY(0));
                return true;
            }
        }

        return super.onTouchEvent(event);
    }

    @Override public boolean onKeyDown(int keyCode, KeyEvent event)
    {
        inputManager.onInputEvent(event);
        OuzelLibJNIWrapper.handleKeyDown(keyCode);
        return super.onKeyDown(keyCode, event);
    }

    @Override public boolean onKeyUp(int keyCode, KeyEvent event)
    {
        inputManager.onInputEvent(event);
        OuzelLibJNIWrapper.handleKeyUp(keyCode);
        return super.onKeyUp(keyCode, event);
    }
}
| Handle ACTION_HOVER_MOVE on Android
| samples/android/src/org/ouzelengine/View.java | Handle ACTION_HOVER_MOVE on Android |
|
Java | apache-2.0 | dd4870b476e8e32696c9f7998131d310ff264920 | 0 | chtyim/cdap,caskdata/cdap,chtyim/cdap,chtyim/cdap,caskdata/cdap,chtyim/cdap,chtyim/cdap,caskdata/cdap,caskdata/cdap,caskdata/cdap,caskdata/cdap,chtyim/cdap | /*
* Copyright © 2015 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package co.cask.cdap.partitioned;
import co.cask.cdap.api.ProgramLifecycle;
import co.cask.cdap.api.Resources;
import co.cask.cdap.api.TxRunnable;
import co.cask.cdap.api.annotation.UseDataSet;
import co.cask.cdap.api.app.AbstractApplication;
import co.cask.cdap.api.common.Bytes;
import co.cask.cdap.api.data.DatasetContext;
import co.cask.cdap.api.data.batch.BatchWritable;
import co.cask.cdap.api.data.batch.DatasetOutputCommitter;
import co.cask.cdap.api.data.batch.OutputFormatProvider;
import co.cask.cdap.api.dataset.DatasetSpecification;
import co.cask.cdap.api.dataset.lib.AbstractDataset;
import co.cask.cdap.api.dataset.lib.KeyValueTable;
import co.cask.cdap.api.dataset.lib.PartitionDetail;
import co.cask.cdap.api.dataset.lib.PartitionKey;
import co.cask.cdap.api.dataset.lib.PartitionOutput;
import co.cask.cdap.api.dataset.lib.PartitionedFileSet;
import co.cask.cdap.api.dataset.lib.PartitionedFileSetArguments;
import co.cask.cdap.api.dataset.lib.PartitionedFileSetProperties;
import co.cask.cdap.api.dataset.lib.Partitioning;
import co.cask.cdap.api.dataset.lib.partitioned.KVTableStatePersistor;
import co.cask.cdap.api.dataset.lib.partitioned.PartitionBatchInput;
import co.cask.cdap.api.dataset.lib.partitioned.TransactionalPartitionConsumer;
import co.cask.cdap.api.dataset.module.EmbeddedDataset;
import co.cask.cdap.api.mapreduce.AbstractMapReduce;
import co.cask.cdap.api.mapreduce.MapReduceContext;
import co.cask.cdap.api.mapreduce.MapReduceTaskContext;
import co.cask.cdap.api.service.AbstractService;
import co.cask.cdap.api.service.http.AbstractHttpServiceHandler;
import co.cask.cdap.api.service.http.HttpServiceRequest;
import co.cask.cdap.api.service.http.HttpServiceResponder;
import co.cask.cdap.api.worker.AbstractWorker;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.io.ByteStreams;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.twill.filesystem.Location;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
/**
 * App used to test that MapReduce and Worker can incrementally consume partitions
 * of a PartitionedFileSet dataset.
 */
public class AppWithPartitionConsumers extends AbstractApplication {

  @Override
  public void configure() {
    setName("AppWithPartitionConsumers");
    setDescription("Application with MapReduce job and Worker consuming partitions of a PartitionedFileSet Dataset");
    createDataset("consumingState", KeyValueTable.class);
    createDataset("counts", IncrementingKeyValueTable.class);
    addMapReduce(new WordCountMapReduce());
    addWorker(new WordCountWorker());
    addService(new DatasetService());

    // Create the "lines" partitioned file set, configure it to work with MapReduce
    createDataset("lines", PartitionedFileSet.class, PartitionedFileSetProperties.builder()
      // Properties for partitioning
      .setPartitioning(Partitioning.builder().addLongField("time").build())
      // Properties for file set
      .setInputFormat(TextInputFormat.class)
      .setOutputFormat(TextOutputFormat.class)
      .setOutputProperty(TextOutputFormat.SEPERATOR, ",")
      .build());

    // Create the "outputLines" partitioned file set, configure it to work with MapReduce
    createDataset("outputLines", PartitionedFileSet.class, PartitionedFileSetProperties.builder()
      // Properties for partitioning
      .setPartitioning(Partitioning.builder().addLongField("time").build())
      // Properties for file set
      .setInputFormat(TextInputFormat.class)
      .setOutputFormat(TextOutputFormat.class)
      .setOutputProperty(TextOutputFormat.SEPERATOR, ",")
      // enable explore
      .setEnableExploreOnCreate(true)
      .setExploreFormat("text")
      .setExploreFormatProperty("delimiter", "\n")
      .setExploreSchema("record STRING")
      .build());
  }

  /**
   * BatchWritable which increments the values of the underlying KeyValue table upon
   * each write from the batch job (instead of overwriting them).
   */
  public static class IncrementingKeyValueTable extends AbstractDataset implements BatchWritable<byte[], Long> {

    private final KeyValueTable keyValueTable;

    public IncrementingKeyValueTable(DatasetSpecification spec,
                                     @EmbeddedDataset("store") KeyValueTable keyValueTable) {
      super(spec.getName(), keyValueTable);
      this.keyValueTable = keyValueTable;
    }

    @Override
    public void write(byte[] key, Long value) {
      keyValueTable.increment(key, value);
    }

    /** @return the current count for {@code key}, or null if the key is absent */
    @Nullable
    public Long read(String key) {
      byte[] read = keyValueTable.read(key);
      return read == null ? null : Bytes.toLong(read);
    }
  }

  /** Worker that consumes unprocessed partitions of "lines" and writes word counts. */
  public static class WordCountWorker extends AbstractWorker {
    public static final String NAME = "WordCountWorker";

    @Override
    public void run() {
      TransactionalPartitionConsumer partitionConsumer =
        new TransactionalPartitionConsumer(getContext(), "lines",
                                           new KVTableStatePersistor("consumingState", "state.key"));
      final List<PartitionDetail> partitions = partitionConsumer.consumePartitions().getPartitions();

      if (partitions.isEmpty()) {
        return;
      }

      // process the partitions (same as WordCountMapReduce):
      //   - read the partitions' files
      //   - increment the words' counts in the 'counts' dataset accordingly
      //   - write the counts to the 'outputLines' partitioned fileset
      getContext().execute(new TxRunnable() {
        @Override
        public void run(DatasetContext context) throws Exception {

          Map<String, Long> wordCounts = new HashMap<>();
          for (PartitionDetail partition : partitions) {
            Location location = partition.getLocation();
            // read the whole partition file; close the stream so it does not leak
            byte[] bytes;
            try (java.io.InputStream input = location.getInputStream()) {
              bytes = ByteStreams.toByteArray(input);
            }
            String string = Bytes.toString(bytes);

            for (String token : string.split(" ")) {
              Long count = Objects.firstNonNull(wordCounts.get(token), 0L);
              wordCounts.put(token, count + 1);
            }
          }

          IncrementingKeyValueTable counts = context.getDataset("counts");
          for (Map.Entry<String, Long> entry : wordCounts.entrySet()) {
            counts.write(Bytes.toBytes(entry.getKey()), entry.getValue());
          }

          PartitionedFileSet outputLines = context.getDataset("outputLines");
          PartitionKey partitionKey = PartitionKey.builder().addLongField("time", System.currentTimeMillis()).build();

          PartitionOutput outputPartition = outputLines.getPartitionOutput(partitionKey);

          Location partitionDir = outputPartition.getLocation();
          partitionDir.mkdirs();
          Location outputLocation = partitionDir.append("file");
          outputLocation.createNew();
          try (OutputStream outputStream = outputLocation.getOutputStream()) {
            outputStream.write(Bytes.toBytes(Joiner.on("\n").join(wordCounts.values())));
          }
          outputPartition.addPartition();
        }
      });

      partitionConsumer.onFinish(partitions, true);
    }
  }

  /** MapReduce that consumes unprocessed partitions of "lines" and writes word counts. */
  public static class WordCountMapReduce extends AbstractMapReduce {
    public static final String NAME = "WordCountMapReduce";

    private PartitionBatchInput.BatchPartitionCommitter batchPartitionCommitter;

    @Override
    public void configure() {
      setOutputDataset("counts");
      setMapperResources(new Resources(1024));
      setReducerResources(new Resources(1024));
    }

    @Override
    public void beforeSubmit(MapReduceContext context) throws Exception {
      batchPartitionCommitter =
        PartitionBatchInput.setInput(context, "lines", new KVTableStatePersistor("consumingState", "state.key"));

      Map<String, String> outputArgs = new HashMap<>();
      PartitionKey partitionKey = PartitionKey.builder().addLongField("time", context.getLogicalStartTime()).build();
      PartitionedFileSetArguments.setOutputPartitionKey(outputArgs, partitionKey);
      // We know that PartitionedFileSet is an OutputFormatProvider, so we set an instance of it as an output to the
      // MapReduce job to test MapReduceContext#addOutput(String, OutputFormatProvider)
      final PartitionedFileSet outputLines = context.getDataset("outputLines", outputArgs);
      context.addOutput("outputLines", outputLines);

      Job job = context.getHadoopJob();
      job.setMapperClass(Tokenizer.class);
      job.setReducerClass(Counter.class);
      job.setNumReduceTasks(1);
    }

    @Override
    public void onFinish(boolean succeeded, MapReduceContext context) throws Exception {
      // commit (or roll back) the consumed partitions' processing state
      batchPartitionCommitter.onFinish(succeeded);
    }

    /**
     * A mapper that tokenizes each input line and emits each token with a value of 1.
     */
    public static class Tokenizer extends Mapper<LongWritable, Text, Text, IntWritable> {

      private Text word = new Text();
      private static final IntWritable ONE = new IntWritable(1);

      @Override
      public void map(LongWritable key, Text data, Context context)
        throws IOException, InterruptedException {
        for (String token : data.toString().split(" ")) {
          word.set(token);
          context.write(word, ONE);
        }
      }
    }

    /**
     * A reducer that sums up the counts for each key.
     */
    public static class Counter extends Reducer<Text, IntWritable, byte[], Long>
      implements ProgramLifecycle<MapReduceTaskContext<byte[], Long>> {

      private MapReduceTaskContext<byte[], Long> mapReduceTaskContext;

      @Override
      public void initialize(MapReduceTaskContext<byte[], Long> context) throws Exception {
        this.mapReduceTaskContext = context;
      }

      @Override
      public void destroy() {
        // no-op: nothing to clean up
      }

      @Override
      public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
        long sum = 0L;
        for (IntWritable value : values) {
          sum += value.get();
        }
        // write the count to both the "counts" table and the "outputLines" fileset
        mapReduceTaskContext.write("counts", key.getBytes(), sum);
        mapReduceTaskContext.write("outputLines", null, sum);
      }
    }
  }

  /** Service exposing HTTP endpoints to write "lines" partitions and read word counts. */
  public static class DatasetService extends AbstractService {

    @Override
    protected void configure() {
      setName("DatasetService");
      addHandler(new DatasetServingHandler());
    }

    /**
     * A handler that allows reading and writing with lines and counts Datasets.
     */
    public static class DatasetServingHandler extends AbstractHttpServiceHandler {

      @UseDataSet("lines")
      private PartitionedFileSet lines;

      @UseDataSet("counts")
      private IncrementingKeyValueTable keyValueTable;

      /** Writes the request body as a new partition of "lines" keyed by {@code time}. */
      @PUT
      @Path("lines")
      public void write(HttpServiceRequest request, HttpServiceResponder responder,
                        @QueryParam("time") Long time) {

        PartitionKey key = PartitionKey.builder().addLongField("time", time).build();
        PartitionOutput partitionOutput = lines.getPartitionOutput(key);
        Location location = partitionOutput.getLocation();

        try {
          try (WritableByteChannel channel = Channels.newChannel(location.getOutputStream())) {
            channel.write(request.getContent());
          }
          partitionOutput.addPartition();
        } catch (IOException e) {
          responder.sendError(400, String.format("Unable to write path '%s'", location));
          return;
        }
        responder.sendStatus(200);
      }

      /** Returns the count for {@code word}, defaulting to 0 when the word is unseen. */
      @GET
      @Path("counts")
      public void get(HttpServiceRequest request, HttpServiceResponder responder,
                      @QueryParam("word") String word) {
        Long count = keyValueTable.read(word);
        if (count == null) {
          // if the word is missing from the table, it has a word count of 0
          count = 0L;
        }
        responder.sendJson(count);
      }
    }
  }
}
| cdap-unit-test/src/test/java/co/cask/cdap/partitioned/AppWithPartitionConsumers.java | /*
* Copyright © 2015 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package co.cask.cdap.partitioned;
import co.cask.cdap.api.ProgramLifecycle;
import co.cask.cdap.api.Resources;
import co.cask.cdap.api.TxRunnable;
import co.cask.cdap.api.ProgramLifecycle;
import co.cask.cdap.api.Resources;
import co.cask.cdap.api.TxRunnable;
import co.cask.cdap.api.annotation.UseDataSet;
import co.cask.cdap.api.app.AbstractApplication;
import co.cask.cdap.api.common.Bytes;
import co.cask.cdap.api.data.DatasetContext;
import co.cask.cdap.api.data.batch.BatchWritable;
import co.cask.cdap.api.data.batch.DatasetOutputCommitter;
import co.cask.cdap.api.data.batch.OutputFormatProvider;
import co.cask.cdap.api.dataset.DatasetSpecification;
import co.cask.cdap.api.dataset.lib.AbstractDataset;
import co.cask.cdap.api.dataset.lib.KeyValueTable;
import co.cask.cdap.api.dataset.lib.PartitionDetail;
import co.cask.cdap.api.dataset.lib.PartitionKey;
import co.cask.cdap.api.dataset.lib.PartitionOutput;
import co.cask.cdap.api.dataset.lib.PartitionedFileSet;
import co.cask.cdap.api.dataset.lib.PartitionedFileSetArguments;
import co.cask.cdap.api.dataset.lib.PartitionedFileSetProperties;
import co.cask.cdap.api.dataset.lib.Partitioning;
import co.cask.cdap.api.dataset.lib.partitioned.KVTableStatePersistor;
import co.cask.cdap.api.dataset.lib.partitioned.PartitionBatchInput;
import co.cask.cdap.api.dataset.lib.partitioned.TransactionalPartitionConsumer;
import co.cask.cdap.api.dataset.module.EmbeddedDataset;
import co.cask.cdap.api.mapreduce.AbstractMapReduce;
import co.cask.cdap.api.mapreduce.MapReduceContext;
import co.cask.cdap.api.mapreduce.MapReduceTaskContext;
import co.cask.cdap.api.service.AbstractService;
import co.cask.cdap.api.service.http.AbstractHttpServiceHandler;
import co.cask.cdap.api.service.http.HttpServiceRequest;
import co.cask.cdap.api.service.http.HttpServiceResponder;
import co.cask.cdap.api.worker.AbstractWorker;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.io.ByteStreams;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.twill.filesystem.Location;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
/**
* App used to test that MapReduce and Worker can incrementally consume partitions.
*/
public class AppWithPartitionConsumers extends AbstractApplication {
  @Override
  public void configure() {
    setName("AppWithPartitionConsumers");
    setDescription("Application with MapReduce job and Worker consuming partitions of a PartitionedFileSet Dataset");
    // Persists each partition consumer's progress, so consumers only see partitions
    // created since their previous run.
    createDataset("consumingState", KeyValueTable.class);
    // Word counts accumulate (increment, not overwrite) across program runs.
    createDataset("counts", IncrementingKeyValueTable.class);
    addMapReduce(new WordCountMapReduce());
    addWorker(new WordCountWorker());
    addService(new DatasetService());
    // Create the "lines" partitioned file set, configure it to work with MapReduce
    createDataset("lines", PartitionedFileSet.class, PartitionedFileSetProperties.builder()
      // Properties for partitioning
      .setPartitioning(Partitioning.builder().addLongField("time").build())
      // Properties for file set
      .setInputFormat(TextInputFormat.class)
      .setOutputFormat(TextOutputFormat.class)
      // NOTE: SEPERATOR is the (misspelled) constant name defined by Hadoop's TextOutputFormat.
      .setOutputProperty(TextOutputFormat.SEPERATOR, ",")
      .build());
    // Create the "outputLines" partitioned file set, configure it to work with MapReduce
    createDataset("outputLines", PartitionedFileSet.class, PartitionedFileSetProperties.builder()
      // Properties for partitioning
      .setPartitioning(Partitioning.builder().addLongField("time").build())
      // Properties for file set
      .setInputFormat(TextInputFormat.class)
      .setOutputFormat(TextOutputFormat.class)
      .setOutputProperty(TextOutputFormat.SEPERATOR, ",")
      // enable explore
      .setEnableExploreOnCreate(true)
      .setExploreFormat("text")
      .setExploreFormatProperty("delimiter", "\n")
      .setExploreSchema("record STRING")
      .build());
  }
  // BatchWritable which increments the values of the underlying KeyValue table, upon each write from the batch job.
  public static class IncrementingKeyValueTable extends AbstractDataset implements BatchWritable<byte[], Long> {
    private final KeyValueTable keyValueTable;
    public IncrementingKeyValueTable(DatasetSpecification spec,
                                     @EmbeddedDataset("store") KeyValueTable keyValueTable) {
      super(spec.getName(), keyValueTable);
      this.keyValueTable = keyValueTable;
    }
    // Increments the stored value instead of replacing it, so repeated runs accumulate counts.
    @Override
    public void write(byte[] key, Long value) {
      keyValueTable.increment(key, value);
    }
    /** Returns the accumulated value for the given key, or {@code null} if never written. */
    @Nullable
    public Long read(String key) {
      byte[] read = keyValueTable.read(key);
      return read == null ? null : Bytes.toLong(read);
    }
  }
  /**
   * Worker that incrementally consumes partitions of the "lines" fileset, counts the words in
   * them, increments the "counts" table accordingly, and writes the counts to a new partition
   * of the "outputLines" fileset.
   */
  public static class WordCountWorker extends AbstractWorker {
    public static final String NAME = "WordCountWorker";
    @Override
    public void run() {
      // Consumer progress is persisted in the "consumingState" table under key "state.key".
      TransactionalPartitionConsumer partitionConsumer =
        new TransactionalPartitionConsumer(getContext(), "lines",
                                           new KVTableStatePersistor("consumingState", "state.key"));
      final List<PartitionDetail> partitions = partitionConsumer.consumePartitions().getPartitions();
      if (partitions.isEmpty()) {
        // Nothing new to consume since the last run.
        return;
      }
      // process the partitions (same as WordCountMapReduce):
      // - read the partitions' files
      // - increment the words' counts in the 'counts' dataset accordingly
      // - write the counts to the 'outputLines' partitioned fileset
      getContext().execute(new TxRunnable() {
        @Override
        public void run(DatasetContext context) throws Exception {
          Map<String, Long> wordCounts = new HashMap<>();
          for (PartitionDetail partition : partitions) {
            ByteBuffer content;
            Location location = partition.getLocation();
            content = ByteBuffer.wrap(ByteStreams.toByteArray(location.getInputStream()));
            String string = Bytes.toString(Bytes.toBytes(content));
            // Splits on single spaces; assumes the input is space-separated words.
            for (String token : string.split(" ")) {
              Long count = Objects.firstNonNull(wordCounts.get(token), 0L);
              wordCounts.put(token, count + 1);
            }
          }
          IncrementingKeyValueTable counts = context.getDataset("counts");
          for (Map.Entry<String, Long> entry : wordCounts.entrySet()) {
            counts.write(Bytes.toBytes(entry.getKey()), entry.getValue());
          }
          // Write one output partition, keyed by the current wall-clock time, with one count
          // per line; the partition is registered only after the file is written.
          PartitionedFileSet outputLines = context.getDataset("outputLines");
          PartitionKey partitionKey = PartitionKey.builder().addLongField("time", System.currentTimeMillis()).build();
          PartitionOutput outputPartition = outputLines.getPartitionOutput(partitionKey);
          Location partitionDir = outputPartition.getLocation();
          partitionDir.mkdirs();
          Location outputLocation = partitionDir.append("file");
          outputLocation.createNew();
          try (OutputStream outputStream = outputLocation.getOutputStream()) {
            outputStream.write(Bytes.toBytes(Joiner.on("\n").join(wordCounts.values())));
          }
          outputPartition.addPartition();
        }
      });
      // Mark the consumed partitions as processed, committing the consumer's saved state.
      partitionConsumer.onFinish(partitions, true);
    }
  }
  /**
   * MapReduce that incrementally consumes partitions of "lines" as input and writes word counts
   * both to the "counts" table and to a new partition of "outputLines". The output partition is
   * added via a hand-rolled {@link OutputFormatProvider} in order to exercise
   * MapReduceContext#addOutput(String, OutputFormatProvider).
   */
  public static class WordCountMapReduce extends AbstractMapReduce {
    public static final String NAME = "WordCountMapReduce";
    // Captured in beforeSubmit; used in onFinish to commit/roll back the consumed partitions.
    private PartitionBatchInput.BatchPartitionCommitter batchPartitionCommitter;
    @Override
    public void configure() {
      setOutputDataset("counts");
      setMapperResources(new Resources(1024));
      setReducerResources(new Resources(1024));
    }
    @Override
    public void beforeSubmit(MapReduceContext context) throws Exception {
      batchPartitionCommitter =
        PartitionBatchInput.setInput(context, "lines", new KVTableStatePersistor("consumingState", "state.key"));
      // Key the output partition by the run's logical start time (also reproducible in onFinish).
      Map<String, String> outputArgs = new HashMap<>();
      PartitionKey partitionKey = PartitionKey.builder().addLongField("time", context.getLogicalStartTime()).build();
      PartitionedFileSetArguments.setOutputPartitionKey(outputArgs, partitionKey);
      // We know that PartitionedFileSet is an OutputFormatProvider, so we set our own instance of an
      // OutputFormatProvider (that delegates to the PFS's implementation) as an output to the MapReduce job
      // to test MapReduceContext#addOutput(String, OutputFormatProvider)
      final PartitionedFileSet outputLines = context.getDataset("outputLines", outputArgs);
      context.addOutput("outputLines", new OutputFormatProvider() {
        @Override
        public String getOutputFormatClassName() {
          return outputLines.getOutputFormatClassName();
        }
        @Override
        public Map<String, String> getOutputFormatConfiguration() {
          return outputLines.getOutputFormatConfiguration();
        }
      });
      Job job = context.getHadoopJob();
      job.setMapperClass(Tokenizer.class);
      job.setReducerClass(Counter.class);
      // Single reducer so all counts land in a single output file.
      job.setNumReduceTasks(1);
    }
    @Override
    public void onFinish(boolean succeeded, MapReduceContext context) throws Exception {
      batchPartitionCommitter.onFinish(succeeded);
      // We have to manually call the PFS's onSuccess, because since we only added it as an OutputFormatProvider,
      // it doesn't get treated as a dataset (its onSuccess method won't be called by our MR framework)
      Map<String, String> outputArgs = new HashMap<>();
      PartitionKey partitionKey = PartitionKey.builder().addLongField("time", context.getLogicalStartTime()).build();
      PartitionedFileSetArguments.setOutputPartitionKey(outputArgs, partitionKey);
      PartitionedFileSet outputLines = context.getDataset("outputLines", outputArgs);
      ((DatasetOutputCommitter) outputLines).onSuccess();
      super.onFinish(succeeded, context);
    }
    /**
     * A mapper that tokenizes each input line and emits each token with a value of 1.
     */
    public static class Tokenizer extends Mapper<LongWritable, Text, Text, IntWritable> {
      private Text word = new Text();
      private static final IntWritable ONE = new IntWritable(1);
      @Override
      public void map(LongWritable key, Text data, Context context)
          throws IOException, InterruptedException {
        for (String token : data.toString().split(" ")) {
          word.set(token);
          context.write(word, ONE);
        }
      }
    }
    /**
     * A reducer that sums up the counts for each key.
     */
    public static class Counter extends Reducer<Text, IntWritable, byte[], Long>
      implements ProgramLifecycle<MapReduceTaskContext<byte[], Long>> {
      private MapReduceTaskContext<byte[], Long> mapReduceTaskContext;
      @Override
      public void initialize(MapReduceTaskContext<byte[], Long> context) throws Exception {
        this.mapReduceTaskContext = context;
      }
      @Override
      public void destroy() {
      }
      @Override
      public void reduce(Text key, Iterable<IntWritable> values, Context context)
          throws IOException, InterruptedException {
        long sum = 0L;
        for (IntWritable value : values) {
          sum += value.get();
        }
        // Emit the total both to the "counts" table (keyed by word) and to the file output.
        mapReduceTaskContext.write("counts", key.getBytes(), sum);
        mapReduceTaskContext.write("outputLines", null, sum);
      }
    }
  }
  /** Service exposing HTTP endpoints to write input partitions and read word counts in tests. */
  public static class DatasetService extends AbstractService {
    @Override
    protected void configure() {
      setName("DatasetService");
      addHandler(new DatasetServingHandler());
    }
    /**
     * A handler that allows reading and writing with lines and counts Datasets.
     */
    public static class DatasetServingHandler extends AbstractHttpServiceHandler {
      @UseDataSet("lines")
      private PartitionedFileSet lines;
      @UseDataSet("counts")
      private IncrementingKeyValueTable keyValueTable;
      /** Writes the request body as a new partition of "lines", keyed by the given time. */
      @PUT
      @Path("lines")
      public void write(HttpServiceRequest request, HttpServiceResponder responder,
                        @QueryParam("time") Long time) {
        PartitionKey key = PartitionKey.builder().addLongField("time", time).build();
        PartitionOutput partitionOutput = lines.getPartitionOutput(key);
        Location location = partitionOutput.getLocation();
        try {
          try (WritableByteChannel channel = Channels.newChannel(location.getOutputStream())) {
            channel.write(request.getContent());
          }
          // Register the partition only after its contents were written successfully.
          partitionOutput.addPartition();
        } catch (IOException e) {
          responder.sendError(400, String.format("Unable to write path '%s'", location));
          return;
        }
        responder.sendStatus(200);
      }
      /** Returns the accumulated count for the given word (0 if it was never counted). */
      @GET
      @Path("counts")
      public void get(HttpServiceRequest request, HttpServiceResponder responder,
                      @QueryParam("word") String word) {
        Long count = keyValueTable.read(word);
        if (count == null) {
          // if the word is missing from the table, it has a word count of 0
          count = 0L;
        }
        responder.sendJson(count);
      }
    }
  }
}
| Update AppWithPartitionConsumers to test MapReduceContext#addOutput(String, OutputFormatProvider).
| cdap-unit-test/src/test/java/co/cask/cdap/partitioned/AppWithPartitionConsumers.java | Update AppWithPartitionConsumers to test MapReduceContext#addOutput(String, OutputFormatProvider). |
|
Java | apache-2.0 | 05615b07cbc46f6587cffdb61c1235e784a591c3 | 0 | WANdisco/gerrit,gerrit-review/gerrit,qtproject/qtqa-gerrit,gerrit-review/gerrit,GerritCodeReview/gerrit,gerrit-review/gerrit,WANdisco/gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,gerrit-review/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,gerrit-review/gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,WANdisco/gerrit,gerrit-review/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,gerrit-review/gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit | // Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.git.receive;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.gerrit.common.FooterConstants.CHANGE_ID;
import static com.google.gerrit.reviewdb.client.RefNames.REFS_CHANGES;
import static com.google.gerrit.server.change.HashtagsUtil.cleanupHashtag;
import static com.google.gerrit.server.git.MultiProgressMonitor.UNKNOWN;
import static com.google.gerrit.server.git.receive.ReceiveConstants.COMMAND_REJECTION_MESSAGE_FOOTER;
import static com.google.gerrit.server.git.receive.ReceiveConstants.ONLY_OWNER_CAN_MODIFY_WIP;
import static com.google.gerrit.server.git.receive.ReceiveConstants.SAME_CHANGE_ID_IN_MULTIPLE_CHANGES;
import static com.google.gerrit.server.git.validators.CommitValidators.NEW_PATCHSET_PATTERN;
import static com.google.gerrit.server.mail.MailUtil.getRecipientsFromFooters;
import static java.util.Comparator.comparingInt;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static org.eclipse.jgit.lib.Constants.R_HEADS;
import static org.eclipse.jgit.transport.ReceiveCommand.Result.NOT_ATTEMPTED;
import static org.eclipse.jgit.transport.ReceiveCommand.Result.OK;
import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_MISSING_OBJECT;
import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_NONFASTFORWARD;
import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_OTHER_REASON;
import com.google.common.base.Function;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.MultimapBuilder;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.google.common.collect.SortedSetMultimap;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.common.data.LabelTypes;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.common.data.PermissionRule;
import com.google.gerrit.extensions.api.changes.HashtagsInput;
import com.google.gerrit.extensions.api.changes.NotifyHandling;
import com.google.gerrit.extensions.api.changes.RecipientType;
import com.google.gerrit.extensions.api.changes.SubmitInput;
import com.google.gerrit.extensions.api.projects.ProjectConfigEntryType;
import com.google.gerrit.extensions.client.GeneralPreferencesInfo;
import com.google.gerrit.extensions.registration.DynamicMap;
import com.google.gerrit.extensions.registration.DynamicMap.Entry;
import com.google.gerrit.extensions.registration.DynamicSet;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.BadRequestException;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.PatchSetInfo;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.reviewdb.client.RevId;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.ApprovalsUtil;
import com.google.gerrit.server.ChangeUtil;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.PatchSetUtil;
import com.google.gerrit.server.Sequences;
import com.google.gerrit.server.account.AccountResolver;
import com.google.gerrit.server.account.AccountsUpdate;
import com.google.gerrit.server.change.ChangeInserter;
import com.google.gerrit.server.change.SetHashtagsOp;
import com.google.gerrit.server.config.AllProjectsName;
import com.google.gerrit.server.config.CanonicalWebUrl;
import com.google.gerrit.server.config.PluginConfig;
import com.google.gerrit.server.config.ProjectConfigEntry;
import com.google.gerrit.server.edit.ChangeEdit;
import com.google.gerrit.server.edit.ChangeEditUtil;
import com.google.gerrit.server.events.CommitReceivedEvent;
import com.google.gerrit.server.git.BanCommit;
import com.google.gerrit.server.git.GroupCollector;
import com.google.gerrit.server.git.MergeOp;
import com.google.gerrit.server.git.MergeOpRepoManager;
import com.google.gerrit.server.git.MergedByPushOp;
import com.google.gerrit.server.git.MultiProgressMonitor;
import com.google.gerrit.server.git.MultiProgressMonitor.Task;
import com.google.gerrit.server.git.ProjectConfig;
import com.google.gerrit.server.git.ReceivePackInitializer;
import com.google.gerrit.server.git.SubmoduleException;
import com.google.gerrit.server.git.SubmoduleOp;
import com.google.gerrit.server.git.TagCache;
import com.google.gerrit.server.git.ValidationError;
import com.google.gerrit.server.git.validators.CommitValidationException;
import com.google.gerrit.server.git.validators.CommitValidationMessage;
import com.google.gerrit.server.git.validators.CommitValidators;
import com.google.gerrit.server.git.validators.RefOperationValidationException;
import com.google.gerrit.server.git.validators.RefOperationValidators;
import com.google.gerrit.server.git.validators.ValidationMessage;
import com.google.gerrit.server.index.change.ChangeIndexer;
import com.google.gerrit.server.mail.MailUtil.MailRecipients;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.notedb.NotesMigration;
import com.google.gerrit.server.notedb.ReviewerStateInternal;
import com.google.gerrit.server.patch.PatchSetInfoFactory;
import com.google.gerrit.server.permissions.ChangePermission;
import com.google.gerrit.server.permissions.GlobalPermission;
import com.google.gerrit.server.permissions.PermissionBackend;
import com.google.gerrit.server.permissions.PermissionBackendException;
import com.google.gerrit.server.permissions.RefPermission;
import com.google.gerrit.server.project.CreateRefControl;
import com.google.gerrit.server.project.NoSuchChangeException;
import com.google.gerrit.server.project.NoSuchProjectException;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.project.ProjectControl;
import com.google.gerrit.server.project.ProjectState;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gerrit.server.query.change.InternalChangeQuery;
import com.google.gerrit.server.ssh.SshInfo;
import com.google.gerrit.server.update.BatchUpdate;
import com.google.gerrit.server.update.BatchUpdateOp;
import com.google.gerrit.server.update.ChangeContext;
import com.google.gerrit.server.update.Context;
import com.google.gerrit.server.update.RepoContext;
import com.google.gerrit.server.update.RepoOnlyOp;
import com.google.gerrit.server.update.UpdateException;
import com.google.gerrit.server.util.LabelVote;
import com.google.gerrit.server.util.MagicBranch;
import com.google.gerrit.server.util.RequestId;
import com.google.gerrit.server.util.RequestScopePropagator;
import com.google.gerrit.util.cli.CmdLineParser;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.assistedinject.Assisted;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.regex.Matcher;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.notes.NoteMap;
import org.eclipse.jgit.revwalk.FooterLine;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevSort;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.revwalk.filter.RevFilter;
import org.eclipse.jgit.transport.ReceiveCommand;
import org.eclipse.jgit.transport.ReceiveCommand.Result;
import org.eclipse.jgit.transport.ReceivePack;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.Option;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Receives change upload using the Git receive-pack protocol. */
class ReceiveCommits {
private static final Logger log = LoggerFactory.getLogger(ReceiveCommits.class);
private static final String BYPASS_REVIEW = "bypass-review";
  /**
   * Error conditions recorded while processing receive commands. Each value carries the exact
   * message reported back to the pushing user when the condition occurred.
   */
  private enum ReceiveError {
    CONFIG_UPDATE(
        "You are not allowed to perform this operation.\n"
            + "Configuration changes can only be pushed by project owners\n"
            + "who also have 'Push' rights on "
            + RefNames.REFS_CONFIG),
    UPDATE(
        "You are not allowed to perform this operation.\n"
            + "To push into this reference you need 'Push' rights."),
    DELETE("You need 'Push' rights with the 'Force Push'\nflag set to delete references."),
    DELETE_CHANGES("Cannot delete from '" + REFS_CHANGES + "'"),
    CODE_REVIEW(
        "You need 'Push' rights to upload code review requests.\n"
            + "Verify that you are pushing to the right branch.");
    private final String value;
    ReceiveError(String value) {
      this.value = value;
    }
    /** Returns the user-facing message for this error condition. */
    String get() {
      return value;
    }
  }
  /** Assisted-injection factory creating a {@code ReceiveCommits} for one receive-pack session. */
  interface Factory {
    ReceiveCommits create(
        ProjectControl projectControl,
        ReceivePack receivePack,
        AllRefsWatcher allRefsWatcher,
        SetMultimap<ReviewerStateInternal, Account.Id> extraReviewers);
  }
  /**
   * Default {@link MessageSender} that writes messages straight to the client over the wire via
   * the enclosing instance's {@link ReceivePack}.
   */
  private class ReceivePackMessageSender implements MessageSender {
    @Override
    public void sendMessage(String what) {
      rp.sendMessage(what);
    }
    @Override
    public void sendError(String what) {
      rp.sendError(what);
    }
    @Override
    public void sendBytes(byte[] what) {
      // Delegate to the offset/length overload covering the whole array.
      sendBytes(what, 0, what.length);
    }
    @Override
    public void sendBytes(byte[] what, int off, int len) {
      try {
        rp.getMessageOutputStream().write(what, off, len);
      } catch (IOException e) {
        // Ignore write failures (matching JGit behavior).
      }
    }
    @Override
    public void flush() {
      try {
        rp.getMessageOutputStream().flush();
      } catch (IOException e) {
        // Ignore write failures (matching JGit behavior).
      }
    }
  }
private static final Function<Exception, RestApiException> INSERT_EXCEPTION =
new Function<Exception, RestApiException>() {
@Override
public RestApiException apply(Exception input) {
if (input instanceof RestApiException) {
return (RestApiException) input;
} else if ((input instanceof ExecutionException)
&& (input.getCause() instanceof RestApiException)) {
return (RestApiException) input.getCause();
}
return new RestApiException("Error inserting change/patchset", input);
}
};
// ReceiveCommits has a lot of fields, sorry. Here and in the constructor they are split up
// somewhat, and kept sorted lexicographically within sections, except where later assignments
// depend on previous ones.
// Injected fields.
private final AccountResolver accountResolver;
private final AccountsUpdate.Server accountsUpdate;
private final AllProjectsName allProjectsName;
private final BatchUpdate.Factory batchUpdateFactory;
private final ChangeEditUtil editUtil;
private final ChangeIndexer indexer;
private final ChangeInserter.Factory changeInserterFactory;
private final ChangeNotes.Factory notesFactory;
private final CmdLineParser.Factory optionParserFactory;
private final CommitValidators.Factory commitValidatorsFactory;
private final DynamicMap<ProjectConfigEntry> pluginConfigEntries;
private final DynamicSet<ReceivePackInitializer> initializers;
private final IdentifiedUser user;
private final MergedByPushOp.Factory mergedByPushOpFactory;
private final NotesMigration notesMigration;
private final PatchSetInfoFactory patchSetInfoFactory;
private final PatchSetUtil psUtil;
private final PermissionBackend permissionBackend;
private final ProjectCache projectCache;
private final Provider<InternalChangeQuery> queryProvider;
private final Provider<MergeOp> mergeOpProvider;
private final Provider<MergeOpRepoManager> ormProvider;
private final ReceiveConfig receiveConfig;
private final RefOperationValidators.Factory refValidatorsFactory;
private final ReplaceOp.Factory replaceOpFactory;
private final RequestScopePropagator requestScopePropagator;
private final ReviewDb db;
private final Sequences seq;
private final SetHashtagsOp.Factory hashtagsFactory;
private final SshInfo sshInfo;
private final String canonicalWebUrl;
private final SubmoduleOp.Factory subOpFactory;
private final TagCache tagCache;
private final CreateRefControl createRefControl;
// Assisted injected fields.
private final AllRefsWatcher allRefsWatcher;
private final ImmutableSetMultimap<ReviewerStateInternal, Account.Id> extraReviewers;
private final ProjectControl projectControl;
private final ReceivePack rp;
// Immutable fields derived from constructor arguments.
private final LabelTypes labelTypes;
private final NoteMap rejectCommits;
private final PermissionBackend.ForProject permissions;
private final Project project;
private final Repository repo;
private final RequestId receiveId;
// Collections populated during processing.
private final List<UpdateGroupsRequest> updateGroups;
private final List<ValidationMessage> messages;
private final ListMultimap<ReceiveError, String> errors;
private final ListMultimap<String, String> pushOptions;
private final Map<Change.Id, ReplaceRequest> replaceByChange;
private final Set<ObjectId> validCommits;
/**
* Actual commands to be executed, as opposed to the mix of actual and magic commands that were
* provided over the wire.
*
* <p>Excludes commands executed implicitly as part of other {@link BatchUpdateOp}s, such as
* creating patch set refs.
*/
private final List<ReceiveCommand> actualCommands;
// Collections lazily populated during processing.
private List<CreateRequest> newChanges;
private ListMultimap<Change.Id, Ref> refsByChange;
private ListMultimap<ObjectId, Ref> refsById;
// Other settings populated during processing.
private MagicBranchInput magicBranch;
private boolean newChangeForAllNotInTarget;
// Handles for outputting back over the wire to the end user.
private Task newProgress;
private Task replaceProgress;
private Task closeProgress;
private Task commandProgress;
private MessageSender messageSender;
  /**
   * Creates the receive handler bound to the given project ({@code projectControl}) and wire
   * session ({@code rp}).
   *
   * <p>Assignments below are grouped to mirror the field declarations: injected, assisted,
   * derived, collections, then mutable processing state. Within a section they are kept sorted
   * lexicographically, except where a later assignment depends on an earlier one.
   *
   * @throws IOException if the ban-commits note map cannot be loaded from the repository
   */
  @Inject
  ReceiveCommits(
      @CanonicalWebUrl String canonicalWebUrl,
      AccountResolver accountResolver,
      AccountsUpdate.Server accountsUpdate,
      AllProjectsName allProjectsName,
      BatchUpdate.Factory batchUpdateFactory,
      ChangeEditUtil editUtil,
      ChangeIndexer indexer,
      ChangeInserter.Factory changeInserterFactory,
      ChangeNotes.Factory notesFactory,
      CmdLineParser.Factory optionParserFactory,
      CommitValidators.Factory commitValidatorsFactory,
      DynamicMap<ProjectConfigEntry> pluginConfigEntries,
      DynamicSet<ReceivePackInitializer> initializers,
      MergedByPushOp.Factory mergedByPushOpFactory,
      NotesMigration notesMigration,
      PatchSetInfoFactory patchSetInfoFactory,
      PatchSetUtil psUtil,
      PermissionBackend permissionBackend,
      ProjectCache projectCache,
      Provider<InternalChangeQuery> queryProvider,
      Provider<MergeOp> mergeOpProvider,
      Provider<MergeOpRepoManager> ormProvider,
      ReceiveConfig receiveConfig,
      RefOperationValidators.Factory refValidatorsFactory,
      ReplaceOp.Factory replaceOpFactory,
      RequestScopePropagator requestScopePropagator,
      ReviewDb db,
      Sequences seq,
      SetHashtagsOp.Factory hashtagsFactory,
      SshInfo sshInfo,
      SubmoduleOp.Factory subOpFactory,
      TagCache tagCache,
      CreateRefControl createRefControl,
      @Assisted ProjectControl projectControl,
      @Assisted ReceivePack rp,
      @Assisted AllRefsWatcher allRefsWatcher,
      @Assisted SetMultimap<ReviewerStateInternal, Account.Id> extraReviewers)
      throws IOException {
    // Injected fields.
    this.accountResolver = accountResolver;
    this.accountsUpdate = accountsUpdate;
    this.allProjectsName = allProjectsName;
    this.batchUpdateFactory = batchUpdateFactory;
    this.canonicalWebUrl = canonicalWebUrl;
    this.changeInserterFactory = changeInserterFactory;
    this.commitValidatorsFactory = commitValidatorsFactory;
    this.db = db;
    this.editUtil = editUtil;
    this.hashtagsFactory = hashtagsFactory;
    this.indexer = indexer;
    this.initializers = initializers;
    this.mergeOpProvider = mergeOpProvider;
    this.mergedByPushOpFactory = mergedByPushOpFactory;
    this.notesFactory = notesFactory;
    this.notesMigration = notesMigration;
    this.optionParserFactory = optionParserFactory;
    this.ormProvider = ormProvider;
    this.patchSetInfoFactory = patchSetInfoFactory;
    this.permissionBackend = permissionBackend;
    this.pluginConfigEntries = pluginConfigEntries;
    this.projectCache = projectCache;
    this.psUtil = psUtil;
    this.queryProvider = queryProvider;
    this.receiveConfig = receiveConfig;
    this.refValidatorsFactory = refValidatorsFactory;
    this.replaceOpFactory = replaceOpFactory;
    this.requestScopePropagator = requestScopePropagator;
    this.seq = seq;
    this.sshInfo = sshInfo;
    this.subOpFactory = subOpFactory;
    this.tagCache = tagCache;
    this.createRefControl = createRefControl;
    // Assisted injected fields.
    this.allRefsWatcher = allRefsWatcher;
    this.extraReviewers = ImmutableSetMultimap.copyOf(extraReviewers);
    this.projectControl = projectControl;
    this.rp = rp;
    // Immutable fields derived from constructor arguments.
    repo = rp.getRepository();
    user = projectControl.getUser().asIdentifiedUser();
    project = projectControl.getProject();
    labelTypes = projectControl.getLabelTypes();
    permissions = permissionBackend.user(user).project(project.getNameKey());
    receiveId = RequestId.forProject(project.getNameKey());
    rejectCommits = BanCommit.loadRejectCommitsMap(rp.getRepository(), rp.getRevWalk());
    // Collections populated during processing.
    actualCommands = new ArrayList<>();
    errors = LinkedListMultimap.create();
    messages = new ArrayList<>();
    pushOptions = LinkedListMultimap.create();
    replaceByChange = new LinkedHashMap<>();
    updateGroups = new ArrayList<>();
    validCommits = new HashSet<>();
    // Collections lazily populated during processing.
    newChanges = Collections.emptyList();
    // Other settings populated during processing.
    newChangeForAllNotInTarget =
        projectControl.getProjectState().isCreateNewChangeForAllNotInTarget();
    // Handles for outputting back over the wire to the end user.
    messageSender = new ReceivePackMessageSender();
  }
void init() {
for (ReceivePackInitializer i : initializers) {
i.init(projectControl.getProject().getNameKey(), rp);
}
}
/** Set a message sender for this operation. */
void setMessageSender(MessageSender ms) {
messageSender = ms != null ? ms : new ReceivePackMessageSender();
}
MessageSender getMessageSender() {
if (messageSender == null) {
setMessageSender(null);
}
return messageSender;
}
  /** Returns the project this receive operation targets. */
  Project getProject() {
    return project;
  }
  /** Queues an informational message for the client; delivered later by {@code sendMessages()}. */
  private void addMessage(String message) {
    messages.add(new CommitValidationMessage(message, false));
  }
  /** Queues an error message for the client; delivered later by {@code sendMessages()}. */
  void addError(String error) {
    messages.add(new CommitValidationMessage(error, true));
  }
void sendMessages() {
for (ValidationMessage m : messages) {
if (m.isError()) {
messageSender.sendError(m.getMessage());
} else {
messageSender.sendMessage(m.getMessage());
}
}
}
/**
 * Top-level entry point for a push: parses the client's ref update commands, selects
 * new/replaced changes for magic-branch pushes, executes all updates, reports any queued
 * errors, and finally runs post-update work (auto-close, superproject gitlinks).
 */
void processCommands(Collection<ReceiveCommand> commands, MultiProgressMonitor progress) {
  newProgress = progress.beginSubTask("new", UNKNOWN);
  replaceProgress = progress.beginSubTask("updated", UNKNOWN);
  closeProgress = progress.beginSubTask("closed", UNKNOWN);
  commandProgress = progress.beginSubTask("refs", UNKNOWN);

  try {
    parseCommands(commands);
  } catch (PermissionBackendException | NoSuchProjectException | IOException err) {
    // Parsing aborted mid-stream: fail every command that was not yet attempted.
    for (ReceiveCommand cmd : actualCommands) {
      if (cmd.getResult() == NOT_ATTEMPTED) {
        cmd.setResult(REJECTED_OTHER_REASON, "internal server error");
      }
    }
    logError(String.format("Failed to process refs in %s", project.getName()), err);
  }
  if (magicBranch != null && magicBranch.cmd.getResult() == NOT_ATTEMPTED) {
    selectNewAndReplacedChangesFromMagicBranch();
  }
  preparePatchSetsForReplace();
  insertChangesAndPatchSets();
  newProgress.end();
  replaceProgress.end();

  // Report accumulated per-category rejection explanations to the client.
  if (!errors.isEmpty()) {
    logDebug("Handling error conditions: {}", errors.keySet());
    for (ReceiveError error : errors.keySet()) {
      rp.sendMessage(buildError(error, errors.get(error)));
    }
    rp.sendMessage(String.format("User: %s", displayName(user)));
    rp.sendMessage(COMMAND_REJECTION_MESSAGE_FOOTER);
  }

  Set<Branch.NameKey> branches = new HashSet<>();
  for (ReceiveCommand c : actualCommands) {
    // Most post-update steps should happen in UpdateOneRefOp#postUpdate. The only steps that
    // should happen in this loop are things that can't happen within one BatchUpdate because they
    // involve kicking off an additional BatchUpdate.
    if (c.getResult() != OK) {
      continue;
    }
    if (isHead(c) || isConfig(c)) {
      switch (c.getType()) {
        case CREATE:
        case UPDATE:
        case UPDATE_NONFASTFORWARD:
          autoCloseChanges(c);
          branches.add(new Branch.NameKey(project.getNameKey(), c.getRefName()));
          break;

        case DELETE:
          break;
      }
    }
  }

  // Update superproject gitlinks if required.
  if (!branches.isEmpty()) {
    try (MergeOpRepoManager orm = ormProvider.get()) {
      orm.setContext(db, TimeUtil.nowTs(), user, receiveId);
      SubmoduleOp op = subOpFactory.create(branches, orm);
      op.updateSuperProjects();
    } catch (SubmoduleException e) {
      logError("Can't update the superprojects", e);
    }
  }
  closeProgress.end();
  commandProgress.end();
  progress.end();
  reportMessages();
}
/**
 * Emits the "New Changes" / "Updated Changes" summary the client sees at the end of a push,
 * one formatted URL line per created or successfully replaced change.
 */
private void reportMessages() {
  List<CreateRequest> created =
      newChanges.stream().filter(r -> r.change != null).collect(toList());
  if (!created.isEmpty()) {
    addMessage("");
    addMessage("New Changes:");
    for (CreateRequest c : created) {
      addMessage(
          formatChangeUrl(
              canonicalWebUrl,
              c.change,
              c.change.getSubject(),
              c.change.getStatus() == Change.Status.DRAFT,
              false));
    }
    addMessage("");
  }

  // Only report replacements whose ref update actually succeeded, ordered by change number.
  List<ReplaceRequest> updated =
      replaceByChange
          .values()
          .stream()
          .filter(r -> !r.skip && r.inputCommand.getResult() == OK)
          .sorted(comparingInt(r -> r.notes.getChangeId().get()))
          .collect(toList());
  if (!updated.isEmpty()) {
    addMessage("");
    addMessage("Updated Changes:");
    boolean edit = magicBranch != null && magicBranch.edit;
    for (ReplaceRequest u : updated) {
      String subject;
      if (edit) {
        try {
          // For change edits, show the subject of the pushed commit itself.
          subject = rp.getRevWalk().parseCommit(u.newCommitId).getShortMessage();
        } catch (IOException e) {
          // Log and fall back to original change subject
          logWarn("failed to get subject for edit patch set", e);
          subject = u.notes.getChange().getSubject();
        }
      } else {
        subject = u.info.getSubject();
      }
      addMessage(
          formatChangeUrl(
              canonicalWebUrl,
              u.notes.getChange(),
              subject,
              u.replaceOp != null && u.replaceOp.getPatchSet().isDraft(),
              edit));
    }
    addMessage("");
  }
}
/**
 * Formats a single change line for client output: indented URL, cropped subject, and
 * optional [DRAFT] / [EDIT] markers.
 */
private static String formatChangeUrl(
    String url, Change change, String subject, boolean draft, boolean edit) {
  StringBuilder line = new StringBuilder();
  line.append(" ");
  line.append(url);
  line.append(change.getChangeId());
  line.append(" ");
  line.append(ChangeUtil.cropSubject(subject));
  if (draft) {
    line.append(" [DRAFT]");
  }
  if (edit) {
    line.append(" [EDIT]");
  }
  return line.toString();
}
/**
 * Executes all queued change creations, patch set replacements, group updates and plain ref
 * updates inside one BatchUpdate, then submits immediately when %submit was requested.
 *
 * <p>Fix: the "replace requests" and "group update requests" debug logs previously reported
 * {@code newChanges.size()} (copy-paste from the create-request log); they now report the
 * sizes of the collections actually being iterated.
 */
private void insertChangesAndPatchSets() {
  ReceiveCommand magicBranchCmd = magicBranch != null ? magicBranch.cmd : null;
  if (magicBranchCmd != null && magicBranchCmd.getResult() != NOT_ATTEMPTED) {
    // The magic-branch ref update already failed; change updates would be inconsistent.
    logWarn(
        String.format(
            "Skipping change updates on %s because ref update failed: %s %s",
            project.getName(),
            magicBranchCmd.getResult(),
            Strings.nullToEmpty(magicBranchCmd.getMessage())));
    return;
  }

  try (BatchUpdate bu =
          batchUpdateFactory.create(
              db, project.getNameKey(), user.materializedCopy(), TimeUtil.nowTs());
      ObjectInserter ins = repo.newObjectInserter();
      ObjectReader reader = ins.newReader();
      RevWalk rw = new RevWalk(reader)) {
    bu.setRepository(repo, rw, ins).updateChangesInParallel();
    bu.setRequestId(receiveId);
    bu.setRefLogMessage("push");

    logDebug("Adding {} replace requests", replaceByChange.size());
    for (ReplaceRequest replace : replaceByChange.values()) {
      replace.addOps(bu, replaceProgress);
    }

    logDebug("Adding {} create requests", newChanges.size());
    for (CreateRequest create : newChanges) {
      create.addOps(bu);
    }

    logDebug("Adding {} group update requests", updateGroups.size());
    updateGroups.forEach(r -> r.addOps(bu));

    logDebug("Adding {} additional ref updates", actualCommands.size());
    actualCommands.forEach(c -> bu.addRepoOnlyOp(new UpdateOneRefOp(c)));

    logDebug("Executing batch");
    try {
      bu.execute();
    } catch (UpdateException e) {
      throw INSERT_EXCEPTION.apply(e);
    }
    if (magicBranchCmd != null) {
      magicBranchCmd.setResult(OK);
    }
    for (ReplaceRequest replace : replaceByChange.values()) {
      String rejectMessage = replace.getRejectMessage();
      if (rejectMessage == null) {
        if (replace.inputCommand.getResult() == NOT_ATTEMPTED) {
          // Not necessarily the magic branch, so need to set OK on the original value.
          replace.inputCommand.setResult(OK);
        }
      } else {
        logDebug("Rejecting due to message from ReplaceOp");
        reject(replace.inputCommand, rejectMessage);
      }
    }

  } catch (ResourceConflictException e) {
    addMessage(e.getMessage());
    reject(magicBranchCmd, "conflict");
  } catch (RestApiException | IOException err) {
    logError("Can't insert change/patch set for " + project.getName(), err);
    reject(magicBranchCmd, "internal server error: " + err.getMessage());
  }

  if (magicBranch != null && magicBranch.submit) {
    try {
      submit(newChanges, replaceByChange.values());
    } catch (ResourceConflictException e) {
      addMessage(e.getMessage());
      reject(magicBranchCmd, "conflict");
    } catch (RestApiException
        | OrmException
        | UpdateException
        | IOException
        | ConfigInvalidException
        | PermissionBackendException e) {
      logError("Error submitting changes to " + project.getName(), e);
      reject(magicBranchCmd, "error during submit");
    }
  }
}
/**
 * Renders a rejection explanation: the affected branch name(s), a colon, then the error's
 * descriptive text on the following line.
 */
private String buildError(ReceiveError error, List<String> branches) {
  if (branches.size() == 1) {
    return "Branch " + branches.get(0) + ":\n" + error.get();
  }
  return "Branches " + String.join(", ", branches) + ":\n" + error.get();
}
/** Returns the user's username, falling back to the preferred email when no username is set. */
private static String displayName(IdentifiedUser user) {
  String name = user.getUserName();
  return name != null ? name : user.getAccount().getPreferredEmail();
}
/**
 * Classifies and validates every ref update command sent by the client.
 *
 * <p>Push options (key[=value]) are collected first. Each command is then routed to the
 * matching parser: magic branch, patch-set replace (refs/changes/...), or plain
 * create/update/delete/rewind. Pushes to refs/meta/config additionally get full project
 * configuration validation (parent checks, plugin config checks).
 */
private void parseCommands(Collection<ReceiveCommand> commands)
    throws PermissionBackendException, NoSuchProjectException, IOException {
  List<String> optionList = rp.getPushOptions();
  if (optionList != null) {
    for (String option : optionList) {
      int e = option.indexOf('=');
      if (e > 0) {
        pushOptions.put(option.substring(0, e), option.substring(e + 1));
      } else {
        pushOptions.put(option, "");
      }
    }
  }

  logDebug("Parsing {} commands", commands.size());
  for (ReceiveCommand cmd : commands) {
    if (cmd.getResult() != NOT_ATTEMPTED) {
      // Already rejected by the core receive process.
      logDebug("Already processed by core: {} {}", cmd.getResult(), cmd);
      continue;
    }

    if (!Repository.isValidRefName(cmd.getRefName()) || cmd.getRefName().contains("//")) {
      reject(cmd, "not valid ref");
      continue;
    }

    if (MagicBranch.isMagicBranch(cmd.getRefName())) {
      parseMagicBranch(cmd);
      continue;
    }

    // On All-Users, rewrite refs/users/self onto the caller's own user ref; the wrapper
    // mirrors any result back onto the original command so the client sees it.
    if (projectControl.getProjectState().isAllUsers()
        && RefNames.REFS_USERS_SELF.equals(cmd.getRefName())) {
      String newName = RefNames.refsUsers(user.getAccountId());
      logDebug("Swapping out command for {} to {}", RefNames.REFS_USERS_SELF, newName);
      final ReceiveCommand orgCmd = cmd;
      cmd =
          new ReceiveCommand(cmd.getOldId(), cmd.getNewId(), newName, cmd.getType()) {
            @Override
            public void setResult(Result s, String m) {
              super.setResult(s, m);
              orgCmd.setResult(s, m);
            }
          };
    }

    Matcher m = NEW_PATCHSET_PATTERN.matcher(cmd.getRefName());
    if (m.matches()) {
      // The referenced change must exist and must still be open.
      //
      Change.Id changeId = Change.Id.parse(m.group(1));
      parseReplaceCommand(cmd, changeId);
      continue;
    }

    switch (cmd.getType()) {
      case CREATE:
        parseCreate(cmd);
        break;

      case UPDATE:
        parseUpdate(cmd);
        break;

      case DELETE:
        parseDelete(cmd);
        break;

      case UPDATE_NONFASTFORWARD:
        parseRewind(cmd);
        break;

      default:
        reject(cmd, "prohibited by Gerrit: unknown command type " + cmd.getType());
        continue;
    }

    if (cmd.getResult() != NOT_ATTEMPTED) {
      continue;
    }

    // Extra validation for pushes that modify the project configuration ref.
    if (isConfig(cmd)) {
      logDebug("Processing {} command", cmd.getRefName());
      if (!projectControl.isOwner()) {
        reject(cmd, "not project owner");
        continue;
      }

      switch (cmd.getType()) {
        case CREATE:
        case UPDATE:
        case UPDATE_NONFASTFORWARD:
          try {
            // The pushed config must parse cleanly before any semantic checks run.
            ProjectConfig cfg = new ProjectConfig(project.getNameKey());
            cfg.load(rp.getRevWalk(), cmd.getNewId());
            if (!cfg.getValidationErrors().isEmpty()) {
              addError("Invalid project configuration:");
              for (ValidationError err : cfg.getValidationErrors()) {
                addError(" " + err.getMessage());
              }
              reject(cmd, "invalid project configuration");
              logError(
                  "User "
                      + user.getUserName()
                      + " tried to push invalid project configuration "
                      + cmd.getNewId().name()
                      + " for "
                      + project.getName());
              continue;
            }

            // Parent changes: root project may not gain a parent; reparenting requires
            // server admin and the new parent must exist.
            Project.NameKey newParent = cfg.getProject().getParent(allProjectsName);
            Project.NameKey oldParent = project.getParent(allProjectsName);
            if (oldParent == null) {
              // update of the 'All-Projects' project
              if (newParent != null) {
                reject(cmd, "invalid project configuration: root project cannot have parent");
                continue;
              }
            } else {
              if (!oldParent.equals(newParent)) {
                try {
                  permissionBackend.user(user).check(GlobalPermission.ADMINISTRATE_SERVER);
                } catch (AuthException e) {
                  reject(cmd, "invalid project configuration: only Gerrit admin can set parent");
                  continue;
                }
              }

              if (projectCache.get(newParent) == null) {
                reject(cmd, "invalid project configuration: parent does not exist");
                continue;
              }
            }

            // Plugin config entries: changed values must be editable and, for LIST
            // entries, within the permitted value set.
            for (Entry<ProjectConfigEntry> e : pluginConfigEntries) {
              PluginConfig pluginCfg = cfg.getPluginConfig(e.getPluginName());
              ProjectConfigEntry configEntry = e.getProvider().get();
              String value = pluginCfg.getString(e.getExportName());
              String oldValue =
                  projectControl
                      .getProjectState()
                      .getConfig()
                      .getPluginConfig(e.getPluginName())
                      .getString(e.getExportName());
              if (configEntry.getType() == ProjectConfigEntryType.ARRAY) {
                oldValue =
                    Arrays.stream(
                            projectControl
                                .getProjectState()
                                .getConfig()
                                .getPluginConfig(e.getPluginName())
                                .getStringList(e.getExportName()))
                        .collect(joining("\n"));
              }

              if ((value == null ? oldValue != null : !value.equals(oldValue))
                  && !configEntry.isEditable(projectControl.getProjectState())) {
                reject(
                    cmd,
                    String.format(
                        "invalid project configuration: Not allowed to set parameter"
                            + " '%s' of plugin '%s' on project '%s'.",
                        e.getExportName(), e.getPluginName(), project.getName()));
                continue;
              }

              if (ProjectConfigEntryType.LIST.equals(configEntry.getType())
                  && value != null
                  && !configEntry.getPermittedValues().contains(value)) {
                reject(
                    cmd,
                    String.format(
                        "invalid project configuration: The value '%s' is "
                            + "not permitted for parameter '%s' of plugin '%s'.",
                        value, e.getExportName(), e.getPluginName()));
              }
            }
          } catch (Exception e) {
            reject(cmd, "invalid project configuration");
            logError(
                "User "
                    + user.getUserName()
                    + " tried to push invalid project configuration "
                    + cmd.getNewId().name()
                    + " for "
                    + project.getName(),
                e);
            continue;
          }
          break;

        case DELETE:
          break;

        default:
          reject(
              cmd,
              "prohibited by Gerrit: don't know how to handle config update of type "
                  + cmd.getType());
          continue;
      }
    }
  }
}
/**
 * Handles a ref-creation command: validates the new object, checks ref-creation permission,
 * runs ref-operation and new-commit validation, and queues the command for execution.
 *
 * <p>Consistency fix: the branch key is now built from the typed {@code project.getNameKey()}
 * (matching the {@code validateNewCommits} call below and the rest of the file) and reused
 * instead of constructing a second key from the raw project name string.
 */
private void parseCreate(ReceiveCommand cmd)
    throws PermissionBackendException, NoSuchProjectException, IOException {
  RevObject obj;
  try {
    obj = rp.getRevWalk().parseAny(cmd.getNewId());
  } catch (IOException err) {
    logError(
        "Invalid object " + cmd.getNewId().name() + " for " + cmd.getRefName() + " creation",
        err);
    reject(cmd, "invalid object");
    return;
  }
  logDebug("Creating {}", cmd);

  // Branch heads may only point at commits; isCommit rejects the command otherwise.
  if (isHead(cmd) && !isCommit(cmd)) {
    return;
  }

  Branch.NameKey branch = new Branch.NameKey(project.getNameKey(), cmd.getRefName());
  String rejectReason = createRefControl.canCreateRef(rp.getRepository(), obj, user, branch);
  if (rejectReason != null) {
    reject(cmd, "prohibited by Gerrit: " + rejectReason);
    return;
  }
  if (!validRefOperation(cmd)) {
    // validRefOperation sets messages, so no need to provide more feedback.
    return;
  }
  validateNewCommits(branch, cmd);
  actualCommands.add(cmd);
}
/**
 * Handles a fast-forward ref update: requires UPDATE permission, validates the new commits,
 * and queues the command; a permission failure records a category error and rejects.
 */
private void parseUpdate(ReceiveCommand cmd) throws PermissionBackendException {
  logDebug("Updating {}", cmd);
  try {
    permissions.ref(cmd.getRefName()).check(RefPermission.UPDATE);
  } catch (AuthException deny) {
    // Record a per-category explanation for the final error report, then reject.
    if (RefNames.REFS_CONFIG.equals(cmd.getRefName())) {
      errors.put(ReceiveError.CONFIG_UPDATE, RefNames.REFS_CONFIG);
    } else {
      errors.put(ReceiveError.UPDATE, cmd.getRefName());
    }
    reject(cmd, "prohibited by Gerrit: ref update access denied");
    return;
  }

  // Branch heads may only point at commits; isCommit rejects the command otherwise.
  if (isHead(cmd) && !isCommit(cmd)) {
    return;
  }
  if (!validRefOperation(cmd)) {
    return;
  }
  validateNewCommits(new Branch.NameKey(project.getNameKey(), cmd.getRefName()), cmd);
  actualCommands.add(cmd);
}
/**
 * Returns true if the command's new object is a commit; otherwise rejects the command
 * (with "invalid object" on parse failure, "not a commit" for non-commit objects).
 */
private boolean isCommit(ReceiveCommand cmd) {
  RevObject obj;
  try {
    obj = rp.getRevWalk().parseAny(cmd.getNewId());
  } catch (IOException err) {
    logError("Invalid object " + cmd.getNewId().name() + " for " + cmd.getRefName(), err);
    reject(cmd, "invalid object");
    return false;
  }

  if (!(obj instanceof RevCommit)) {
    reject(cmd, "not a commit");
    return false;
  }
  return true;
}
/**
 * Handles a ref deletion: change refs may never be deleted; other refs require DELETE
 * permission; refs/meta/config gets its own message when permission is missing.
 */
private void parseDelete(ReceiveCommand cmd) throws PermissionBackendException {
  logDebug("Deleting {}", cmd);
  if (cmd.getRefName().startsWith(REFS_CHANGES)) {
    errors.put(ReceiveError.DELETE_CHANGES, cmd.getRefName());
    reject(cmd, "cannot delete changes");
  } else if (canDelete(cmd)) {
    // NOTE(review): this branch runs before the refs/meta/config case below, so a caller
    // holding DELETE on the config ref can delete it — confirm this is intended.
    if (!validRefOperation(cmd)) {
      return;
    }
    actualCommands.add(cmd);
  } else if (RefNames.REFS_CONFIG.equals(cmd.getRefName())) {
    reject(cmd, "cannot delete project configuration");
  } else {
    errors.put(ReceiveError.DELETE, cmd.getRefName());
    reject(cmd, "cannot delete references");
  }
}
// Whether the caller holds DELETE permission on the ref; AuthException maps to false
// instead of propagating.
private boolean canDelete(ReceiveCommand cmd) throws PermissionBackendException {
  try {
    permissions.ref(cmd.getRefName()).check(RefPermission.DELETE);
    return true;
  } catch (AuthException e) {
    return false;
  }
}
/**
 * Handles a non-fast-forward update: validates new commits when the new object is a commit,
 * then requires FORCE_UPDATE permission before queueing the command.
 */
private void parseRewind(ReceiveCommand cmd) throws PermissionBackendException {
  RevCommit newObject;
  try {
    newObject = rp.getRevWalk().parseCommit(cmd.getNewId());
  } catch (IncorrectObjectTypeException notCommit) {
    // Non-commit objects (e.g. tags) skip the commit validation block below.
    newObject = null;
  } catch (IOException err) {
    logError(
        "Invalid object " + cmd.getNewId().name() + " for " + cmd.getRefName() + " forced update",
        err);
    reject(cmd, "invalid object");
    return;
  }
  logDebug("Rewinding {}", cmd);

  if (newObject != null) {
    validateNewCommits(new Branch.NameKey(project.getNameKey(), cmd.getRefName()), cmd);
    if (cmd.getResult() != NOT_ATTEMPTED) {
      return;
    }
  }

  boolean ok;
  try {
    permissions.ref(cmd.getRefName()).check(RefPermission.FORCE_UPDATE);
    ok = true;
  } catch (AuthException err) {
    ok = false;
  }
  if (ok) {
    if (!validRefOperation(cmd)) {
      return;
    }
    actualCommands.add(cmd);
  } else {
    cmd.setResult(
        REJECTED_NONFASTFORWARD, " need '" + PermissionRule.FORCE_PUSH + "' privilege.");
  }
}
/**
 * Parsed state of a push to a magic branch (refs/for/..., refs/drafts/...).
 *
 * <p>Holds the original {@link ReceiveCommand} plus every %option / --push-option value
 * decoded by {@link #parse}. Fields and setters annotated {@code @Option} are populated by
 * the injected {@link CmdLineParser}.
 */
static class MagicBranchInput {
  private static final Splitter COMMAS = Splitter.on(',').omitEmptyStrings();

  final ReceiveCommand cmd;
  final LabelTypes labelTypes;
  final NotesMigration notesMigration;
  // User-preference fallback consulted by shouldPublishComments() when neither
  // %publish-comments nor %no-publish-comments was given.
  private final boolean defaultPublishComments;
  Branch.NameKey dest;
  PermissionBackend.ForRef perm;
  Set<Account.Id> reviewer = Sets.newLinkedHashSet();
  Set<Account.Id> cc = Sets.newLinkedHashSet();
  Map<String, Short> labels = new HashMap<>();
  String message;
  List<RevCommit> baseCommit;
  CmdLineParser clp;
  Set<String> hashtags = new HashSet<>();

  @Option(name = "--base", metaVar = "BASE", usage = "merge base of changes")
  List<ObjectId> base;

  @Option(name = "--topic", metaVar = "NAME", usage = "attach topic to changes")
  String topic;

  @Option(name = "--draft", usage = "mark new/updated changes as draft")
  boolean draft;

  @Option(name = "--private", usage = "mark new/updated change as private")
  boolean isPrivate;

  @Option(name = "--remove-private", usage = "remove privacy flag from updated change")
  boolean removePrivate;

  @Option(
    name = "--wip",
    aliases = {"-work-in-progress"},
    usage = "mark change as work in progress"
  )
  boolean workInProgress;

  @Option(name = "--ready", usage = "mark change as ready")
  boolean ready;

  @Option(
    name = "--edit",
    aliases = {"-e"},
    usage = "upload as change edit"
  )
  boolean edit;

  @Option(name = "--submit", usage = "immediately submit the change")
  boolean submit;

  @Option(name = "--merged", usage = "create single change for a merged commit")
  boolean merged;

  @Option(name = "--publish-comments", usage = "publish all draft comments on updated changes")
  private boolean publishComments;

  @Option(
    name = "--no-publish-comments",
    aliases = {"--np"},
    usage = "do not publish draft comments"
  )
  private boolean noPublishComments;

  @Option(
    name = "--notify",
    usage =
        "Notify handling that defines to whom email notifications "
            + "should be sent. Allowed values are NONE, OWNER, "
            + "OWNER_REVIEWERS, ALL. If not set, the default is ALL."
  )
  private NotifyHandling notify;

  @Option(name = "--notify-to", metaVar = "USER", usage = "user that should be notified")
  List<Account.Id> tos = new ArrayList<>();

  @Option(name = "--notify-cc", metaVar = "USER", usage = "user that should be CC'd")
  List<Account.Id> ccs = new ArrayList<>();

  @Option(name = "--notify-bcc", metaVar = "USER", usage = "user that should be BCC'd")
  List<Account.Id> bccs = new ArrayList<>();

  @Option(
    name = "--reviewer",
    aliases = {"-r"},
    metaVar = "EMAIL",
    usage = "add reviewer to changes"
  )
  void reviewer(Account.Id id) {
    reviewer.add(id);
  }

  @Option(name = "--cc", metaVar = "EMAIL", usage = "notify user by CC")
  void cc(Account.Id id) {
    cc.add(id);
  }

  @Option(name = "--publish", usage = "publish new/updated changes")
  void publish(boolean publish) {
    draft = !publish;
  }

  @Option(
    name = "--label",
    aliases = {"-l"},
    metaVar = "LABEL+VALUE",
    usage = "label(s) to assign (defaults to +1 if no value provided"
  )
  void addLabel(String token) throws CmdLineException {
    // Parse "Label+N" / "Label-N"; invalid label names or out-of-range values are
    // surfaced as CmdLineException so the whole push option string is rejected.
    LabelVote v = LabelVote.parse(token);
    try {
      LabelType.checkName(v.label());
      ApprovalsUtil.checkLabel(labelTypes, v.label(), v.value());
    } catch (BadRequestException e) {
      throw clp.reject(e.getMessage());
    }
    labels.put(v.label(), v.value());
  }

  @Option(
    name = "--message",
    aliases = {"-m"},
    metaVar = "MESSAGE",
    usage = "Comment message to apply to the review"
  )
  void addMessage(String token) {
    // git push does not allow spaces in refs.
    message = token.replace("_", " ");
  }

  @Option(
    name = "--hashtag",
    aliases = {"-t"},
    metaVar = "HASHTAG",
    usage = "add hashtag to changes"
  )
  void addHashtag(String token) throws CmdLineException {
    if (!notesMigration.readChanges()) {
      throw clp.reject("cannot add hashtags; noteDb is disabled");
    }
    String hashtag = cleanupHashtag(token);
    if (!hashtag.isEmpty()) {
      hashtags.add(hashtag);
    }
    // TODO(dpursehouse): validate hashtags
  }

  MagicBranchInput(
      IdentifiedUser user,
      ReceiveCommand cmd,
      LabelTypes labelTypes,
      NotesMigration notesMigration) {
    this.cmd = cmd;
    // Pushing to refs/drafts/* implies %draft.
    this.draft = cmd.getRefName().startsWith(MagicBranch.NEW_DRAFT_CHANGE);
    this.labelTypes = labelTypes;
    this.notesMigration = notesMigration;
    GeneralPreferencesInfo prefs = user.getAccount().getGeneralPreferencesInfo();
    this.defaultPublishComments =
        prefs != null
            ? firstNonNull(
                user.getAccount().getGeneralPreferencesInfo().publishCommentsOnPush, false)
            : false;
  }

  /** Reviewers and CCs requested via push options, for mail notification. */
  MailRecipients getMailRecipients() {
    return new MailRecipients(reviewer, cc);
  }

  /** Explicit %notify-to/cc/bcc recipients, keyed by recipient type. */
  ListMultimap<RecipientType, Account.Id> getAccountsToNotify() {
    ListMultimap<RecipientType, Account.Id> accountsToNotify =
        MultimapBuilder.hashKeys().arrayListValues().build();
    accountsToNotify.putAll(RecipientType.TO, tos);
    accountsToNotify.putAll(RecipientType.CC, ccs);
    accountsToNotify.putAll(RecipientType.BCC, bccs);
    return accountsToNotify;
  }

  /** Explicit %publish-comments / %no-publish-comments win; otherwise the user preference. */
  boolean shouldPublishComments() {
    if (publishComments) {
      return true;
    } else if (noPublishComments) {
      return false;
    }
    return defaultPublishComments;
  }

  /**
   * Splits the %option suffix off the destination ref, feeds all options (inline plus
   * push options) to {@code clp}, then separates the branch name from an optional topic
   * suffix by progressively shortening at '/' until a known ref (or HEAD) matches.
   *
   * @return the destination ref name without topic; unresolvable refs are returned as-is
   *     for the caller to reject.
   */
  String parse(
      CmdLineParser clp,
      Repository repo,
      Set<String> refs,
      ListMultimap<String, String> pushOptions)
      throws CmdLineException {
    String ref = RefNames.fullName(MagicBranch.getDestBranchName(cmd.getRefName()));

    ListMultimap<String, String> options = LinkedListMultimap.create(pushOptions);
    int optionStart = ref.indexOf('%');
    if (0 < optionStart) {
      for (String s : COMMAS.split(ref.substring(optionStart + 1))) {
        int e = s.indexOf('=');
        if (0 < e) {
          options.put(s.substring(0, e), s.substring(e + 1));
        } else {
          options.put(s, "");
        }
      }
      ref = ref.substring(0, optionStart);
    }

    if (!options.isEmpty()) {
      clp.parseOptionMap(options);
    }

    // Split the destination branch by branch and topic. The topic
    // suffix is entirely optional, so it might not even exist.
    String head = readHEAD(repo);
    int split = ref.length();
    for (; ; ) {
      String name = ref.substring(0, split);
      if (refs.contains(name) || name.equals(head)) {
        break;
      }

      split = name.lastIndexOf('/', split - 1);
      if (split <= Constants.R_REFS.length()) {
        return ref;
      }
    }
    if (split < ref.length()) {
      topic = Strings.emptyToNull(ref.substring(split + 1));
    }
    return ref.substring(0, split);
  }

  /** Notify handling for new changes: %notify wins; WIP defaults to OWNER; else ALL. */
  NotifyHandling getNotify() {
    if (notify != null) {
      return notify;
    }
    if (workInProgress) {
      return NotifyHandling.OWNER;
    }
    return NotifyHandling.ALL;
  }

  /** Like {@link #getNotify()} but also treats an existing WIP change as OWNER-only. */
  NotifyHandling getNotify(ChangeNotes notes) {
    if (notify != null) {
      return notify;
    }
    if (workInProgress || (!ready && notes.getChange().isWorkInProgress())) {
      return NotifyHandling.OWNER;
    }
    return NotifyHandling.ALL;
  }
}
/**
 * Gets an immutable snapshot of the pushOptions.
 *
 * <p>The multimap is empty if the client does not support push options, or if the client did
 * not send any options.
 *
 * @return an immutable copy of pushOptions; never {@code null}.
 */
ListMultimap<String, String> getPushOptions() {
  // ImmutableListMultimap.copyOf never returns null, so the former @Nullable annotation
  // was misleading and has been removed; the javadoc also said "view" but this is a copy.
  return ImmutableListMultimap.copyOf(pushOptions);
}
/**
 * Parses a push to a magic branch (refs/for/..., refs/drafts/...): decodes inline %options,
 * resolves the destination ref, checks permissions and mutually-exclusive options, resolves
 * %base/%merged, and finally verifies the pushed history shares ancestry with the target.
 */
private void parseMagicBranch(ReceiveCommand cmd) throws PermissionBackendException {
  // Permit exactly one new change request per push.
  if (magicBranch != null) {
    reject(cmd, "duplicate request");
    return;
  }

  logDebug("Found magic branch {}", cmd.getRefName());
  magicBranch = new MagicBranchInput(user, cmd, labelTypes, notesMigration);
  magicBranch.reviewer.addAll(extraReviewers.get(ReviewerStateInternal.REVIEWER));
  magicBranch.cc.addAll(extraReviewers.get(ReviewerStateInternal.CC));

  String ref;
  CmdLineParser clp = optionParserFactory.create(magicBranch);
  magicBranch.clp = clp;
  try {
    ref = magicBranch.parse(clp, repo, rp.getAdvertisedRefs().keySet(), pushOptions);
  } catch (CmdLineException e) {
    if (!clp.wasHelpRequestedByOption()) {
      logDebug("Invalid branch syntax");
      reject(cmd, e.getMessage());
      return;
    }
    ref = null; // never happen
  }

  if (clp.wasHelpRequestedByOption()) {
    StringWriter w = new StringWriter();
    w.write("\nHelp for refs/for/branch:\n\n");
    clp.printUsage(w, null);
    addMessage(w.toString());
    reject(cmd, "see help");
    return;
  }

  // On All-Users, refs/users/self resolves to the caller's own user ref.
  if (projectControl.getProjectState().isAllUsers() && RefNames.REFS_USERS_SELF.equals(ref)) {
    logDebug("Handling {}", RefNames.REFS_USERS_SELF);
    ref = RefNames.refsUsers(user.getAccountId());
  }

  if (!rp.getAdvertisedRefs().containsKey(ref)
      && !ref.equals(readHEAD(repo))
      && !ref.equals(RefNames.REFS_CONFIG)) {
    logDebug("Ref {} not found", ref);
    if (ref.startsWith(Constants.R_HEADS)) {
      String n = ref.substring(Constants.R_HEADS.length());
      reject(cmd, "branch " + n + " not found");
    } else {
      reject(cmd, ref + " not found");
    }
    return;
  }

  magicBranch.dest = new Branch.NameKey(project.getNameKey(), ref);
  magicBranch.perm = permissions.ref(ref);
  if (!projectControl.getProject().getState().permitsWrite()) {
    reject(cmd, "project state does not permit write");
    return;
  }

  if (magicBranch.draft) {
    if (!receiveConfig.allowDrafts) {
      errors.put(ReceiveError.CODE_REVIEW, ref);
      reject(cmd, "draft workflow is disabled");
      return;
    } else if (projectControl
        .controlForRef(MagicBranch.NEW_DRAFT_CHANGE + ref)
        .isBlocked(Permission.PUSH)) {
      errors.put(ReceiveError.CODE_REVIEW, ref);
      reject(cmd, "cannot upload drafts");
      return;
    }
  }

  try {
    magicBranch.perm.check(RefPermission.CREATE_CHANGE);
  } catch (AuthException denied) {
    errors.put(ReceiveError.CODE_REVIEW, ref);
    reject(cmd, denied.getMessage());
    return;
  }

  // Mutually exclusive option combinations.
  if (magicBranch.isPrivate && magicBranch.removePrivate) {
    reject(cmd, "the options 'private' and 'remove-private' are mutually exclusive");
    return;
  }
  if (magicBranch.workInProgress && magicBranch.ready) {
    reject(cmd, "the options 'wip' and 'ready' are mutually exclusive");
    return;
  }
  if (magicBranch.publishComments && magicBranch.noPublishComments) {
    reject(
        cmd, "the options 'publish-comments' and 'no-publish-comments' are mutually exclusive");
    return;
  }

  if (magicBranch.draft && magicBranch.submit) {
    reject(cmd, "cannot submit draft");
    return;
  }

  if (magicBranch.submit) {
    try {
      permissions.ref(ref).check(RefPermission.UPDATE_BY_SUBMIT);
    } catch (AuthException e) {
      reject(cmd, e.getMessage());
      return;
    }
  }

  RevWalk walk = rp.getRevWalk();
  RevCommit tip;
  try {
    tip = walk.parseCommit(magicBranch.cmd.getNewId());
    logDebug("Tip of push: {}", tip.name());
  } catch (IOException ex) {
    magicBranch.cmd.setResult(REJECTED_MISSING_OBJECT);
    logError("Invalid pack upload; one or more objects weren't sent", ex);
    return;
  }

  String destBranch = magicBranch.dest.get();
  try {
    // %merged requires the pushed tip to already be reachable from the branch tip.
    if (magicBranch.merged) {
      if (magicBranch.draft) {
        reject(cmd, "cannot be draft & merged");
        return;
      }
      if (magicBranch.base != null) {
        reject(cmd, "cannot use merged with base");
        return;
      }
      RevCommit branchTip = readBranchTip(cmd, magicBranch.dest);
      if (branchTip == null) {
        return; // readBranchTip already rejected cmd.
      }
      if (!walk.isMergedInto(tip, branchTip)) {
        reject(cmd, "not merged into branch");
        return;
      }
    }

    // If tip is a merge commit, or the root commit or
    // if %base or %merged was specified, ignore newChangeForAllNotInTarget.
    if (tip.getParentCount() > 1
        || magicBranch.base != null
        || magicBranch.merged
        || tip.getParentCount() == 0) {
      logDebug("Forcing newChangeForAllNotInTarget = false");
      newChangeForAllNotInTarget = false;
    }

    if (magicBranch.base != null) {
      logDebug("Handling %base: {}", magicBranch.base);
      magicBranch.baseCommit = Lists.newArrayListWithCapacity(magicBranch.base.size());
      for (ObjectId id : magicBranch.base) {
        try {
          magicBranch.baseCommit.add(walk.parseCommit(id));
        } catch (IncorrectObjectTypeException notCommit) {
          reject(cmd, "base must be a commit");
          return;
        } catch (MissingObjectException e) {
          reject(cmd, "base not found");
          return;
        } catch (IOException e) {
          logWarn(String.format("Project %s cannot read %s", project.getName(), id.name()), e);
          reject(cmd, "internal server error");
          return;
        }
      }
    } else if (newChangeForAllNotInTarget) {
      RevCommit branchTip = readBranchTip(cmd, magicBranch.dest);
      if (branchTip == null) {
        return; // readBranchTip already rejected cmd.
      }
      magicBranch.baseCommit = Collections.singletonList(branchTip);
      logDebug("Set baseCommit = {}", magicBranch.baseCommit.get(0).name());
    }
  } catch (IOException ex) {
    logWarn(
        String.format("Error walking to %s in project %s", destBranch, project.getName()), ex);
    reject(cmd, "internal server error");
    return;
  }

  // Validate that the new commits are connected with the target
  // branch. If they aren't, we want to abort. We do this check by
  // looking to see if we can compute a merge base between the new
  // commits and the target branch head.
  //
  try {
    Ref targetRef = rp.getAdvertisedRefs().get(magicBranch.dest.get());
    if (targetRef == null || targetRef.getObjectId() == null) {
      // The destination branch does not yet exist. Assume the
      // history being sent for review will start it and thus
      // is "connected" to the branch.
      logDebug("Branch is unborn");
      return;
    }
    RevCommit h = walk.parseCommit(targetRef.getObjectId());
    logDebug("Current branch tip: {}", h.name());
    RevFilter oldRevFilter = walk.getRevFilter();
    try {
      walk.reset();
      walk.setRevFilter(RevFilter.MERGE_BASE);
      walk.markStart(tip);
      walk.markStart(h);
      if (walk.next() == null) {
        reject(magicBranch.cmd, "no common ancestry");
      }
    } finally {
      // Restore the walk's previous filter state for subsequent users.
      walk.reset();
      walk.setRevFilter(oldRevFilter);
    }
  } catch (IOException e) {
    magicBranch.cmd.setResult(REJECTED_MISSING_OBJECT);
    logError("Invalid pack upload; one or more objects weren't sent", e);
  }
}
// Returns the full ref name HEAD points at, or null when it cannot be read (error logged).
private static String readHEAD(Repository repo) {
  try {
    return repo.getFullBranch();
  } catch (IOException e) {
    log.error("Cannot read HEAD symref", e);
    return null;
  }
}
/**
 * Parses the tip commit of {@code branch}; when the branch does not exist, rejects
 * {@code cmd} and returns {@code null}.
 */
private RevCommit readBranchTip(ReceiveCommand cmd, Branch.NameKey branch) throws IOException {
  Ref r = allRefs().get(branch.get());
  if (r != null) {
    return rp.getRevWalk().parseCommit(r.getObjectId());
  }
  reject(cmd, branch.get() + " not found");
  return null;
}
/**
 * Handles a push to refs/changes/..., which uploads a replacement patch set for the given
 * existing change. The change must exist, be in this project, and the command must be a
 * ref creation.
 */
private void parseReplaceCommand(ReceiveCommand cmd, Change.Id changeId) {
  logDebug("Parsing replace command");
  if (cmd.getType() != ReceiveCommand.Type.CREATE) {
    reject(cmd, "invalid usage");
    return;
  }

  RevCommit newCommit;
  try {
    newCommit = rp.getRevWalk().parseCommit(cmd.getNewId());
    logDebug("Replacing with {}", newCommit);
  } catch (IOException e) {
    logError("Cannot parse " + cmd.getNewId().name() + " as commit", e);
    reject(cmd, "invalid commit");
    return;
  }

  Change changeEnt;
  try {
    changeEnt = notesFactory.createChecked(db, project.getNameKey(), changeId).getChange();
  } catch (NoSuchChangeException e) {
    logError("Change not found " + changeId, e);
    reject(cmd, "change " + changeId + " not found");
    return;
  } catch (OrmException e) {
    logError("Cannot lookup existing change " + changeId, e);
    reject(cmd, "database error");
    return;
  }
  if (!project.getNameKey().equals(changeEnt.getProject())) {
    reject(cmd, "change " + changeId + " does not belong to project " + project.getName());
    return;
  }

  logDebug("Replacing change {}", changeEnt.getId());
  requestReplace(cmd, true, changeEnt, newCommit);
}
/**
 * Registers a replacement patch set request for {@code change}; fails (rejecting the
 * command) when the change is closed or a request for it was already registered.
 *
 * @return true when the request was accepted and queued.
 */
private boolean requestReplace(
    ReceiveCommand cmd, boolean checkMergedInto, Change change, RevCommit newCommit) {
  if (change.getStatus().isClosed()) {
    reject(cmd, "change " + canonicalWebUrl + change.getId() + " closed");
    return false;
  }

  ReplaceRequest req = new ReplaceRequest(change.getId(), newCommit, cmd, checkMergedInto);
  // putIfAbsent keeps the first request and signals a duplicate without a separate
  // containsKey probe.
  if (replaceByChange.putIfAbsent(req.ontoChange, req) != null) {
    reject(cmd, "duplicate request");
    return false;
  }
  return true;
}
/**
 * Walks the commits pushed to the magic branch (refs/for/...) and partitions them into:
 * new-change creations (accumulated in {@code newChanges}), replacements of existing changes
 * (via {@link #requestReplace}), and patch-set group updates ({@code updateGroups}).
 *
 * <p>On any rejection the corresponding command is rejected and {@code newChanges} is reset to
 * an empty list so that no change insertion happens later in the pipeline.
 */
private void selectNewAndReplacedChangesFromMagicBranch() {
  logDebug("Finding new and replaced changes");
  newChanges = new ArrayList<>();
  ListMultimap<ObjectId, Ref> existing = changeRefsById();
  GroupCollector groupCollector =
      GroupCollector.create(changeRefsById(), db, psUtil, notesFactory, project.getNameKey());
  try {
    RevCommit start = setUpWalkForSelectingChanges();
    if (start == null) {
      return;
    }
    // Commits carrying a Change-Id footer whose target change still needs an index lookup.
    // LinkedHashMap preserves walk order for the second pass below.
    LinkedHashMap<RevCommit, ChangeLookup> pending = new LinkedHashMap<>();
    Set<Change.Key> newChangeIds = new HashSet<>();
    int maxBatchChanges = receiveConfig.getEffectiveMaxBatchChangesLimit(user);
    int total = 0;
    int alreadyTracked = 0;
    // Implicit-merge detection only applies to single-parent tips of projects that opted in.
    boolean rejectImplicitMerges =
        start.getParentCount() == 1
            && projectCache.get(project.getNameKey()).isRejectImplicitMerges()
            // Don't worry about implicit merges when creating changes for
            // already-merged commits; they're already in history, so it's too
            // late.
            && !magicBranch.merged;
    Set<RevCommit> mergedParents;
    if (rejectImplicitMerges) {
      mergedParents = new HashSet<>();
    } else {
      mergedParents = null;
    }
    // Pass 1: walk every pushed commit once, classifying as tracked/new/pending-lookup.
    for (; ; ) {
      RevCommit c = rp.getRevWalk().next();
      if (c == null) {
        break;
      }
      total++;
      rp.getRevWalk().parseBody(c);
      String name = c.name();
      groupCollector.visit(c);
      Collection<Ref> existingRefs = existing.get(c);
      if (rejectImplicitMerges) {
        // Track parents that are not themselves part of the push; checked in pass 2.
        Collections.addAll(mergedParents, c.getParents());
        mergedParents.remove(c);
      }
      boolean commitAlreadyTracked = !existingRefs.isEmpty();
      if (commitAlreadyTracked) {
        alreadyTracked++;
        // Corner cases where an existing commit might need a new group:
        // A) Existing commit has a null group; wasn't assigned during schema
        //    upgrade, or schema upgrade is performed on a running server.
        // B) Let A<-B<-C, then:
        //      1. Push A to refs/heads/master
        //      2. Push B to refs/for/master
        //      3. Force push A~ to refs/heads/master
        //      4. Push C to refs/for/master.
        //    B will be in existing so we aren't replacing the patch set. It
        //    used to have its own group, but now needs to to be changed to
        //    A's group.
        // C) Commit is a PatchSet of a pre-existing change uploaded with a
        //    different target branch.
        for (Ref ref : existingRefs) {
          updateGroups.add(new UpdateGroupsRequest(ref, c));
        }
        if (!(newChangeForAllNotInTarget || magicBranch.base != null)) {
          continue;
        }
      }
      // The last Change-Id footer wins when several are present.
      List<String> idList = c.getFooterLines(CHANGE_ID);
      String idStr = !idList.isEmpty() ? idList.get(idList.size() - 1).trim() : null;
      if (idStr != null) {
        pending.put(c, new ChangeLookup(c, new Change.Key(idStr)));
      } else {
        pending.put(c, new ChangeLookup(c));
      }
      // Enforce the per-push change count limit (0 means unlimited).
      int n = pending.size() + newChanges.size();
      if (maxBatchChanges != 0 && n > maxBatchChanges) {
        logDebug("{} changes exceeds limit of {}", n, maxBatchChanges);
        reject(
            magicBranch.cmd,
            "the number of pushed changes in a batch exceeds the max limit " + maxBatchChanges);
        newChanges = Collections.emptyList();
        return;
      }
      if (commitAlreadyTracked) {
        boolean changeExistsOnDestBranch = false;
        for (ChangeData cd : pending.get(c).destChanges) {
          if (cd.change().getDest().equals(magicBranch.dest)) {
            changeExistsOnDestBranch = true;
            break;
          }
        }
        if (changeExistsOnDestBranch) {
          continue;
        }
        logDebug("Creating new change for {} even though it is already tracked", name);
      }
      if (!validCommit(rp.getRevWalk(), magicBranch.perm, magicBranch.dest, magicBranch.cmd, c)) {
        // Not a change the user can propose? Abort as early as possible.
        newChanges = Collections.emptyList();
        logDebug("Aborting early due to invalid commit");
        return;
      }
      // Don't allow merges to be uploaded in commit chain via all-not-in-target
      if (newChangeForAllNotInTarget && c.getParentCount() > 1) {
        reject(
            magicBranch.cmd,
            "Pushing merges in commit chains with 'all not in target' is not allowed,\n"
                + "to override please set the base manually");
        logDebug("Rejecting merge commit {} with newChangeForAllNotInTarget", name);
        // TODO(dborowitz): Should we early return here?
      }
      if (idList.isEmpty()) {
        // No Change-Id footer: always a brand-new change.
        newChanges.add(new CreateRequest(c, magicBranch.dest.get()));
        continue;
      }
    }
    logDebug(
        "Finished initial RevWalk with {} commits total: {} already"
            + " tracked, {} new changes with no Change-Id, and {} deferred"
            + " lookups",
        total,
        alreadyTracked,
        newChanges.size(),
        pending.size());
    if (rejectImplicitMerges) {
      rejectImplicitMerges(mergedParents);
    }
    // Pass 2: resolve each deferred Change-Id lookup into a create or replace request.
    for (Iterator<ChangeLookup> itr = pending.values().iterator(); itr.hasNext(); ) {
      ChangeLookup p = itr.next();
      if (p.changeKey == null) {
        continue;
      }
      if (newChangeIds.contains(p.changeKey)) {
        logDebug("Multiple commits with Change-Id {}", p.changeKey);
        reject(magicBranch.cmd, SAME_CHANGE_ID_IN_MULTIPLE_CHANGES);
        newChanges = Collections.emptyList();
        return;
      }
      List<ChangeData> changes = p.destChanges;
      if (changes.size() > 1) {
        logDebug(
            "Multiple changes in branch {} with Change-Id {}: {}",
            magicBranch.dest,
            p.changeKey,
            changes.stream().map(cd -> cd.getId().toString()).collect(joining()));
        // WTF, multiple changes in this branch have the same key?
        // Since the commit is new, the user should recreate it with
        // a different Change-Id. In practice, we should never see
        // this error message as Change-Id should be unique per branch.
        //
        reject(magicBranch.cmd, p.changeKey.get() + " has duplicates");
        newChanges = Collections.emptyList();
        return;
      }
      if (changes.size() == 1) {
        // Schedule as a replacement to this one matching change.
        //
        RevId currentPs = changes.get(0).currentPatchSet().getRevision();
        // If Commit is already current PatchSet of target Change.
        if (p.commit.name().equals(currentPs.get())) {
          if (pending.size() == 1) {
            // There are no commits left to check, all commits in pending were already
            // current PatchSet of the corresponding target changes.
            reject(magicBranch.cmd, "commit(s) already exists (as current patchset)");
          } else {
            // Commit is already current PatchSet.
            // Remove from pending and try next commit.
            itr.remove();
            continue;
          }
        }
        if (requestReplace(magicBranch.cmd, false, changes.get(0).change(), p.commit)) {
          continue;
        }
        newChanges = Collections.emptyList();
        return;
      }
      if (changes.size() == 0) {
        if (!isValidChangeId(p.changeKey.get())) {
          reject(magicBranch.cmd, "invalid Change-Id");
          newChanges = Collections.emptyList();
          return;
        }
        // In case the change look up from the index failed,
        // double check against the existing refs
        if (foundInExistingRef(existing.get(p.commit))) {
          if (pending.size() == 1) {
            reject(magicBranch.cmd, "commit(s) already exists (as current patchset)");
            newChanges = Collections.emptyList();
            return;
          }
          itr.remove();
          continue;
        }
        newChangeIds.add(p.changeKey);
      }
      newChanges.add(new CreateRequest(p.commit, magicBranch.dest.get()));
    }
    logDebug(
        "Finished deferred lookups with {} updates and {} new changes",
        replaceByChange.size(),
        newChanges.size());
  } catch (IOException e) {
    // Should never happen, the core receive process would have
    // identified the missing object earlier before we got control.
    //
    magicBranch.cmd.setResult(REJECTED_MISSING_OBJECT);
    logError("Invalid pack upload; one or more objects weren't sent", e);
    newChanges = Collections.emptyList();
    return;
  } catch (OrmException e) {
    logError("Cannot query database to locate prior changes", e);
    reject(magicBranch.cmd, "database error");
    newChanges = Collections.emptyList();
    return;
  }
  if (newChanges.isEmpty() && replaceByChange.isEmpty()) {
    reject(magicBranch.cmd, "no new changes");
    return;
  }
  if (!newChanges.isEmpty() && magicBranch.edit) {
    reject(magicBranch.cmd, "edit is not supported for new changes");
    return;
  }
  // Pass 3: assign sequence ids to new changes and propagate collected groups to every
  // create/replace/update-groups request.
  try {
    SortedSetMultimap<ObjectId, String> groups = groupCollector.getGroups();
    List<Integer> newIds = seq.nextChangeIds(newChanges.size());
    for (int i = 0; i < newChanges.size(); i++) {
      CreateRequest create = newChanges.get(i);
      create.setChangeId(newIds.get(i));
      create.groups = ImmutableList.copyOf(groups.get(create.commit));
    }
    for (ReplaceRequest replace : replaceByChange.values()) {
      replace.groups = ImmutableList.copyOf(groups.get(replace.newCommitId));
    }
    for (UpdateGroupsRequest update : updateGroups) {
      update.groups = ImmutableList.copyOf((groups.get(update.commit)));
    }
    logDebug("Finished updating groups from GroupCollector");
  } catch (OrmException e) {
    logError("Error collecting groups for changes", e);
    reject(magicBranch.cmd, "internal server error");
    return;
  }
}
/**
 * Returns true if any of the given change refs belongs to a change targeting the magic
 * branch's destination; such a change is also re-indexed asynchronously as a side effect.
 */
private boolean foundInExistingRef(Collection<Ref> existingRefs) throws OrmException {
  for (Ref existingRef : existingRefs) {
    ChangeNotes notes =
        notesFactory.create(db, project.getNameKey(), Change.Id.fromRef(existingRef.getName()));
    Change change = notes.getChange();
    if (!change.getDest().equals(magicBranch.dest)) {
      continue;
    }
    logDebug("Found change {} from existing refs.", change.getKey());
    // Reindex the change asynchronously, ignoring errors.
    @SuppressWarnings("unused")
    Future<?> possiblyIgnoredError = indexer.indexAsync(project.getNameKey(), change.getId());
    return true;
  }
  return false;
}
/**
 * Configures the shared RevWalk to enumerate only the commits being proposed by this push.
 *
 * <p>Starts at the new tip of the magic-branch command, sorts TOPO+REVERSE (so the walk
 * yields parents before children), and marks uninteresting either the explicit bases, the
 * parents of an already-merged tip, or all branch heads.
 *
 * @return the parsed tip commit of the push
 * @throws IOException on repository read errors
 */
private RevCommit setUpWalkForSelectingChanges() throws IOException {
  RevWalk rw = rp.getRevWalk();
  RevCommit start = rw.parseCommit(magicBranch.cmd.getNewId());
  rw.reset();
  rw.sort(RevSort.TOPO);
  rw.sort(RevSort.REVERSE, true);
  rp.getRevWalk().markStart(start);
  if (magicBranch.baseCommit != null) {
    // User supplied explicit %base commits.
    markExplicitBasesUninteresting();
  } else if (magicBranch.merged) {
    // %merged push: only the tip itself is of interest, not its history.
    logDebug("Marking parents of merged commit {} uninteresting", start.name());
    for (RevCommit c : start.getParents()) {
      rw.markUninteresting(c);
    }
  } else {
    // Default: everything already reachable from a branch head is not new.
    markHeadsAsUninteresting(rw, magicBranch.dest != null ? magicBranch.dest.get() : null);
  }
  return start;
}
/**
 * Marks the user-supplied %base commits, plus the current destination branch tip (if any),
 * as uninteresting in the shared RevWalk so only commits past those bases are selected.
 */
private void markExplicitBasesUninteresting() throws IOException {
  logDebug("Marking {} base commits uninteresting", magicBranch.baseCommit.size());
  RevWalk walk = rp.getRevWalk();
  for (RevCommit base : magicBranch.baseCommit) {
    walk.markUninteresting(base);
  }
  Ref targetRef = allRefs().get(magicBranch.dest.get());
  if (targetRef != null) {
    logDebug(
        "Marking target ref {} ({}) uninteresting",
        magicBranch.dest.get(),
        targetRef.getObjectId().name());
    walk.markUninteresting(walk.parseCommit(targetRef.getObjectId()));
  }
}
/**
 * Rejects the push if commits outside the pushed chain would be merged into the destination
 * branch implicitly (i.e. they are not already reachable from the branch tip).
 *
 * @param mergedParents parents of pushed commits that are not themselves part of the push
 * @throws IOException on repository read errors
 */
private void rejectImplicitMerges(Set<RevCommit> mergedParents) throws IOException {
  if (!mergedParents.isEmpty()) {
    Ref targetRef = allRefs().get(magicBranch.dest.get());
    if (targetRef != null) {
      RevWalk rw = rp.getRevWalk();
      RevCommit tip = rw.parseCommit(targetRef.getObjectId());
      boolean containsImplicitMerges = true;
      for (RevCommit p : mergedParents) {
        // NOTE(review): the &= aggregation means the flag stays true only if NO parent is
        // already merged into the tip, i.e. every external parent would be merged
        // implicitly — confirm this all-parents (rather than any-parent) semantics is
        // intended before changing it.
        containsImplicitMerges &= !rw.isMergedInto(p, tip);
      }
      if (containsImplicitMerges) {
        // Re-walk from the offending parents down to the tip to list every commit
        // that would slip into the branch, then reject the whole command.
        rw.reset();
        for (RevCommit p : mergedParents) {
          rw.markStart(p);
        }
        rw.markUninteresting(tip);
        RevCommit c;
        while ((c = rw.next()) != null) {
          rw.parseBody(c);
          messages.add(
              new CommitValidationMessage(
                  "ERROR: Implicit Merge of "
                      + c.abbreviate(7).name()
                      + " "
                      + c.getShortMessage(),
                  false));
        }
        reject(magicBranch.cmd, "implicit merges detected");
      }
    }
  }
}
/**
 * Marks every branch head (refs/heads/*) and, optionally, {@code forRef} as uninteresting
 * in {@code rw}, so the walk only yields commits not yet on any branch. Unparseable refs
 * are logged and skipped.
 */
private void markHeadsAsUninteresting(RevWalk rw, @Nullable String forRef) {
  int marked = 0;
  for (Ref ref : allRefs().values()) {
    boolean isHeadOrTarget = ref.getName().startsWith(R_HEADS) || ref.getName().equals(forRef);
    if (!isHeadOrTarget || ref.getObjectId() == null) {
      continue;
    }
    try {
      rw.markUninteresting(rw.parseCommit(ref.getObjectId()));
      marked++;
    } catch (IOException e) {
      logWarn(String.format("Invalid ref %s in %s", ref.getName(), project.getName()), e);
    }
  }
  logDebug("Marked {} heads as uninteresting", marked);
}
/**
 * Returns true when {@code idStr} is a well-formed Change-Id: the letter 'I' followed by
 * exactly 40 hex digits, and not the reserved all-zero id.
 */
private static boolean isValidChangeId(String idStr) {
  boolean wellFormed = idStr.matches("^I[0-9a-fA-F]{40}$");
  boolean allZero = idStr.matches("^I00*$");
  return wellFormed && !allZero;
}
/**
 * Pairs a pushed commit with the destination-branch changes it may belong to, resolved at
 * construction time via the change index (by Change-Id key, or by commit SHA-1 when the
 * commit has no Change-Id footer).
 */
private class ChangeLookup {
  // The pushed commit this lookup was created for.
  final RevCommit commit;
  // Change-Id key parsed from the commit's footer; null when the commit had none.
  final Change.Key changeKey;
  // Changes on the magic branch's destination that match the key (or commit SHA-1).
  final List<ChangeData> destChanges;

  ChangeLookup(RevCommit c, Change.Key key) throws OrmException {
    commit = c;
    changeKey = key;
    destChanges = queryProvider.get().byBranchKey(magicBranch.dest, key);
  }

  ChangeLookup(RevCommit c) throws OrmException {
    commit = c;
    destChanges = queryProvider.get().byBranchCommit(magicBranch.dest, c.getName());
    changeKey = null;
  }
}
/**
 * A pending request to create a brand-new change (and its first patch set) for one pushed
 * commit. Built in two phases: {@link #setChangeId(int)} assigns the sequence id and
 * prepares the inserter; {@link #addOps(BatchUpdate)} queues the actual insertion.
 */
private class CreateRequest {
  final RevCommit commit;
  private final String refName;
  Change.Id changeId;
  // Ref-creation command for the new patch set ref; created in setChangeId.
  ReceiveCommand cmd;
  ChangeInserter ins;
  // Patch-set groups, filled in later from the GroupCollector.
  List<String> groups = ImmutableList.of();
  // Populated by a BatchUpdateOp during execution so callers can read the created change.
  Change change;

  CreateRequest(RevCommit commit, String refName) {
    this.commit = commit;
    this.refName = refName;
  }

  /**
   * Assigns the numeric change id and configures the {@link ChangeInserter} from the
   * magic-branch options (topic, private, WIP, draft, merged status, push certificate).
   */
  private void setChangeId(int id) {
    boolean privateByDefault = projectCache.get(project.getNameKey()).isPrivateByDefault();
    changeId = new Change.Id(id);
    ins =
        changeInserterFactory
            .create(changeId, commit, refName)
            .setTopic(magicBranch.topic)
            .setPrivate(magicBranch.isPrivate || (privateByDefault && !magicBranch.removePrivate))
            .setWorkInProgress(magicBranch.workInProgress)
            // Changes already validated in validateNewCommits.
            .setValidate(false);
    if (magicBranch.draft) {
      ins.setDraft(magicBranch.draft);
    } else if (magicBranch.merged) {
      ins.setStatus(Change.Status.MERGED);
    }
    cmd = new ReceiveCommand(ObjectId.zeroId(), commit, ins.getPatchSetId().toRefName());
    if (rp.getPushCertificate() != null) {
      ins.setPushCertificate(rp.getPushCertificate().toTextWithSignature());
    }
  }

  /**
   * Queues the change insertion plus follow-up ops (hashtags, topic, change capture,
   * progress) on the given batch update. Requires {@link #setChangeId(int)} first.
   */
  private void addOps(BatchUpdate bu) throws RestApiException {
    checkState(changeId != null, "must call setChangeId before addOps");
    try {
      RevWalk rw = rp.getRevWalk();
      rw.parseBody(commit);
      final PatchSet.Id psId = ins.setGroups(groups).getPatchSetId();
      Account.Id me = user.getAccountId();
      List<FooterLine> footerLines = commit.getFooterLines();
      MailRecipients recipients = new MailRecipients();
      Map<String, Short> approvals = new HashMap<>();
      checkNotNull(magicBranch);
      recipients.add(magicBranch.getMailRecipients());
      approvals = magicBranch.labels;
      recipients.add(
          getRecipientsFromFooters(db, accountResolver, magicBranch.draft, footerLines));
      // The uploader never needs to be notified of their own upload.
      recipients.remove(me);
      StringBuilder msg =
          new StringBuilder(
              ApprovalsUtil.renderMessageWithApprovals(
                  psId.get(), approvals, Collections.<String, PatchSetApproval>emptyMap()));
      msg.append('.');
      if (!Strings.isNullOrEmpty(magicBranch.message)) {
        msg.append("\n").append(magicBranch.message);
      }
      bu.insertChange(
          ins.setReviewers(recipients.getReviewers())
              .setExtraCC(recipients.getCcOnly())
              .setApprovals(approvals)
              .setMessage(msg.toString())
              .setNotify(magicBranch.getNotify())
              .setAccountsToNotify(magicBranch.getAccountsToNotify())
              .setRequestScopePropagator(requestScopePropagator)
              .setSendMail(true)
              .setPatchSetDescription(magicBranch.message));
      if (!magicBranch.hashtags.isEmpty()) {
        // Any change owner is allowed to add hashtags when creating a change.
        bu.addOp(
            changeId,
            hashtagsFactory.create(new HashtagsInput(magicBranch.hashtags)).setFireEvent(false));
      }
      if (!Strings.isNullOrEmpty(magicBranch.topic)) {
        bu.addOp(
            changeId,
            new BatchUpdateOp() {
              @Override
              public boolean updateChange(ChangeContext ctx) {
                ctx.getUpdate(psId).setTopic(magicBranch.topic);
                return true;
              }
            });
      }
      // Capture the created Change object so later steps (e.g. submit) can read it.
      bu.addOp(
          changeId,
          new BatchUpdateOp() {
            @Override
            public boolean updateChange(ChangeContext ctx) {
              change = ctx.getChange();
              return false;
            }
          });
      bu.addOp(changeId, new ChangeProgressOp(newProgress));
    } catch (Exception e) {
      throw INSERT_EXCEPTION.apply(e);
    }
  }
}
/**
 * Immediately submits (merges) the change at the tip of the push, for pushes that request
 * direct submission. Maps every created/replaced commit back to its change, requires the
 * push tip to correspond to one of them, and runs a {@link MergeOp} on that tip change.
 */
private void submit(Collection<CreateRequest> create, Collection<ReplaceRequest> replace)
    throws OrmException, RestApiException, UpdateException, IOException, ConfigInvalidException,
        PermissionBackendException {
  Map<ObjectId, Change> bySha = Maps.newHashMapWithExpectedSize(create.size() + replace.size());
  for (CreateRequest r : create) {
    // r.change is only set once the BatchUpdateOp queued in addOps has run.
    checkNotNull(r.change, "cannot submit new change %s; op may not have run", r.changeId);
    bySha.put(r.commit, r.change);
  }
  for (ReplaceRequest r : replace) {
    bySha.put(r.newCommitId, r.notes.getChange());
  }
  Change tipChange = bySha.get(magicBranch.cmd.getNewId());
  checkNotNull(
      tipChange, "tip of push does not correspond to a change; found these changes: %s", bySha);
  logDebug(
      "Processing submit with tip change {} ({})", tipChange.getId(), magicBranch.cmd.getNewId());
  try (MergeOp op = mergeOpProvider.get()) {
    op.merge(db, tipChange, user, false, new SubmitInput(), false);
  }
}
/**
 * Loads the target changes for all pending replace requests and validates each one, pruning
 * requests flagged as skippable. On database/repository read failures every not-yet-attempted
 * request is failed with an internal server error. Finally, if the magic-branch command itself
 * already failed, all dependent create/replace commands are aborted.
 */
private void preparePatchSetsForReplace() {
  try {
    readChangesForReplace();
    for (Iterator<ReplaceRequest> itr = replaceByChange.values().iterator(); itr.hasNext(); ) {
      ReplaceRequest req = itr.next();
      if (req.inputCommand.getResult() == NOT_ATTEMPTED) {
        req.validate(false);
        // validate() may mark a request as skippable without producing a command; drop it.
        if (req.skip && req.cmd == null) {
          itr.remove();
        }
      }
    }
  } catch (OrmException err) {
    logError(
        String.format(
            "Cannot read database before replacement for project %s", project.getName()),
        err);
    rejectRemainingReplaceRequests();
  } catch (IOException | PermissionBackendException err) {
    logError(
        String.format(
            "Cannot read repository before replacement for project %s", project.getName()),
        err);
    rejectRemainingReplaceRequests();
  }
  logDebug("Read {} changes to replace", replaceByChange.size());

  if (magicBranch != null && magicBranch.cmd.getResult() != NOT_ATTEMPTED) {
    // Cancel creations tied to refs/for/ or refs/drafts/ command.
    for (ReplaceRequest req : replaceByChange.values()) {
      if (req.inputCommand == magicBranch.cmd && req.cmd != null) {
        req.cmd.setResult(Result.REJECTED_OTHER_REASON, "aborted");
      }
    }
    for (CreateRequest req : newChanges) {
      req.cmd.setResult(Result.REJECTED_OTHER_REASON, "aborted");
    }
  }
}

/**
 * Marks every replace request whose input command has not been attempted yet as failed with
 * an internal server error. Shared by the error paths of preparePatchSetsForReplace.
 */
private void rejectRemainingReplaceRequests() {
  for (ReplaceRequest req : replaceByChange.values()) {
    if (req.inputCommand.getResult() == NOT_ATTEMPTED) {
      req.inputCommand.setResult(REJECTED_OTHER_REASON, "internal server error");
    }
  }
}
/**
 * Bulk-loads {@link ChangeNotes} for every change targeted by a replace request and attaches
 * each result to its request.
 */
private void readChangesForReplace() throws OrmException {
  List<Change.Id> targetIds =
      replaceByChange.values().stream().map(r -> r.ontoChange).collect(toList());
  for (ChangeNotes notes : notesFactory.create(db, targetIds)) {
    replaceByChange.get(notes.getChangeId()).notes = notes;
  }
}
/**
 * A pending request to add {@code newCommitId} as a new patch set (or change edit) on an
 * existing change. The constructor snapshots all current patch-set revisions of the target
 * change; {@link #validate(boolean)} checks permissions and commit validity, then prepares
 * either a new-patch-set or edit ref command; {@link #addOps} queues the update.
 */
private class ReplaceRequest {
  final Change.Id ontoChange;
  final ObjectId newCommitId;
  // The original push command (magic branch or direct) that triggered this replacement.
  final ReceiveCommand inputCommand;
  final boolean checkMergedInto;
  // Loaded later by readChangesForReplace (or set directly in autoCloseChanges).
  ChangeNotes notes;
  // Existing patch-set commits of the target change, keyed both ways.
  BiMap<RevCommit, PatchSet.Id> revisions;
  PatchSet.Id psId;
  // Optional ref deletion issued before cmd when rebasing a change edit.
  ReceiveCommand prev;
  // The ref update that actually lands the new patch set / edit.
  ReceiveCommand cmd;
  PatchSetInfo info;
  boolean skip;
  private PatchSet.Id priorPatchSet;
  List<String> groups = ImmutableList.of();
  private ReplaceOp replaceOp;

  ReplaceRequest(
      Change.Id toChange, RevCommit newCommit, ReceiveCommand cmd, boolean checkMergedInto) {
    this.ontoChange = toChange;
    this.newCommitId = newCommit.copy();
    this.inputCommand = checkNotNull(cmd);
    this.checkMergedInto = checkMergedInto;

    revisions = HashBiMap.create();
    for (Ref ref : refs(toChange)) {
      try {
        // forcePut: a commit may appear under multiple refs; last one wins.
        revisions.forcePut(
            rp.getRevWalk().parseCommit(ref.getObjectId()), PatchSet.Id.fromRef(ref.getName()));
      } catch (IOException err) {
        logWarn(
            String.format(
                "Project %s contains invalid change ref %s", project.getName(), ref.getName()),
            err);
      }
    }
  }

  /**
   * Validate the new patch set commit for this change.
   *
   * <p><strong>Side effects:</strong>
   *
   * <ul>
   *   <li>May add error or warning messages to the progress monitor
   *   <li>Will reject {@code cmd} prior to returning false
   *   <li>May reset {@code rp.getRevWalk()}; do not call in the middle of a walk.
   * </ul>
   *
   * @param autoClose whether the caller intends to auto-close the change after adding a new patch
   *     set.
   * @return whether the new commit is valid
   * @throws IOException
   * @throws OrmException
   * @throws PermissionBackendException
   */
  boolean validate(boolean autoClose)
      throws IOException, OrmException, PermissionBackendException {
    if (!autoClose && inputCommand.getResult() != NOT_ATTEMPTED) {
      return false;
    } else if (notes == null) {
      reject(inputCommand, "change " + ontoChange + " not found");
      return false;
    }

    Change change = notes.getChange();
    priorPatchSet = change.currentPatchSetId();
    if (!revisions.containsValue(priorPatchSet)) {
      reject(inputCommand, "change " + ontoChange + " missing revisions");
      return false;
    }

    RevCommit newCommit = rp.getRevWalk().parseCommit(newCommitId);
    RevCommit priorCommit = revisions.inverse().get(priorPatchSet);

    // Permission check: the user must be allowed to add patch sets to this change.
    try {
      permissions.change(notes).database(db).check(ChangePermission.ADD_PATCH_SET);
    } catch (AuthException no) {
      reject(inputCommand, "cannot add patch set to " + ontoChange + ".");
      return false;
    }

    if (change.getStatus().isClosed()) {
      reject(inputCommand, "change " + ontoChange + " closed");
      return false;
    } else if (revisions.containsKey(newCommit)) {
      reject(inputCommand, "commit already exists (in the change)");
      return false;
    }

    // Reject commits that already exist as a patch set of any change in the project.
    for (Ref r : rp.getRepository().getRefDatabase().getRefs("refs/changes").values()) {
      if (r.getObjectId().equals(newCommit)) {
        reject(inputCommand, "commit already exists (in the project)");
        return false;
      }
    }

    for (RevCommit prior : revisions.keySet()) {
      // Don't allow a change to directly depend upon itself. This is a
      // very common error due to users making a new commit rather than
      // amending when trying to address review comments.
      if (rp.getRevWalk().isMergedInto(prior, newCommit)) {
        reject(inputCommand, SAME_CHANGE_ID_IN_MULTIPLE_CHANGES);
        return false;
      }
    }

    PermissionBackend.ForRef perm = permissions.ref(change.getDest().get());
    if (!validCommit(rp.getRevWalk(), perm, change.getDest(), inputCommand, newCommit)) {
      return false;
    }
    rp.getRevWalk().parseBody(priorCommit);

    // Don't allow the same tree if the commit message is unmodified
    // or no parents were updated (rebase), else warn that only part
    // of the commit was modified.
    if (newCommit.getTree().equals(priorCommit.getTree())) {
      boolean messageEq = eq(newCommit.getFullMessage(), priorCommit.getFullMessage());
      boolean parentsEq = parentsEqual(newCommit, priorCommit);
      boolean authorEq = authorEqual(newCommit, priorCommit);
      ObjectReader reader = rp.getRevWalk().getObjectReader();

      if (messageEq && parentsEq && authorEq && !autoClose) {
        addMessage(
            String.format(
                "(W) No changes between prior commit %s and new commit %s",
                reader.abbreviate(priorCommit).name(), reader.abbreviate(newCommit).name()));
      } else {
        StringBuilder msg = new StringBuilder();
        msg.append("(I) ");
        msg.append(reader.abbreviate(newCommit).name());
        msg.append(":");
        msg.append(" no files changed");
        if (!authorEq) {
          msg.append(", author changed");
        }
        if (!messageEq) {
          msg.append(", message updated");
        }
        if (!parentsEq) {
          msg.append(", was rebased");
        }
        addMessage(msg.toString());
      }
    }

    // Only the change owner may toggle the work-in-progress bit via push options.
    if (magicBranch != null
        && (magicBranch.workInProgress || magicBranch.ready)
        && magicBranch.workInProgress != change.isWorkInProgress()
        && !user.getAccountId().equals(change.getOwner())) {
      reject(inputCommand, ONLY_OWNER_CAN_MODIFY_WIP);
      return false;
    }

    if (magicBranch != null && magicBranch.edit) {
      return newEdit();
    }

    newPatchSet();
    return true;
  }

  /**
   * Prepares the ref command(s) for pushing onto a change edit: replace the edit if it is
   * based on the current patch set, otherwise delete the stale edit ref and create a new one.
   */
  private boolean newEdit() {
    psId = notes.getChange().currentPatchSetId();
    Optional<ChangeEdit> edit = null;
    try {
      edit = editUtil.byChange(notes, user);
    } catch (AuthException | IOException e) {
      logError("Cannot retrieve edit", e);
      return false;
    }

    if (edit.isPresent()) {
      if (edit.get().getBasePatchSet().getId().equals(psId)) {
        // replace edit
        cmd =
            new ReceiveCommand(edit.get().getEditCommit(), newCommitId, edit.get().getRefName());
      } else {
        // delete old edit ref on rebase
        prev =
            new ReceiveCommand(
                edit.get().getEditCommit(), ObjectId.zeroId(), edit.get().getRefName());
        createEditCommand();
      }
    } else {
      createEditCommand();
    }
    return true;
  }

  /** Creates the ref command for a brand-new change edit ref. */
  private void createEditCommand() {
    // create new edit
    cmd =
        new ReceiveCommand(
            ObjectId.zeroId(),
            newCommitId,
            RefNames.refsEdit(user.getAccountId(), notes.getChangeId(), psId));
  }

  /** Allocates the next patch-set id and prepares the patch-set ref creation command. */
  private void newPatchSet() throws IOException, OrmException {
    RevCommit newCommit = rp.getRevWalk().parseCommit(newCommitId);
    psId =
        ChangeUtil.nextPatchSetIdFromAllRefsMap(allRefs(), notes.getChange().currentPatchSetId());
    info = patchSetInfoFactory.get(rp.getRevWalk(), newCommit, psId);
    cmd = new ReceiveCommand(ObjectId.zeroId(), newCommitId, psId.toRefName());
  }

  /**
   * Queues the prepared update on the batch: either bare ref updates plus a reindex (edit
   * mode), or a full {@link ReplaceOp} with an optional progress tick.
   */
  void addOps(BatchUpdate bu, @Nullable Task progress) throws IOException {
    if (magicBranch != null && magicBranch.edit) {
      bu.addOp(notes.getChangeId(), new ReindexOnlyOp());
      if (prev != null) {
        bu.addRepoOnlyOp(new UpdateOneRefOp(prev));
      }
      bu.addRepoOnlyOp(new UpdateOneRefOp(cmd));
      return;
    }
    RevWalk rw = rp.getRevWalk();
    // TODO(dborowitz): Move to ReplaceOp#updateRepo.
    RevCommit newCommit = rw.parseCommit(newCommitId);
    rw.parseBody(newCommit);

    RevCommit priorCommit = revisions.inverse().get(priorPatchSet);
    replaceOp =
        replaceOpFactory
            .create(
                projectControl,
                notes.getChange().getDest(),
                checkMergedInto,
                priorPatchSet,
                priorCommit,
                psId,
                newCommit,
                info,
                groups,
                magicBranch,
                rp.getPushCertificate())
            .setRequestScopePropagator(requestScopePropagator);
    bu.addOp(notes.getChangeId(), replaceOp);
    if (progress != null) {
      bu.addOp(notes.getChangeId(), new ChangeProgressOp(progress));
    }
  }

  /** Returns the ReplaceOp's reject message, or null if no op ran or nothing was rejected. */
  String getRejectMessage() {
    return replaceOp != null ? replaceOp.getRejectMessage() : null;
  }
}
/**
 * A pending request to rewrite the group list of an existing patch set. The op is a no-op
 * when the stored groups already match (set-equality) the collected ones.
 */
private class UpdateGroupsRequest {
  private final PatchSet.Id psId;
  private final RevCommit commit;
  // Filled in later from the GroupCollector before the batch update runs.
  List<String> groups = ImmutableList.of();

  UpdateGroupsRequest(Ref ref, RevCommit commit) {
    this.psId = checkNotNull(PatchSet.Id.fromRef(ref.getName()));
    this.commit = commit;
  }

  /** Queues the group rewrite on the batch update; returns-true triggers reindexing. */
  private void addOps(BatchUpdate bu) {
    bu.addOp(
        psId.getParentKey(),
        new BatchUpdateOp() {
          @Override
          public boolean updateChange(ChangeContext ctx) throws OrmException {
            PatchSet ps = psUtil.get(ctx.getDb(), ctx.getNotes(), psId);
            List<String> oldGroups = ps.getGroups();
            if (oldGroups == null) {
              if (groups == null) {
                return false; // Nothing stored, nothing to store: skip.
              }
            } else if (sameGroups(oldGroups, groups)) {
              return false; // Unchanged: skip the write and the reindex.
            }
            psUtil.setGroups(ctx.getDb(), ctx.getUpdate(psId), ps, groups);
            return true;
          }
        });
  }

  /** Order-insensitive comparison of two group lists. */
  private boolean sameGroups(List<String> a, List<String> b) {
    return Sets.newHashSet(a).equals(Sets.newHashSet(b));
  }
}
/**
 * Applies a single ref update within a batch, then performs post-update maintenance:
 * tag-cache fast-forward tracking and, for refs/meta/config updates, project cache eviction
 * and gitweb description refresh.
 */
private class UpdateOneRefOp implements RepoOnlyOp {
  private final ReceiveCommand cmd;

  private UpdateOneRefOp(ReceiveCommand cmd) {
    this.cmd = checkNotNull(cmd);
  }

  @Override
  public void updateRepo(RepoContext ctx) throws IOException {
    ctx.addRefUpdate(cmd);
  }

  @Override
  public void postUpdate(Context ctx) {
    String refName = cmd.getRefName();
    if (cmd.getType() == ReceiveCommand.Type.UPDATE) { // aka fast-forward
      logDebug("Updating tag cache on fast-forward of {}", cmd.getRefName());
      tagCache.updateFastForward(project.getNameKey(), refName, cmd.getOldId(), cmd.getNewId());
    }
    if (isConfig(cmd)) {
      // refs/meta/config changed: reload the cached project and sync the gitweb description.
      logDebug("Reloading project in cache");
      projectCache.evict(project);
      ProjectState ps = projectCache.get(project.getNameKey());
      try {
        logDebug("Updating project description");
        repo.setGitwebDescription(ps.getProject().getDescription());
      } catch (IOException e) {
        log.warn("cannot update description of " + project.getName(), e);
      }
    }
  }
}
/** Op that performs no change mutation but still forces the change to be reindexed. */
private static class ReindexOnlyOp implements BatchUpdateOp {
  @Override
  public boolean updateChange(ChangeContext ctx) {
    // Trigger reindexing even though change isn't actually updated.
    return true;
  }
}
/** Returns all patch-set refs of the given change, from the lazily built ref maps. */
private List<Ref> refs(Change.Id changeId) {
  return refsByChange().get(changeId);
}
/**
 * Lazily builds the two patch-set ref indexes ({@code refsById} keyed by commit, and
 * {@code refsByChange} keyed by change id) from the advertised refs. Idempotent: does
 * nothing once {@code refsByChange} is set.
 */
private void initChangeRefMaps() {
  if (refsByChange != null) {
    return; // Already initialized.
  }
  int estRefsPerChange = 4;
  refsById = MultimapBuilder.hashKeys().arrayListValues().build();
  refsByChange =
      MultimapBuilder.hashKeys(allRefs().size() / estRefsPerChange)
          .arrayListValues(estRefsPerChange)
          .build();
  for (Ref ref : allRefs().values()) {
    ObjectId obj = ref.getObjectId();
    if (obj == null) {
      continue;
    }
    PatchSet.Id psId = PatchSet.Id.fromRef(ref.getName());
    if (psId == null) {
      continue; // Not a patch-set ref.
    }
    refsById.put(obj, ref);
    refsByChange.put(psId.getParentKey(), ref);
  }
}
/** Returns the patch-set refs indexed by change id, building the index on first use. */
private ListMultimap<Change.Id, Ref> refsByChange() {
  initChangeRefMaps();
  return refsByChange;
}
/** Returns the patch-set refs indexed by commit id, building the index on first use. */
private ListMultimap<ObjectId, Ref> changeRefsById() {
  initChangeRefMaps();
  return refsById;
}
/** Returns true when both commits have the same parents, in the same order. */
static boolean parentsEqual(RevCommit a, RevCommit b) {
  int parentCount = a.getParentCount();
  if (parentCount != b.getParentCount()) {
    return false;
  }
  int i = 0;
  while (i < parentCount) {
    if (!a.getParent(i).equals(b.getParent(i))) {
      return false;
    }
    i++;
  }
  return true;
}
/**
 * Returns true when both commits have the same author name and email (two missing author
 * idents also count as equal).
 */
static boolean authorEqual(RevCommit a, RevCommit b) {
  PersonIdent left = a.getAuthorIdent();
  PersonIdent right = b.getAuthorIdent();
  if (left == null) {
    return right == null;
  }
  if (right == null) {
    return false;
  }
  return eq(left.getName(), right.getName())
      && eq(left.getEmailAddress(), right.getEmailAddress());
}
/**
 * Null-safe string equality: two nulls are equal, a null never equals a non-null, otherwise
 * {@link String#equals}.
 */
static boolean eq(String a, String b) {
  // Fully qualified to avoid depending on an Objects import; identical contract to the
  // previous hand-rolled null checks.
  return java.util.Objects.equals(a, b);
}
/**
 * Runs the ref-operation validators for {@code cmd}, collecting their messages. On
 * validation failure the command is rejected and false is returned.
 */
private boolean validRefOperation(ReceiveCommand cmd) {
  RefOperationValidators validators = refValidatorsFactory.create(getProject(), user, cmd);
  try {
    messages.addAll(validators.validateForRefOperation());
    return true;
  } catch (RefOperationValidationException e) {
    messages.addAll(Lists.newArrayList(e.getMessages()));
    reject(cmd, e.getMessage());
    return false;
  }
}
/**
 * Validates every commit newly introduced by {@code cmd} on {@code branch}. Commits already
 * known as patch sets are skipped. Users with BYPASS_REVIEW pushing a non-magic,
 * non-patch-set ref skip validation entirely (unless reject-commits is configured). As a
 * side effect, may default the user's full name from a committer ident matching one of the
 * user's email addresses.
 */
private void validateNewCommits(Branch.NameKey branch, ReceiveCommand cmd)
    throws PermissionBackendException {
  PermissionBackend.ForRef perm = permissions.ref(branch.get());
  // Short-circuit path: explicit bypass-review push option on a plain ref.
  if (!RefNames.REFS_CONFIG.equals(cmd.getRefName())
      && !(MagicBranch.isMagicBranch(cmd.getRefName())
          || NEW_PATCHSET_PATTERN.matcher(cmd.getRefName()).matches())
      && pushOptions.containsKey(BYPASS_REVIEW)) {
    try {
      perm.check(RefPermission.BYPASS_REVIEW);
      if (!Iterables.isEmpty(rejectCommits)) {
        throw new AuthException("reject-commits prevents " + BYPASS_REVIEW);
      }
      logDebug("Short-circuiting new commit validation");
    } catch (AuthException denied) {
      reject(cmd, denied.getMessage());
    }
    return;
  }

  // Only attempt to default the full name once, and only if it is currently empty.
  boolean defaultName = Strings.isNullOrEmpty(user.getAccount().getFullName());
  RevWalk walk = rp.getRevWalk();
  walk.reset();
  walk.sort(RevSort.NONE);
  try {
    RevObject parsedObject = walk.parseAny(cmd.getNewId());
    if (!(parsedObject instanceof RevCommit)) {
      // Tags pointing at non-commits etc. have nothing to validate here.
      return;
    }
    ListMultimap<ObjectId, Ref> existing = changeRefsById();
    walk.markStart((RevCommit) parsedObject);
    markHeadsAsUninteresting(walk, cmd.getRefName());
    int i = 0;
    for (RevCommit c; (c = walk.next()) != null; ) {
      i++;
      if (existing.keySet().contains(c)) {
        continue; // Already a known patch set; validated previously.
      } else if (!validCommit(walk, perm, branch, cmd, c)) {
        break; // First invalid commit aborts further validation.
      }

      if (defaultName && user.hasEmailAddress(c.getCommitterIdent().getEmailAddress())) {
        try {
          String committerName = c.getCommitterIdent().getName();
          Account account =
              accountsUpdate
                  .create()
                  .update(
                      user.getAccountId(),
                      a -> {
                        if (Strings.isNullOrEmpty(a.getFullName())) {
                          a.setFullName(committerName);
                        }
                      });
          if (account != null && Strings.isNullOrEmpty(account.getFullName())) {
            user.getAccount().setFullName(account.getFullName());
          }
        } catch (IOException | ConfigInvalidException e) {
          logWarn("Cannot default full_name", e);
        } finally {
          defaultName = false;
        }
      }
    }
    logDebug("Validated {} new commits", i);
  } catch (IOException err) {
    cmd.setResult(REJECTED_MISSING_OBJECT);
    logError("Invalid pack upload; one or more objects weren't sent", err);
  }
}
/**
 * Runs commit validators on a single commit, caching successes in {@code validCommits} so
 * each commit is validated at most once per request. On failure the command is rejected and
 * validator messages are surfaced to the client.
 *
 * @return true if the commit passed (or previously passed) validation
 */
private boolean validCommit(
    RevWalk rw,
    PermissionBackend.ForRef perm,
    Branch.NameKey branch,
    ReceiveCommand cmd,
    ObjectId id)
    throws IOException {

  if (validCommits.contains(id)) {
    return true; // Already validated earlier in this request.
  }

  RevCommit c = rw.parseCommit(id);
  rw.parseBody(c);

  try (CommitReceivedEvent receiveEvent =
      new CommitReceivedEvent(cmd, project, branch.get(), rw.getObjectReader(), c, user)) {
    // %merged pushes use a reduced validator set, since the commit is already in history.
    boolean isMerged =
        magicBranch != null
            && cmd.getRefName().equals(magicBranch.cmd.getRefName())
            && magicBranch.merged;
    CommitValidators validators =
        isMerged
            ? commitValidatorsFactory.forMergedCommits(perm, user.asIdentifiedUser())
            : commitValidatorsFactory.forReceiveCommits(
                perm, branch, user.asIdentifiedUser(), sshInfo, repo, rw);
    messages.addAll(validators.validate(receiveEvent));
  } catch (CommitValidationException e) {
    logDebug("Commit validation failed on {}", c.name());
    messages.addAll(e.getMessages());
    reject(cmd, e.getMessage());
    return false;
  }
  validCommits.add(c.copy());
  return true;
}
/**
 * After a direct branch push, auto-closes open changes whose commits (by patch-set ref or by
 * Change-Id footer) are now reachable from the updated branch: existing patch sets are
 * marked merged-by-push; commits matching an open change's Change-Id get a new patch set and
 * are then closed. Errors are logged, never propagated.
 */
private void autoCloseChanges(ReceiveCommand cmd) {
  logDebug("Starting auto-closing of changes");
  String refName = cmd.getRefName();
  checkState(
      !MagicBranch.isMagicBranch(refName),
      "shouldn't be auto-closing changes on magic branch %s",
      refName);
  // TODO(dborowitz): Combine this BatchUpdate with the main one in
  // insertChangesAndPatchSets.
  try (BatchUpdate bu =
          batchUpdateFactory.create(
              db, projectControl.getProject().getNameKey(), user, TimeUtil.nowTs());
      ObjectInserter ins = repo.newObjectInserter();
      ObjectReader reader = ins.newReader();
      RevWalk rw = new RevWalk(reader)) {
    bu.setRepository(repo, rw, ins).updateChangesInParallel();
    bu.setRequestId(receiveId);
    // TODO(dborowitz): Teach BatchUpdate to ignore missing changes.

    // Walk only the commits newly introduced by this push (new tip minus old tip).
    RevCommit newTip = rw.parseCommit(cmd.getNewId());
    Branch.NameKey branch = new Branch.NameKey(project.getNameKey(), refName);

    rw.reset();
    rw.markStart(newTip);
    if (!ObjectId.zeroId().equals(cmd.getOldId())) {
      rw.markUninteresting(rw.parseCommit(cmd.getOldId()));
    }

    ListMultimap<ObjectId, Ref> byCommit = changeRefsById();
    Map<Change.Key, ChangeNotes> byKey = null; // Lazily loaded open changes of this branch.
    List<ReplaceRequest> replaceAndClose = new ArrayList<>();

    int existingPatchSets = 0;
    int newPatchSets = 0;
    COMMIT:
    for (RevCommit c; (c = rw.next()) != null; ) {
      rw.parseBody(c);

      // Case 1: the commit is already a patch set — mark its change merged-by-push.
      for (Ref ref : byCommit.get(c.copy())) {
        existingPatchSets++;
        PatchSet.Id psId = PatchSet.Id.fromRef(ref.getName());
        bu.addOp(
            psId.getParentKey(),
            mergedByPushOpFactory.create(requestScopePropagator, psId, refName));
        continue COMMIT;
      }

      // Case 2: the commit's Change-Id matches an open change — add it as a new patch set.
      for (String changeId : c.getFooterLines(CHANGE_ID)) {
        if (byKey == null) {
          byKey = openChangesByBranch(branch);
        }

        ChangeNotes onto = byKey.get(new Change.Key(changeId.trim()));
        if (onto != null) {
          newPatchSets++;
          // Hold onto this until we're done with the walk, as the call to
          // req.validate below calls isMergedInto which resets the walk.
          ReplaceRequest req = new ReplaceRequest(onto.getChangeId(), c, cmd, false);
          req.notes = onto;
          replaceAndClose.add(req);
          continue COMMIT;
        }
      }
    }

    // Validate and queue the deferred replacements now that the walk is finished.
    for (ReplaceRequest req : replaceAndClose) {
      Change.Id id = req.notes.getChangeId();
      if (!req.validate(true)) {
        logDebug("Not closing {} because validation failed", id);
        continue;
      }
      req.addOps(bu, null);
      bu.addOp(
          id,
          mergedByPushOpFactory
              .create(requestScopePropagator, req.psId, refName)
              .setPatchSetProvider(
                  new Provider<PatchSet>() {
                    @Override
                    public PatchSet get() {
                      return req.replaceOp.getPatchSet();
                    }
                  }));
      bu.addOp(id, new ChangeProgressOp(closeProgress));
    }

    logDebug(
        "Auto-closing {} changes with existing patch sets and {} with new patch sets",
        existingPatchSets,
        newPatchSets);
    bu.execute();
  } catch (RestApiException e) {
    logError("Can't insert patchset", e);
  } catch (IOException | OrmException | UpdateException | PermissionBackendException e) {
    logError("Can't scan for changes to close", e);
  }
}
private Map<Change.Key, ChangeNotes> openChangesByBranch(Branch.NameKey branch)
throws OrmException {
Map<Change.Key, ChangeNotes> r = new HashMap<>();
for (ChangeData cd : queryProvider.get().byBranchOpen(branch)) {
r.put(cd.change().getKey(), cd.notes());
}
return r;
}
  /** Returns the advertised refs captured by the {@link AllRefsWatcher} for this receive. */
  private Map<String, Ref> allRefs() {
    return allRefsWatcher.getAllRefs();
  }
private void reject(@Nullable ReceiveCommand cmd, String why) {
if (cmd != null) {
cmd.setResult(REJECTED_OTHER_REASON, why);
commandProgress.update(1);
}
}
private static boolean isHead(ReceiveCommand cmd) {
return cmd.getRefName().startsWith(Constants.R_HEADS);
}
private static boolean isConfig(ReceiveCommand cmd) {
return cmd.getRefName().equals(RefNames.REFS_CONFIG);
}
private void logDebug(String msg, Object... args) {
if (log.isDebugEnabled()) {
log.debug(receiveId + msg, args);
}
}
private void logWarn(String msg, Throwable t) {
if (log.isWarnEnabled()) {
if (t != null) {
log.warn(receiveId + msg, t);
} else {
log.warn(receiveId + msg);
}
}
}
  /** Logs a warning with no associated exception. */
  private void logWarn(String msg) {
    logWarn(msg, null);
  }
private void logError(String msg, Throwable t) {
if (log.isErrorEnabled()) {
if (t != null) {
log.error(receiveId + msg, t);
} else {
log.error(receiveId + msg);
}
}
}
  /** Logs an error with no associated exception. */
  private void logError(String msg) {
    logError(msg, null);
  }
}
| gerrit-server/src/main/java/com/google/gerrit/server/git/receive/ReceiveCommits.java | // Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.git.receive;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.gerrit.common.FooterConstants.CHANGE_ID;
import static com.google.gerrit.reviewdb.client.RefNames.REFS_CHANGES;
import static com.google.gerrit.server.change.HashtagsUtil.cleanupHashtag;
import static com.google.gerrit.server.git.MultiProgressMonitor.UNKNOWN;
import static com.google.gerrit.server.git.receive.ReceiveConstants.COMMAND_REJECTION_MESSAGE_FOOTER;
import static com.google.gerrit.server.git.receive.ReceiveConstants.ONLY_OWNER_CAN_MODIFY_WIP;
import static com.google.gerrit.server.git.receive.ReceiveConstants.SAME_CHANGE_ID_IN_MULTIPLE_CHANGES;
import static com.google.gerrit.server.git.validators.CommitValidators.NEW_PATCHSET_PATTERN;
import static com.google.gerrit.server.mail.MailUtil.getRecipientsFromFooters;
import static java.util.Comparator.comparingInt;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static org.eclipse.jgit.lib.Constants.R_HEADS;
import static org.eclipse.jgit.transport.ReceiveCommand.Result.NOT_ATTEMPTED;
import static org.eclipse.jgit.transport.ReceiveCommand.Result.OK;
import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_MISSING_OBJECT;
import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_NONFASTFORWARD;
import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_OTHER_REASON;
import com.google.common.base.Function;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.MultimapBuilder;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.google.common.collect.SortedSetMultimap;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.common.data.LabelTypes;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.common.data.PermissionRule;
import com.google.gerrit.extensions.api.changes.HashtagsInput;
import com.google.gerrit.extensions.api.changes.NotifyHandling;
import com.google.gerrit.extensions.api.changes.RecipientType;
import com.google.gerrit.extensions.api.changes.SubmitInput;
import com.google.gerrit.extensions.api.projects.ProjectConfigEntryType;
import com.google.gerrit.extensions.client.GeneralPreferencesInfo;
import com.google.gerrit.extensions.registration.DynamicMap;
import com.google.gerrit.extensions.registration.DynamicMap.Entry;
import com.google.gerrit.extensions.registration.DynamicSet;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.BadRequestException;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.PatchSetInfo;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.reviewdb.client.RevId;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.ApprovalsUtil;
import com.google.gerrit.server.ChangeUtil;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.PatchSetUtil;
import com.google.gerrit.server.Sequences;
import com.google.gerrit.server.account.AccountResolver;
import com.google.gerrit.server.account.AccountsUpdate;
import com.google.gerrit.server.change.ChangeInserter;
import com.google.gerrit.server.change.SetHashtagsOp;
import com.google.gerrit.server.config.AllProjectsName;
import com.google.gerrit.server.config.CanonicalWebUrl;
import com.google.gerrit.server.config.PluginConfig;
import com.google.gerrit.server.config.ProjectConfigEntry;
import com.google.gerrit.server.edit.ChangeEdit;
import com.google.gerrit.server.edit.ChangeEditUtil;
import com.google.gerrit.server.events.CommitReceivedEvent;
import com.google.gerrit.server.git.BanCommit;
import com.google.gerrit.server.git.GroupCollector;
import com.google.gerrit.server.git.MergeOp;
import com.google.gerrit.server.git.MergeOpRepoManager;
import com.google.gerrit.server.git.MergedByPushOp;
import com.google.gerrit.server.git.MultiProgressMonitor;
import com.google.gerrit.server.git.MultiProgressMonitor.Task;
import com.google.gerrit.server.git.ProjectConfig;
import com.google.gerrit.server.git.ReceivePackInitializer;
import com.google.gerrit.server.git.SubmoduleException;
import com.google.gerrit.server.git.SubmoduleOp;
import com.google.gerrit.server.git.TagCache;
import com.google.gerrit.server.git.ValidationError;
import com.google.gerrit.server.git.validators.CommitValidationException;
import com.google.gerrit.server.git.validators.CommitValidationMessage;
import com.google.gerrit.server.git.validators.CommitValidators;
import com.google.gerrit.server.git.validators.RefOperationValidationException;
import com.google.gerrit.server.git.validators.RefOperationValidators;
import com.google.gerrit.server.git.validators.ValidationMessage;
import com.google.gerrit.server.index.change.ChangeIndexer;
import com.google.gerrit.server.mail.MailUtil.MailRecipients;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.notedb.NotesMigration;
import com.google.gerrit.server.notedb.ReviewerStateInternal;
import com.google.gerrit.server.patch.PatchSetInfoFactory;
import com.google.gerrit.server.permissions.ChangePermission;
import com.google.gerrit.server.permissions.GlobalPermission;
import com.google.gerrit.server.permissions.PermissionBackend;
import com.google.gerrit.server.permissions.PermissionBackendException;
import com.google.gerrit.server.permissions.RefPermission;
import com.google.gerrit.server.project.CreateRefControl;
import com.google.gerrit.server.project.NoSuchChangeException;
import com.google.gerrit.server.project.NoSuchProjectException;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.project.ProjectControl;
import com.google.gerrit.server.project.ProjectState;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gerrit.server.query.change.InternalChangeQuery;
import com.google.gerrit.server.ssh.SshInfo;
import com.google.gerrit.server.update.BatchUpdate;
import com.google.gerrit.server.update.BatchUpdateOp;
import com.google.gerrit.server.update.ChangeContext;
import com.google.gerrit.server.update.Context;
import com.google.gerrit.server.update.RepoContext;
import com.google.gerrit.server.update.RepoOnlyOp;
import com.google.gerrit.server.update.UpdateException;
import com.google.gerrit.server.util.LabelVote;
import com.google.gerrit.server.util.MagicBranch;
import com.google.gerrit.server.util.RequestId;
import com.google.gerrit.server.util.RequestScopePropagator;
import com.google.gerrit.util.cli.CmdLineParser;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.assistedinject.Assisted;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.regex.Matcher;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.notes.NoteMap;
import org.eclipse.jgit.revwalk.FooterLine;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevSort;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.revwalk.filter.RevFilter;
import org.eclipse.jgit.transport.ReceiveCommand;
import org.eclipse.jgit.transport.ReceiveCommand.Result;
import org.eclipse.jgit.transport.ReceivePack;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.Option;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Receives change upload using the Git receive-pack protocol. */
class ReceiveCommits {
private static final Logger log = LoggerFactory.getLogger(ReceiveCommits.class);
private static final String BYPASS_REVIEW = "bypass-review";
  /**
   * Rejection reasons collected during processing; each carries the user-facing message that is
   * sent back over the wire (see the error-reporting loop in {@code processCommands}).
   */
  private enum Error {
    CONFIG_UPDATE(
        "You are not allowed to perform this operation.\n"
            + "Configuration changes can only be pushed by project owners\n"
            + "who also have 'Push' rights on "
            + RefNames.REFS_CONFIG),
    UPDATE(
        "You are not allowed to perform this operation.\n"
            + "To push into this reference you need 'Push' rights."),
    DELETE("You need 'Push' rights with the 'Force Push'\nflag set to delete references."),
    DELETE_CHANGES("Cannot delete from '" + REFS_CHANGES + "'"),
    CODE_REVIEW(
        "You need 'Push' rights to upload code review requests.\n"
            + "Verify that you are pushing to the right branch.");
    // User-facing message for this rejection reason.
    private final String value;
    Error(String value) {
      this.value = value;
    }
    String get() {
      return value;
    }
  }
  /** Assisted-injection factory creating one {@link ReceiveCommits} per receive-pack request. */
  interface Factory {
    ReceiveCommits create(
        ProjectControl projectControl,
        ReceivePack receivePack,
        AllRefsWatcher allRefsWatcher,
        SetMultimap<ReviewerStateInternal, Account.Id> extraReviewers);
  }
  /**
   * Default {@link MessageSender} that writes through the underlying {@link ReceivePack}'s
   * message channel; write failures are deliberately ignored to match JGit behavior.
   */
  private class ReceivePackMessageSender implements MessageSender {
    @Override
    public void sendMessage(String what) {
      rp.sendMessage(what);
    }
    @Override
    public void sendError(String what) {
      rp.sendError(what);
    }
    @Override
    public void sendBytes(byte[] what) {
      sendBytes(what, 0, what.length);
    }
    @Override
    public void sendBytes(byte[] what, int off, int len) {
      try {
        rp.getMessageOutputStream().write(what, off, len);
      } catch (IOException e) {
        // Ignore write failures (matching JGit behavior).
      }
    }
    @Override
    public void flush() {
      try {
        rp.getMessageOutputStream().flush();
      } catch (IOException e) {
        // Ignore write failures (matching JGit behavior).
      }
    }
  }
private static final Function<Exception, RestApiException> INSERT_EXCEPTION =
new Function<Exception, RestApiException>() {
@Override
public RestApiException apply(Exception input) {
if (input instanceof RestApiException) {
return (RestApiException) input;
} else if ((input instanceof ExecutionException)
&& (input.getCause() instanceof RestApiException)) {
return (RestApiException) input.getCause();
}
return new RestApiException("Error inserting change/patchset", input);
}
};
// ReceiveCommits has a lot of fields, sorry. Here and in the constructor they are split up
// somewhat, and kept sorted lexicographically within sections, except where later assignments
// depend on previous ones.
// Injected fields.
private final AccountResolver accountResolver;
private final AccountsUpdate.Server accountsUpdate;
private final AllProjectsName allProjectsName;
private final BatchUpdate.Factory batchUpdateFactory;
private final ChangeEditUtil editUtil;
private final ChangeIndexer indexer;
private final ChangeInserter.Factory changeInserterFactory;
private final ChangeNotes.Factory notesFactory;
private final CmdLineParser.Factory optionParserFactory;
private final CommitValidators.Factory commitValidatorsFactory;
private final DynamicMap<ProjectConfigEntry> pluginConfigEntries;
private final DynamicSet<ReceivePackInitializer> initializers;
private final IdentifiedUser user;
private final MergedByPushOp.Factory mergedByPushOpFactory;
private final NotesMigration notesMigration;
private final PatchSetInfoFactory patchSetInfoFactory;
private final PatchSetUtil psUtil;
private final PermissionBackend permissionBackend;
private final ProjectCache projectCache;
private final Provider<InternalChangeQuery> queryProvider;
private final Provider<MergeOp> mergeOpProvider;
private final Provider<MergeOpRepoManager> ormProvider;
private final ReceiveConfig receiveConfig;
private final RefOperationValidators.Factory refValidatorsFactory;
private final ReplaceOp.Factory replaceOpFactory;
private final RequestScopePropagator requestScopePropagator;
private final ReviewDb db;
private final Sequences seq;
private final SetHashtagsOp.Factory hashtagsFactory;
private final SshInfo sshInfo;
private final String canonicalWebUrl;
private final SubmoduleOp.Factory subOpFactory;
private final TagCache tagCache;
private final CreateRefControl createRefControl;
// Assisted injected fields.
private final AllRefsWatcher allRefsWatcher;
private final ImmutableSetMultimap<ReviewerStateInternal, Account.Id> extraReviewers;
private final ProjectControl projectControl;
private final ReceivePack rp;
// Immutable fields derived from constructor arguments.
private final LabelTypes labelTypes;
private final NoteMap rejectCommits;
private final PermissionBackend.ForProject permissions;
private final Project project;
private final Repository repo;
private final RequestId receiveId;
// Collections populated during processing.
private final List<UpdateGroupsRequest> updateGroups;
private final List<ValidationMessage> messages;
private final ListMultimap<Error, String> errors;
private final ListMultimap<String, String> pushOptions;
private final Map<Change.Id, ReplaceRequest> replaceByChange;
private final Set<ObjectId> validCommits;
/**
* Actual commands to be executed, as opposed to the mix of actual and magic commands that were
* provided over the wire.
*
* <p>Excludes commands executed implicitly as part of other {@link BatchUpdateOp}s, such as
* creating patch set refs.
*/
private final List<ReceiveCommand> actualCommands;
// Collections lazily populated during processing.
private List<CreateRequest> newChanges;
private ListMultimap<Change.Id, Ref> refsByChange;
private ListMultimap<ObjectId, Ref> refsById;
// Other settings populated during processing.
private MagicBranchInput magicBranch;
private boolean newChangeForAllNotInTarget;
// Handles for outputting back over the wire to the end user.
private Task newProgress;
private Task replaceProgress;
private Task closeProgress;
private Task commandProgress;
private MessageSender messageSender;
  /**
   * Wires the injected/assisted dependencies and derives the per-request immutable state
   * (repository handle, identified user, project, permissions, request id, reject-commits map).
   *
   * @throws IOException presumably from loading the reject-commits note map via
   *     {@code BanCommit.loadRejectCommitsMap} — confirm if other callees can also throw.
   */
  @Inject
  ReceiveCommits(
      @CanonicalWebUrl String canonicalWebUrl,
      AccountResolver accountResolver,
      AccountsUpdate.Server accountsUpdate,
      AllProjectsName allProjectsName,
      BatchUpdate.Factory batchUpdateFactory,
      ChangeEditUtil editUtil,
      ChangeIndexer indexer,
      ChangeInserter.Factory changeInserterFactory,
      ChangeNotes.Factory notesFactory,
      CmdLineParser.Factory optionParserFactory,
      CommitValidators.Factory commitValidatorsFactory,
      DynamicMap<ProjectConfigEntry> pluginConfigEntries,
      DynamicSet<ReceivePackInitializer> initializers,
      MergedByPushOp.Factory mergedByPushOpFactory,
      NotesMigration notesMigration,
      PatchSetInfoFactory patchSetInfoFactory,
      PatchSetUtil psUtil,
      PermissionBackend permissionBackend,
      ProjectCache projectCache,
      Provider<InternalChangeQuery> queryProvider,
      Provider<MergeOp> mergeOpProvider,
      Provider<MergeOpRepoManager> ormProvider,
      ReceiveConfig receiveConfig,
      RefOperationValidators.Factory refValidatorsFactory,
      ReplaceOp.Factory replaceOpFactory,
      RequestScopePropagator requestScopePropagator,
      ReviewDb db,
      Sequences seq,
      SetHashtagsOp.Factory hashtagsFactory,
      SshInfo sshInfo,
      SubmoduleOp.Factory subOpFactory,
      TagCache tagCache,
      CreateRefControl createRefControl,
      @Assisted ProjectControl projectControl,
      @Assisted ReceivePack rp,
      @Assisted AllRefsWatcher allRefsWatcher,
      @Assisted SetMultimap<ReviewerStateInternal, Account.Id> extraReviewers)
      throws IOException {
    // Injected fields.
    this.accountResolver = accountResolver;
    this.accountsUpdate = accountsUpdate;
    this.allProjectsName = allProjectsName;
    this.batchUpdateFactory = batchUpdateFactory;
    this.canonicalWebUrl = canonicalWebUrl;
    this.changeInserterFactory = changeInserterFactory;
    this.commitValidatorsFactory = commitValidatorsFactory;
    this.db = db;
    this.editUtil = editUtil;
    this.hashtagsFactory = hashtagsFactory;
    this.indexer = indexer;
    this.initializers = initializers;
    this.mergeOpProvider = mergeOpProvider;
    this.mergedByPushOpFactory = mergedByPushOpFactory;
    this.notesFactory = notesFactory;
    this.notesMigration = notesMigration;
    this.optionParserFactory = optionParserFactory;
    this.ormProvider = ormProvider;
    this.patchSetInfoFactory = patchSetInfoFactory;
    this.permissionBackend = permissionBackend;
    this.pluginConfigEntries = pluginConfigEntries;
    this.projectCache = projectCache;
    this.psUtil = psUtil;
    this.queryProvider = queryProvider;
    this.receiveConfig = receiveConfig;
    this.refValidatorsFactory = refValidatorsFactory;
    this.replaceOpFactory = replaceOpFactory;
    this.requestScopePropagator = requestScopePropagator;
    this.seq = seq;
    this.sshInfo = sshInfo;
    this.subOpFactory = subOpFactory;
    this.tagCache = tagCache;
    this.createRefControl = createRefControl;
    // Assisted injected fields.
    this.allRefsWatcher = allRefsWatcher;
    this.extraReviewers = ImmutableSetMultimap.copyOf(extraReviewers);
    this.projectControl = projectControl;
    this.rp = rp;
    // Immutable fields derived from constructor arguments.
    repo = rp.getRepository();
    user = projectControl.getUser().asIdentifiedUser();
    project = projectControl.getProject();
    labelTypes = projectControl.getLabelTypes();
    permissions = permissionBackend.user(user).project(project.getNameKey());
    receiveId = RequestId.forProject(project.getNameKey());
    rejectCommits = BanCommit.loadRejectCommitsMap(rp.getRepository(), rp.getRevWalk());
    // Collections populated during processing.
    actualCommands = new ArrayList<>();
    errors = LinkedListMultimap.create();
    messages = new ArrayList<>();
    pushOptions = LinkedListMultimap.create();
    replaceByChange = new LinkedHashMap<>();
    updateGroups = new ArrayList<>();
    validCommits = new HashSet<>();
    // Collections lazily populated during processing.
    newChanges = Collections.emptyList();
    // Other settings populated during processing.
    newChangeForAllNotInTarget =
        projectControl.getProjectState().isCreateNewChangeForAllNotInTarget();
    // Handles for outputting back over the wire to the end user.
    messageSender = new ReceivePackMessageSender();
  }
void init() {
for (ReceivePackInitializer i : initializers) {
i.init(projectControl.getProject().getNameKey(), rp);
}
}
/** Set a message sender for this operation. */
void setMessageSender(MessageSender ms) {
messageSender = ms != null ? ms : new ReceivePackMessageSender();
}
MessageSender getMessageSender() {
if (messageSender == null) {
setMessageSender(null);
}
return messageSender;
}
  /** The project this receive operation targets. */
  Project getProject() {
    return project;
  }
  /** Queues an informational (non-error) message to send back to the client. */
  private void addMessage(String message) {
    messages.add(new CommitValidationMessage(message, false));
  }
  /** Queues an error message to send back to the client. */
  void addError(String error) {
    messages.add(new CommitValidationMessage(error, true));
  }
void sendMessages() {
for (ValidationMessage m : messages) {
if (m.isError()) {
messageSender.sendError(m.getMessage());
} else {
messageSender.sendMessage(m.getMessage());
}
}
}
  /**
   * Main entry point: parses the received commands, inserts changes/patch sets, reports
   * accumulated errors, auto-closes changes on direct branch pushes, updates superproject
   * gitlinks, and drives the client-visible progress monitor throughout.
   */
  void processCommands(Collection<ReceiveCommand> commands, MultiProgressMonitor progress) {
    newProgress = progress.beginSubTask("new", UNKNOWN);
    replaceProgress = progress.beginSubTask("updated", UNKNOWN);
    closeProgress = progress.beginSubTask("closed", UNKNOWN);
    commandProgress = progress.beginSubTask("refs", UNKNOWN);
    try {
      parseCommands(commands);
    } catch (PermissionBackendException | NoSuchProjectException | IOException err) {
      // Parsing failed wholesale: reject every command that wasn't already decided.
      for (ReceiveCommand cmd : actualCommands) {
        if (cmd.getResult() == NOT_ATTEMPTED) {
          cmd.setResult(REJECTED_OTHER_REASON, "internal server error");
        }
      }
      logError(String.format("Failed to process refs in %s", project.getName()), err);
    }
    if (magicBranch != null && magicBranch.cmd.getResult() == NOT_ATTEMPTED) {
      selectNewAndReplacedChangesFromMagicBranch();
    }
    preparePatchSetsForReplace();
    insertChangesAndPatchSets();
    newProgress.end();
    replaceProgress.end();
    if (!errors.isEmpty()) {
      logDebug("Handling error conditions: {}", errors.keySet());
      for (Error error : errors.keySet()) {
        rp.sendMessage(buildError(error, errors.get(error)));
      }
      rp.sendMessage(String.format("User: %s", displayName(user)));
      rp.sendMessage(COMMAND_REJECTION_MESSAGE_FOOTER);
    }
    Set<Branch.NameKey> branches = new HashSet<>();
    for (ReceiveCommand c : actualCommands) {
      // Most post-update steps should happen in UpdateOneRefOp#postUpdate. The only steps that
      // should happen in this loop are things that can't happen within one BatchUpdate because they
      // involve kicking off an additional BatchUpdate.
      if (c.getResult() != OK) {
        continue;
      }
      if (isHead(c) || isConfig(c)) {
        switch (c.getType()) {
          case CREATE:
          case UPDATE:
          case UPDATE_NONFASTFORWARD:
            autoCloseChanges(c);
            branches.add(new Branch.NameKey(project.getNameKey(), c.getRefName()));
            break;
          case DELETE:
            break;
        }
      }
    }
    // Update superproject gitlinks if required.
    if (!branches.isEmpty()) {
      try (MergeOpRepoManager orm = ormProvider.get()) {
        orm.setContext(db, TimeUtil.nowTs(), user, receiveId);
        SubmoduleOp op = subOpFactory.create(branches, orm);
        op.updateSuperProjects();
      } catch (SubmoduleException e) {
        logError("Can't update the superprojects", e);
      }
    }
    closeProgress.end();
    commandProgress.end();
    progress.end();
    reportMessages();
  }
  /**
   * Appends "New Changes:" and "Updated Changes:" URL listings for successfully created and
   * replaced changes to the client message queue.
   */
  private void reportMessages() {
    List<CreateRequest> created =
        newChanges.stream().filter(r -> r.change != null).collect(toList());
    if (!created.isEmpty()) {
      addMessage("");
      addMessage("New Changes:");
      for (CreateRequest c : created) {
        addMessage(
            formatChangeUrl(
                canonicalWebUrl,
                c.change,
                c.change.getSubject(),
                c.change.getStatus() == Change.Status.DRAFT,
                false));
      }
      addMessage("");
    }
    // Only report replacements that were not skipped and whose ref update succeeded,
    // ordered by change number for stable output.
    List<ReplaceRequest> updated =
        replaceByChange
            .values()
            .stream()
            .filter(r -> !r.skip && r.inputCommand.getResult() == OK)
            .sorted(comparingInt(r -> r.notes.getChangeId().get()))
            .collect(toList());
    if (!updated.isEmpty()) {
      addMessage("");
      addMessage("Updated Changes:");
      boolean edit = magicBranch != null && magicBranch.edit;
      for (ReplaceRequest u : updated) {
        String subject;
        if (edit) {
          try {
            subject = rp.getRevWalk().parseCommit(u.newCommitId).getShortMessage();
          } catch (IOException e) {
            // Log and fall back to original change subject
            logWarn("failed to get subject for edit patch set", e);
            subject = u.notes.getChange().getSubject();
          }
        } else {
          subject = u.info.getSubject();
        }
        addMessage(
            formatChangeUrl(
                canonicalWebUrl,
                u.notes.getChange(),
                subject,
                u.replaceOp != null && u.replaceOp.getPatchSet().isDraft(),
                edit));
      }
      addMessage("");
    }
  }
private static String formatChangeUrl(
String url, Change change, String subject, boolean draft, boolean edit) {
StringBuilder m =
new StringBuilder()
.append(" ")
.append(url)
.append(change.getChangeId())
.append(" ")
.append(ChangeUtil.cropSubject(subject));
if (draft) {
m.append(" [DRAFT]");
}
if (edit) {
m.append(" [EDIT]");
}
return m.toString();
}
  /**
   * Executes one {@link BatchUpdate} applying all replace requests, create requests, group
   * updates, and direct ref updates collected so far, then propagates results back onto the
   * originating commands. If the magic branch requested {@code submit}, runs submit afterwards.
   */
  private void insertChangesAndPatchSets() {
    ReceiveCommand magicBranchCmd = magicBranch != null ? magicBranch.cmd : null;
    if (magicBranchCmd != null && magicBranchCmd.getResult() != NOT_ATTEMPTED) {
      // The magic branch ref update already failed; don't touch any changes.
      logWarn(
          String.format(
              "Skipping change updates on %s because ref update failed: %s %s",
              project.getName(),
              magicBranchCmd.getResult(),
              Strings.nullToEmpty(magicBranchCmd.getMessage())));
      return;
    }
    try (BatchUpdate bu =
            batchUpdateFactory.create(
                db, project.getNameKey(), user.materializedCopy(), TimeUtil.nowTs());
        ObjectInserter ins = repo.newObjectInserter();
        ObjectReader reader = ins.newReader();
        RevWalk rw = new RevWalk(reader)) {
      bu.setRepository(repo, rw, ins).updateChangesInParallel();
      bu.setRequestId(receiveId);
      bu.setRefLogMessage("push");
      logDebug("Adding {} replace requests", newChanges.size());
      for (ReplaceRequest replace : replaceByChange.values()) {
        replace.addOps(bu, replaceProgress);
      }
      logDebug("Adding {} create requests", newChanges.size());
      for (CreateRequest create : newChanges) {
        create.addOps(bu);
      }
      logDebug("Adding {} group update requests", newChanges.size());
      updateGroups.forEach(r -> r.addOps(bu));
      logDebug("Adding {} additional ref updates", actualCommands.size());
      actualCommands.forEach(c -> bu.addRepoOnlyOp(new UpdateOneRefOp(c)));
      logDebug("Executing batch");
      try {
        bu.execute();
      } catch (UpdateException e) {
        throw INSERT_EXCEPTION.apply(e);
      }
      if (magicBranchCmd != null) {
        magicBranchCmd.setResult(OK);
      }
      for (ReplaceRequest replace : replaceByChange.values()) {
        String rejectMessage = replace.getRejectMessage();
        if (rejectMessage == null) {
          if (replace.inputCommand.getResult() == NOT_ATTEMPTED) {
            // Not necessarily the magic branch, so need to set OK on the original value.
            replace.inputCommand.setResult(OK);
          }
        } else {
          logDebug("Rejecting due to message from ReplaceOp");
          reject(replace.inputCommand, rejectMessage);
        }
      }
    } catch (ResourceConflictException e) {
      addMessage(e.getMessage());
      reject(magicBranchCmd, "conflict");
    } catch (RestApiException | IOException err) {
      logError("Can't insert change/patch set for " + project.getName(), err);
      reject(magicBranchCmd, "internal server error: " + err.getMessage());
    }
    if (magicBranch != null && magicBranch.submit) {
      try {
        submit(newChanges, replaceByChange.values());
      } catch (ResourceConflictException e) {
        addMessage(e.getMessage());
        reject(magicBranchCmd, "conflict");
      } catch (RestApiException
          | OrmException
          | UpdateException
          | IOException
          | ConfigInvalidException
          | PermissionBackendException e) {
        logError("Error submitting changes to " + project.getName(), e);
        reject(magicBranchCmd, "error during submit");
      }
    }
  }
private String buildError(Error error, List<String> branches) {
StringBuilder sb = new StringBuilder();
if (branches.size() == 1) {
sb.append("Branch ").append(branches.get(0)).append(":\n");
sb.append(error.get());
return sb.toString();
}
sb.append("Branches");
String delim = " ";
for (String branch : branches) {
sb.append(delim).append(branch);
delim = ", ";
}
return sb.append(":\n").append(error.get()).toString();
}
private static String displayName(IdentifiedUser user) {
String displayName = user.getUserName();
if (displayName == null) {
displayName = user.getAccount().getPreferredEmail();
}
return displayName;
}
  /**
   * Parses and routes every command of the push.
   *
   * <p>Records client push options, swaps {@code refs/users/self} for the caller's own user ref
   * (in All-Users), dispatches magic-branch and new-patch-set refs to their dedicated parsers,
   * routes remaining commands by type, and performs detailed validation of {@code
   * refs/meta/config} (project configuration) updates.
   *
   * @param commands commands received from the client, in client order.
   * @throws PermissionBackendException if a permission check cannot be performed.
   * @throws NoSuchProjectException if a referenced project cannot be found.
   * @throws IOException on repository read errors.
   */
  private void parseCommands(Collection<ReceiveCommand> commands)
      throws PermissionBackendException, NoSuchProjectException, IOException {
    // Record client-supplied push options as key=value pairs; bare keys map to "".
    List<String> optionList = rp.getPushOptions();
    if (optionList != null) {
      for (String option : optionList) {
        int e = option.indexOf('=');
        if (e > 0) {
          pushOptions.put(option.substring(0, e), option.substring(e + 1));
        } else {
          pushOptions.put(option, "");
        }
      }
    }
    logDebug("Parsing {} commands", commands.size());
    for (ReceiveCommand cmd : commands) {
      if (cmd.getResult() != NOT_ATTEMPTED) {
        // Already rejected by the core receive process.
        logDebug("Already processed by core: {} {}", cmd.getResult(), cmd);
        continue;
      }
      if (!Repository.isValidRefName(cmd.getRefName()) || cmd.getRefName().contains("//")) {
        reject(cmd, "not valid ref");
        continue;
      }
      if (MagicBranch.isMagicBranch(cmd.getRefName())) {
        parseMagicBranch(cmd);
        continue;
      }
      // In All-Users, rewrite refs/users/self to the caller's concrete user ref; the wrapper
      // mirrors results back onto the original command so the client sees the outcome.
      if (projectControl.getProjectState().isAllUsers()
          && RefNames.REFS_USERS_SELF.equals(cmd.getRefName())) {
        String newName = RefNames.refsUsers(user.getAccountId());
        logDebug("Swapping out command for {} to {}", RefNames.REFS_USERS_SELF, newName);
        final ReceiveCommand orgCmd = cmd;
        cmd =
            new ReceiveCommand(cmd.getOldId(), cmd.getNewId(), newName, cmd.getType()) {
              @Override
              public void setResult(Result s, String m) {
                super.setResult(s, m);
                orgCmd.setResult(s, m);
              }
            };
      }
      Matcher m = NEW_PATCHSET_PATTERN.matcher(cmd.getRefName());
      if (m.matches()) {
        // The referenced change must exist and must still be open.
        //
        Change.Id changeId = Change.Id.parse(m.group(1));
        parseReplaceCommand(cmd, changeId);
        continue;
      }
      switch (cmd.getType()) {
        case CREATE:
          parseCreate(cmd);
          break;
        case UPDATE:
          parseUpdate(cmd);
          break;
        case DELETE:
          parseDelete(cmd);
          break;
        case UPDATE_NONFASTFORWARD:
          parseRewind(cmd);
          break;
        default:
          reject(cmd, "prohibited by Gerrit: unknown command type " + cmd.getType());
          continue;
      }
      if (cmd.getResult() != NOT_ATTEMPTED) {
        continue;
      }
      // refs/meta/config updates get extra validation: only project owners may push, and the
      // new project configuration must parse, have a valid parent, and only change plugin
      // parameters the pusher is allowed to edit.
      if (isConfig(cmd)) {
        logDebug("Processing {} command", cmd.getRefName());
        if (!projectControl.isOwner()) {
          reject(cmd, "not project owner");
          continue;
        }
        switch (cmd.getType()) {
          case CREATE:
          case UPDATE:
          case UPDATE_NONFASTFORWARD:
            try {
              ProjectConfig cfg = new ProjectConfig(project.getNameKey());
              cfg.load(rp.getRevWalk(), cmd.getNewId());
              if (!cfg.getValidationErrors().isEmpty()) {
                addError("Invalid project configuration:");
                for (ValidationError err : cfg.getValidationErrors()) {
                  addError("  " + err.getMessage());
                }
                reject(cmd, "invalid project configuration");
                logError(
                    "User "
                        + user.getUserName()
                        + " tried to push invalid project configuration "
                        + cmd.getNewId().name()
                        + " for "
                        + project.getName());
                continue;
              }
              Project.NameKey newParent = cfg.getProject().getParent(allProjectsName);
              Project.NameKey oldParent = project.getParent(allProjectsName);
              if (oldParent == null) {
                // update of the 'All-Projects' project
                if (newParent != null) {
                  reject(cmd, "invalid project configuration: root project cannot have parent");
                  continue;
                }
              } else {
                // Reparenting requires server admin rights, and the parent must exist.
                if (!oldParent.equals(newParent)) {
                  try {
                    permissionBackend.user(user).check(GlobalPermission.ADMINISTRATE_SERVER);
                  } catch (AuthException e) {
                    reject(cmd, "invalid project configuration: only Gerrit admin can set parent");
                    continue;
                  }
                }
                if (projectCache.get(newParent) == null) {
                  reject(cmd, "invalid project configuration: parent does not exist");
                  continue;
                }
              }
              // Compare each registered plugin config parameter against its current value and
              // reject edits to parameters that are not editable or not permitted.
              for (Entry<ProjectConfigEntry> e : pluginConfigEntries) {
                PluginConfig pluginCfg = cfg.getPluginConfig(e.getPluginName());
                ProjectConfigEntry configEntry = e.getProvider().get();
                String value = pluginCfg.getString(e.getExportName());
                String oldValue =
                    projectControl
                        .getProjectState()
                        .getConfig()
                        .getPluginConfig(e.getPluginName())
                        .getString(e.getExportName());
                if (configEntry.getType() == ProjectConfigEntryType.ARRAY) {
                  // ARRAY-typed entries compare the joined multi-value form.
                  oldValue =
                      Arrays.stream(
                              projectControl
                                  .getProjectState()
                                  .getConfig()
                                  .getPluginConfig(e.getPluginName())
                                  .getStringList(e.getExportName()))
                          .collect(joining("\n"));
                }
                if ((value == null ? oldValue != null : !value.equals(oldValue))
                    && !configEntry.isEditable(projectControl.getProjectState())) {
                  reject(
                      cmd,
                      String.format(
                          "invalid project configuration: Not allowed to set parameter"
                              + " '%s' of plugin '%s' on project '%s'.",
                          e.getExportName(), e.getPluginName(), project.getName()));
                  continue;
                }
                if (ProjectConfigEntryType.LIST.equals(configEntry.getType())
                    && value != null
                    && !configEntry.getPermittedValues().contains(value)) {
                  reject(
                      cmd,
                      String.format(
                          "invalid project configuration: The value '%s' is "
                              + "not permitted for parameter '%s' of plugin '%s'.",
                          value, e.getExportName(), e.getPluginName()));
                }
              }
            } catch (Exception e) {
              reject(cmd, "invalid project configuration");
              logError(
                  "User "
                      + user.getUserName()
                      + " tried to push invalid project configuration "
                      + cmd.getNewId().name()
                      + " for "
                      + project.getName(),
                  e);
              continue;
            }
            break;
          case DELETE:
            break;
          default:
            reject(
                cmd,
                "prohibited by Gerrit: don't know how to handle config update of type "
                    + cmd.getType());
            continue;
        }
      }
    }
  }
  /**
   * Parses a CREATE command for a new ref: verifies the new object exists, requires commits for
   * branch heads, checks ref-creation permission, runs ref-operation validators, and queues the
   * command for execution.
   */
  private void parseCreate(ReceiveCommand cmd)
      throws PermissionBackendException, NoSuchProjectException, IOException {
    RevObject obj;
    try {
      obj = rp.getRevWalk().parseAny(cmd.getNewId());
    } catch (IOException err) {
      logError(
          "Invalid object " + cmd.getNewId().name() + " for " + cmd.getRefName() + " creation",
          err);
      reject(cmd, "invalid object");
      return;
    }
    logDebug("Creating {}", cmd);
    // Branch heads must point at commits; isCommit rejects the command itself on failure.
    if (isHead(cmd) && !isCommit(cmd)) {
      return;
    }
    Branch.NameKey branch = new Branch.NameKey(project.getName(), cmd.getRefName());
    String rejectReason = createRefControl.canCreateRef(rp.getRepository(), obj, user, branch);
    if (rejectReason != null) {
      reject(cmd, "prohibited by Gerrit: " + rejectReason);
      return;
    }
    if (!validRefOperation(cmd)) {
      // validRefOperation sets messages, so no need to provide more feedback.
      return;
    }
    validateNewCommits(new Branch.NameKey(project.getNameKey(), cmd.getRefName()), cmd);
    actualCommands.add(cmd);
  }
private void parseUpdate(ReceiveCommand cmd) throws PermissionBackendException {
logDebug("Updating {}", cmd);
boolean ok;
try {
permissions.ref(cmd.getRefName()).check(RefPermission.UPDATE);
ok = true;
} catch (AuthException err) {
ok = false;
}
if (ok) {
if (isHead(cmd) && !isCommit(cmd)) {
return;
}
if (!validRefOperation(cmd)) {
return;
}
validateNewCommits(new Branch.NameKey(project.getNameKey(), cmd.getRefName()), cmd);
actualCommands.add(cmd);
} else {
if (RefNames.REFS_CONFIG.equals(cmd.getRefName())) {
errors.put(Error.CONFIG_UPDATE, RefNames.REFS_CONFIG);
} else {
errors.put(Error.UPDATE, cmd.getRefName());
}
reject(cmd, "prohibited by Gerrit: ref update access denied");
}
}
private boolean isCommit(ReceiveCommand cmd) {
RevObject obj;
try {
obj = rp.getRevWalk().parseAny(cmd.getNewId());
} catch (IOException err) {
logError("Invalid object " + cmd.getNewId().name() + " for " + cmd.getRefName(), err);
reject(cmd, "invalid object");
return false;
}
if (obj instanceof RevCommit) {
return true;
}
reject(cmd, "not a commit");
return false;
}
  /**
   * Parses a DELETE command: change refs can never be deleted, refs/meta/config deletion is always
   * refused, and any other ref requires DELETE permission plus ref-operation validation before
   * being queued. The order of checks is significant (change refs first, permission next).
   */
  private void parseDelete(ReceiveCommand cmd) throws PermissionBackendException {
    logDebug("Deleting {}", cmd);
    if (cmd.getRefName().startsWith(REFS_CHANGES)) {
      errors.put(Error.DELETE_CHANGES, cmd.getRefName());
      reject(cmd, "cannot delete changes");
    } else if (canDelete(cmd)) {
      if (!validRefOperation(cmd)) {
        // validRefOperation sets messages; nothing more to report.
        return;
      }
      actualCommands.add(cmd);
    } else if (RefNames.REFS_CONFIG.equals(cmd.getRefName())) {
      reject(cmd, "cannot delete project configuration");
    } else {
      errors.put(Error.DELETE, cmd.getRefName());
      reject(cmd, "cannot delete references");
    }
  }
private boolean canDelete(ReceiveCommand cmd) throws PermissionBackendException {
try {
permissions.ref(cmd.getRefName()).check(RefPermission.DELETE);
return true;
} catch (AuthException e) {
return false;
}
}
  /**
   * Parses a non-fast-forward UPDATE (force push): validates the new tip commits first, then
   * checks FORCE_UPDATE permission and ref-operation validators before queuing. A non-commit new
   * object is tolerated here (newObject stays null) since force updates may target tags.
   */
  private void parseRewind(ReceiveCommand cmd) throws PermissionBackendException {
    RevCommit newObject;
    try {
      newObject = rp.getRevWalk().parseCommit(cmd.getNewId());
    } catch (IncorrectObjectTypeException notCommit) {
      // Not a commit (e.g. a tag object); skip commit validation below.
      newObject = null;
    } catch (IOException err) {
      logError(
          "Invalid object " + cmd.getNewId().name() + " for " + cmd.getRefName() + " forced update",
          err);
      reject(cmd, "invalid object");
      return;
    }
    logDebug("Rewinding {}", cmd);
    if (newObject != null) {
      validateNewCommits(new Branch.NameKey(project.getNameKey(), cmd.getRefName()), cmd);
      if (cmd.getResult() != NOT_ATTEMPTED) {
        // A validator already rejected the command.
        return;
      }
    }
    boolean ok;
    try {
      permissions.ref(cmd.getRefName()).check(RefPermission.FORCE_UPDATE);
      ok = true;
    } catch (AuthException err) {
      ok = false;
    }
    if (ok) {
      if (!validRefOperation(cmd)) {
        return;
      }
      actualCommands.add(cmd);
    } else {
      cmd.setResult(
          REJECTED_NONFASTFORWARD, " need '" + PermissionRule.FORCE_PUSH + "' privilege.");
    }
  }
  /**
   * Parsed state of a single magic-branch ("refs/for/...") push: the destination branch, every
   * option supplied via "%key=value" ref suffixes or push options, and the derived notification
   * and comment-publishing settings.
   */
  static class MagicBranchInput {
    private static final Splitter COMMAS = Splitter.on(',').omitEmptyStrings();
    // The (possibly wrapped) client command this magic branch was parsed from.
    final ReceiveCommand cmd;
    final LabelTypes labelTypes;
    final NotesMigration notesMigration;
    // Fallback for shouldPublishComments(), taken from the user's preferences at construction.
    private final boolean defaultPublishComments;
    // Destination branch, resolved by parse(); null until then.
    Branch.NameKey dest;
    PermissionBackend.ForRef perm;
    Set<Account.Id> reviewer = Sets.newLinkedHashSet();
    Set<Account.Id> cc = Sets.newLinkedHashSet();
    Map<String, Short> labels = new HashMap<>();
    String message;
    // Explicit %base commits, resolved from the --base option; null when not given.
    List<RevCommit> baseCommit;
    CmdLineParser clp;
    Set<String> hashtags = new HashSet<>();
    @Option(name = "--base", metaVar = "BASE", usage = "merge base of changes")
    List<ObjectId> base;
    @Option(name = "--topic", metaVar = "NAME", usage = "attach topic to changes")
    String topic;
    @Option(name = "--draft", usage = "mark new/updated changes as draft")
    boolean draft;
    @Option(name = "--private", usage = "mark new/updated change as private")
    boolean isPrivate;
    @Option(name = "--remove-private", usage = "remove privacy flag from updated change")
    boolean removePrivate;
    @Option(
      name = "--wip",
      aliases = {"-work-in-progress"},
      usage = "mark change as work in progress"
    )
    boolean workInProgress;
    @Option(name = "--ready", usage = "mark change as ready")
    boolean ready;
    @Option(
      name = "--edit",
      aliases = {"-e"},
      usage = "upload as change edit"
    )
    boolean edit;
    @Option(name = "--submit", usage = "immediately submit the change")
    boolean submit;
    @Option(name = "--merged", usage = "create single change for a merged commit")
    boolean merged;
    @Option(name = "--publish-comments", usage = "publish all draft comments on updated changes")
    private boolean publishComments;
    @Option(
      name = "--no-publish-comments",
      aliases = {"--np"},
      usage = "do not publish draft comments"
    )
    private boolean noPublishComments;
    @Option(
      name = "--notify",
      usage =
          "Notify handling that defines to whom email notifications "
              + "should be sent. Allowed values are NONE, OWNER, "
              + "OWNER_REVIEWERS, ALL. If not set, the default is ALL."
    )
    private NotifyHandling notify;
    @Option(name = "--notify-to", metaVar = "USER", usage = "user that should be notified")
    List<Account.Id> tos = new ArrayList<>();
    @Option(name = "--notify-cc", metaVar = "USER", usage = "user that should be CC'd")
    List<Account.Id> ccs = new ArrayList<>();
    @Option(name = "--notify-bcc", metaVar = "USER", usage = "user that should be BCC'd")
    List<Account.Id> bccs = new ArrayList<>();
    @Option(
      name = "--reviewer",
      aliases = {"-r"},
      metaVar = "EMAIL",
      usage = "add reviewer to changes"
    )
    void reviewer(Account.Id id) {
      reviewer.add(id);
    }
    @Option(name = "--cc", metaVar = "EMAIL", usage = "notify user by CC")
    void cc(Account.Id id) {
      cc.add(id);
    }
    @Option(name = "--publish", usage = "publish new/updated changes")
    void publish(boolean publish) {
      draft = !publish;
    }
    @Option(
      name = "--label",
      aliases = {"-l"},
      metaVar = "LABEL+VALUE",
      usage = "label(s) to assign (defaults to +1 if no value provided"
    )
    void addLabel(String token) throws CmdLineException {
      // Validates both the label name and that the value is allowed for this project.
      LabelVote v = LabelVote.parse(token);
      try {
        LabelType.checkName(v.label());
        ApprovalsUtil.checkLabel(labelTypes, v.label(), v.value());
      } catch (BadRequestException e) {
        throw clp.reject(e.getMessage());
      }
      labels.put(v.label(), v.value());
    }
    @Option(
      name = "--message",
      aliases = {"-m"},
      metaVar = "MESSAGE",
      usage = "Comment message to apply to the review"
    )
    void addMessage(String token) {
      // git push does not allow spaces in refs.
      message = token.replace("_", " ");
    }
    @Option(
      name = "--hashtag",
      aliases = {"-t"},
      metaVar = "HASHTAG",
      usage = "add hashtag to changes"
    )
    void addHashtag(String token) throws CmdLineException {
      if (!notesMigration.readChanges()) {
        throw clp.reject("cannot add hashtags; noteDb is disabled");
      }
      String hashtag = cleanupHashtag(token);
      if (!hashtag.isEmpty()) {
        hashtags.add(hashtag);
      }
      // TODO(dpursehouse): validate hashtags
    }
    /**
     * Captures the command and label/notes context; draft defaults to true when pushing to the
     * legacy refs/drafts/ namespace, and the comment-publishing default comes from the user's
     * general preferences.
     */
    MagicBranchInput(
        IdentifiedUser user,
        ReceiveCommand cmd,
        LabelTypes labelTypes,
        NotesMigration notesMigration) {
      this.cmd = cmd;
      this.draft = cmd.getRefName().startsWith(MagicBranch.NEW_DRAFT_CHANGE);
      this.labelTypes = labelTypes;
      this.notesMigration = notesMigration;
      GeneralPreferencesInfo prefs = user.getAccount().getGeneralPreferencesInfo();
      this.defaultPublishComments =
          prefs != null
              ? firstNonNull(
                  user.getAccount().getGeneralPreferencesInfo().publishCommentsOnPush, false)
              : false;
    }
    /** Returns the reviewers and CCs collected from push options. */
    MailRecipients getMailRecipients() {
      return new MailRecipients(reviewer, cc);
    }
    /** Returns the explicit --notify-to/--notify-cc/--notify-bcc recipients, grouped by type. */
    ListMultimap<RecipientType, Account.Id> getAccountsToNotify() {
      ListMultimap<RecipientType, Account.Id> accountsToNotify =
          MultimapBuilder.hashKeys().arrayListValues().build();
      accountsToNotify.putAll(RecipientType.TO, tos);
      accountsToNotify.putAll(RecipientType.CC, ccs);
      accountsToNotify.putAll(RecipientType.BCC, bccs);
      return accountsToNotify;
    }
    /**
     * Whether draft comments should be published on this push: explicit options win over the
     * user's preference default.
     */
    boolean shouldPublishComments() {
      if (publishComments) {
        return true;
      } else if (noPublishComments) {
        return false;
      }
      return defaultPublishComments;
    }
    /**
     * Parses the magic-branch ref name into its destination branch, consuming any "%key=value"
     * option suffix (merged with push options) and splitting off a trailing topic.
     *
     * @return the destination branch ref (without topic); when no known ref prefix matches, the
     *     full candidate ref is returned unchanged.
     * @throws CmdLineException if the embedded options fail to parse.
     */
    String parse(
        CmdLineParser clp,
        Repository repo,
        Set<String> refs,
        ListMultimap<String, String> pushOptions)
        throws CmdLineException {
      String ref = RefNames.fullName(MagicBranch.getDestBranchName(cmd.getRefName()));
      ListMultimap<String, String> options = LinkedListMultimap.create(pushOptions);
      // Everything after '%' is a comma-separated option list; strip it off the ref.
      int optionStart = ref.indexOf('%');
      if (0 < optionStart) {
        for (String s : COMMAS.split(ref.substring(optionStart + 1))) {
          int e = s.indexOf('=');
          if (0 < e) {
            options.put(s.substring(0, e), s.substring(e + 1));
          } else {
            options.put(s, "");
          }
        }
        ref = ref.substring(0, optionStart);
      }
      if (!options.isEmpty()) {
        clp.parseOptionMap(options);
      }
      // Split the destination branch by branch and topic. The topic
      // suffix is entirely optional, so it might not even exist.
      String head = readHEAD(repo);
      int split = ref.length();
      for (; ; ) {
        // Progressively shorten at '/' boundaries until a known ref (or HEAD) matches.
        String name = ref.substring(0, split);
        if (refs.contains(name) || name.equals(head)) {
          break;
        }
        split = name.lastIndexOf('/', split - 1);
        if (split <= Constants.R_REFS.length()) {
          return ref;
        }
      }
      if (split < ref.length()) {
        topic = Strings.emptyToNull(ref.substring(split + 1));
      }
      return ref.substring(0, split);
    }
    /** Notification handling for new changes: explicit --notify wins; WIP defaults to OWNER. */
    NotifyHandling getNotify() {
      if (notify != null) {
        return notify;
      }
      if (workInProgress) {
        return NotifyHandling.OWNER;
      }
      return NotifyHandling.ALL;
    }
    /**
     * Notification handling for an existing change: like {@link #getNotify()}, but a change that
     * is already WIP (and not being marked ready) also defaults to OWNER.
     */
    NotifyHandling getNotify(ChangeNotes notes) {
      if (notify != null) {
        return notify;
      }
      if (workInProgress || (!ready && notes.getChange().isWorkInProgress())) {
        return NotifyHandling.OWNER;
      }
      return NotifyHandling.ALL;
    }
  }
  /**
   * Gets the push options sent by the client.
   *
   * <p>Returns an immutable snapshot copy of the current pushOptions (later mutations of the
   * underlying multimap are not reflected). The result is empty if the client does not support
   * push options, or if the client did not send any options.
   *
   * @return an immutable copy of pushOptions.
   */
  @Nullable
  // NOTE(review): ImmutableListMultimap.copyOf never returns null, so the @Nullable annotation
  // appears vestigial — confirm with callers before removing.
  ListMultimap<String, String> getPushOptions() {
    return ImmutableListMultimap.copyOf(pushOptions);
  }
  /**
   * Parses a magic-branch ("refs/for/...") command: resolves the destination branch and all
   * embedded options, enforces option compatibility and the permissions required to create
   * changes, and verifies the pushed history is connected to the target branch. At most one magic
   * branch is allowed per push; any failure rejects the command and returns early.
   */
  private void parseMagicBranch(ReceiveCommand cmd) throws PermissionBackendException {
    // Permit exactly one new change request per push.
    if (magicBranch != null) {
      reject(cmd, "duplicate request");
      return;
    }
    logDebug("Found magic branch {}", cmd.getRefName());
    magicBranch = new MagicBranchInput(user, cmd, labelTypes, notesMigration);
    magicBranch.reviewer.addAll(extraReviewers.get(ReviewerStateInternal.REVIEWER));
    magicBranch.cc.addAll(extraReviewers.get(ReviewerStateInternal.CC));
    String ref;
    CmdLineParser clp = optionParserFactory.create(magicBranch);
    magicBranch.clp = clp;
    try {
      ref = magicBranch.parse(clp, repo, rp.getAdvertisedRefs().keySet(), pushOptions);
    } catch (CmdLineException e) {
      if (!clp.wasHelpRequestedByOption()) {
        logDebug("Invalid branch syntax");
        reject(cmd, e.getMessage());
        return;
      }
      ref = null; // never happen
    }
    if (clp.wasHelpRequestedByOption()) {
      StringWriter w = new StringWriter();
      w.write("\nHelp for refs/for/branch:\n\n");
      clp.printUsage(w, null);
      addMessage(w.toString());
      reject(cmd, "see help");
      return;
    }
    // In All-Users, refs/for/refs/users/self targets the caller's own user branch.
    if (projectControl.getProjectState().isAllUsers() && RefNames.REFS_USERS_SELF.equals(ref)) {
      logDebug("Handling {}", RefNames.REFS_USERS_SELF);
      ref = RefNames.refsUsers(user.getAccountId());
    }
    if (!rp.getAdvertisedRefs().containsKey(ref)
        && !ref.equals(readHEAD(repo))
        && !ref.equals(RefNames.REFS_CONFIG)) {
      logDebug("Ref {} not found", ref);
      if (ref.startsWith(Constants.R_HEADS)) {
        String n = ref.substring(Constants.R_HEADS.length());
        reject(cmd, "branch " + n + " not found");
      } else {
        reject(cmd, ref + " not found");
      }
      return;
    }
    magicBranch.dest = new Branch.NameKey(project.getNameKey(), ref);
    magicBranch.perm = permissions.ref(ref);
    if (!projectControl.getProject().getState().permitsWrite()) {
      reject(cmd, "project state does not permit write");
      return;
    }
    // Draft workflow must be enabled server-wide and not blocked on this ref.
    if (magicBranch.draft) {
      if (!receiveConfig.allowDrafts) {
        errors.put(Error.CODE_REVIEW, ref);
        reject(cmd, "draft workflow is disabled");
        return;
      } else if (projectControl
          .controlForRef(MagicBranch.NEW_DRAFT_CHANGE + ref)
          .isBlocked(Permission.PUSH)) {
        errors.put(Error.CODE_REVIEW, ref);
        reject(cmd, "cannot upload drafts");
        return;
      }
    }
    try {
      magicBranch.perm.check(RefPermission.CREATE_CHANGE);
    } catch (AuthException denied) {
      errors.put(Error.CODE_REVIEW, ref);
      reject(cmd, denied.getMessage());
      return;
    }
    // Reject mutually exclusive option combinations.
    if (magicBranch.isPrivate && magicBranch.removePrivate) {
      reject(cmd, "the options 'private' and 'remove-private' are mutually exclusive");
      return;
    }
    if (magicBranch.workInProgress && magicBranch.ready) {
      reject(cmd, "the options 'wip' and 'ready' are mutually exclusive");
      return;
    }
    if (magicBranch.publishComments && magicBranch.noPublishComments) {
      reject(
          cmd, "the options 'publish-comments' and 'no-publish-comments' are mutually exclusive");
      return;
    }
    if (magicBranch.draft && magicBranch.submit) {
      reject(cmd, "cannot submit draft");
      return;
    }
    if (magicBranch.submit) {
      try {
        permissions.ref(ref).check(RefPermission.UPDATE_BY_SUBMIT);
      } catch (AuthException e) {
        reject(cmd, e.getMessage());
        return;
      }
    }
    RevWalk walk = rp.getRevWalk();
    RevCommit tip;
    try {
      tip = walk.parseCommit(magicBranch.cmd.getNewId());
      logDebug("Tip of push: {}", tip.name());
    } catch (IOException ex) {
      magicBranch.cmd.setResult(REJECTED_MISSING_OBJECT);
      logError("Invalid pack upload; one or more objects weren't sent", ex);
      return;
    }
    String destBranch = magicBranch.dest.get();
    try {
      if (magicBranch.merged) {
        // %merged requires the pushed tip to already be contained in the target branch.
        if (magicBranch.draft) {
          reject(cmd, "cannot be draft & merged");
          return;
        }
        if (magicBranch.base != null) {
          reject(cmd, "cannot use merged with base");
          return;
        }
        RevCommit branchTip = readBranchTip(cmd, magicBranch.dest);
        if (branchTip == null) {
          return; // readBranchTip already rejected cmd.
        }
        if (!walk.isMergedInto(tip, branchTip)) {
          reject(cmd, "not merged into branch");
          return;
        }
      }
      // If tip is a merge commit, or the root commit or
      // if %base or %merged was specified, ignore newChangeForAllNotInTarget.
      if (tip.getParentCount() > 1
          || magicBranch.base != null
          || magicBranch.merged
          || tip.getParentCount() == 0) {
        logDebug("Forcing newChangeForAllNotInTarget = false");
        newChangeForAllNotInTarget = false;
      }
      if (magicBranch.base != null) {
        // Resolve each explicit %base id into a commit, rejecting non-commits.
        logDebug("Handling %base: {}", magicBranch.base);
        magicBranch.baseCommit = Lists.newArrayListWithCapacity(magicBranch.base.size());
        for (ObjectId id : magicBranch.base) {
          try {
            magicBranch.baseCommit.add(walk.parseCommit(id));
          } catch (IncorrectObjectTypeException notCommit) {
            reject(cmd, "base must be a commit");
            return;
          } catch (MissingObjectException e) {
            reject(cmd, "base not found");
            return;
          } catch (IOException e) {
            logWarn(String.format("Project %s cannot read %s", project.getName(), id.name()), e);
            reject(cmd, "internal server error");
            return;
          }
        }
      } else if (newChangeForAllNotInTarget) {
        RevCommit branchTip = readBranchTip(cmd, magicBranch.dest);
        if (branchTip == null) {
          return; // readBranchTip already rejected cmd.
        }
        magicBranch.baseCommit = Collections.singletonList(branchTip);
        logDebug("Set baseCommit = {}", magicBranch.baseCommit.get(0).name());
      }
    } catch (IOException ex) {
      logWarn(
          String.format("Error walking to %s in project %s", destBranch, project.getName()), ex);
      reject(cmd, "internal server error");
      return;
    }
    // Validate that the new commits are connected with the target
    // branch. If they aren't, we want to abort. We do this check by
    // looking to see if we can compute a merge base between the new
    // commits and the target branch head.
    //
    try {
      Ref targetRef = rp.getAdvertisedRefs().get(magicBranch.dest.get());
      if (targetRef == null || targetRef.getObjectId() == null) {
        // The destination branch does not yet exist. Assume the
        // history being sent for review will start it and thus
        // is "connected" to the branch.
        logDebug("Branch is unborn");
        return;
      }
      RevCommit h = walk.parseCommit(targetRef.getObjectId());
      logDebug("Current branch tip: {}", h.name());
      RevFilter oldRevFilter = walk.getRevFilter();
      try {
        // Temporarily repurpose the shared walk for a merge-base computation.
        walk.reset();
        walk.setRevFilter(RevFilter.MERGE_BASE);
        walk.markStart(tip);
        walk.markStart(h);
        if (walk.next() == null) {
          reject(magicBranch.cmd, "no common ancestry");
        }
      } finally {
        walk.reset();
        walk.setRevFilter(oldRevFilter);
      }
    } catch (IOException e) {
      magicBranch.cmd.setResult(REJECTED_MISSING_OBJECT);
      logError("Invalid pack upload; one or more objects weren't sent", e);
    }
  }
private static String readHEAD(Repository repo) {
try {
return repo.getFullBranch();
} catch (IOException e) {
log.error("Cannot read HEAD symref", e);
return null;
}
}
private RevCommit readBranchTip(ReceiveCommand cmd, Branch.NameKey branch) throws IOException {
Ref r = allRefs().get(branch.get());
if (r == null) {
reject(cmd, branch.get() + " not found");
return null;
}
return rp.getRevWalk().parseCommit(r.getObjectId());
}
  /**
   * Parses an explicit push to refs/changes/... that replaces the current patch set of an
   * existing change: the new object must be a commit and the change must exist in this project.
   * On success, registers a replace request (with merged-into checking enabled).
   */
  private void parseReplaceCommand(ReceiveCommand cmd, Change.Id changeId) {
    logDebug("Parsing replace command");
    if (cmd.getType() != ReceiveCommand.Type.CREATE) {
      reject(cmd, "invalid usage");
      return;
    }
    RevCommit newCommit;
    try {
      newCommit = rp.getRevWalk().parseCommit(cmd.getNewId());
      logDebug("Replacing with {}", newCommit);
    } catch (IOException e) {
      logError("Cannot parse " + cmd.getNewId().name() + " as commit", e);
      reject(cmd, "invalid commit");
      return;
    }
    Change changeEnt;
    try {
      changeEnt = notesFactory.createChecked(db, project.getNameKey(), changeId).getChange();
    } catch (NoSuchChangeException e) {
      logError("Change not found " + changeId, e);
      reject(cmd, "change " + changeId + " not found");
      return;
    } catch (OrmException e) {
      logError("Cannot lookup existing change " + changeId, e);
      reject(cmd, "database error");
      return;
    }
    // The change must belong to the project being pushed to.
    if (!project.getNameKey().equals(changeEnt.getProject())) {
      reject(cmd, "change " + changeId + " does not belong to project " + project.getName());
      return;
    }
    logDebug("Replacing change {}", changeEnt.getId());
    requestReplace(cmd, true, changeEnt, newCommit);
  }
private boolean requestReplace(
ReceiveCommand cmd, boolean checkMergedInto, Change change, RevCommit newCommit) {
if (change.getStatus().isClosed()) {
reject(cmd, "change " + canonicalWebUrl + change.getId() + " closed");
return false;
}
ReplaceRequest req = new ReplaceRequest(change.getId(), newCommit, cmd, checkMergedInto);
if (replaceByChange.containsKey(req.ontoChange)) {
reject(cmd, "duplicate request");
return false;
}
replaceByChange.put(req.ontoChange, req);
return true;
}
  /**
   * Walks the commits of the magic-branch push and classifies each as a new change, a patch-set
   * replacement of an existing change, or a group update for an already-tracked commit. Enforces
   * the batch-change limit, the implicit-merge check, Change-Id validity/uniqueness, and finally
   * assigns change IDs and group info. On any rejection, {@code newChanges} is cleared.
   */
  private void selectNewAndReplacedChangesFromMagicBranch() {
    logDebug("Finding new and replaced changes");
    newChanges = new ArrayList<>();
    ListMultimap<ObjectId, Ref> existing = changeRefsById();
    GroupCollector groupCollector =
        GroupCollector.create(changeRefsById(), db, psUtil, notesFactory, project.getNameKey());
    try {
      RevCommit start = setUpWalkForSelectingChanges();
      if (start == null) {
        return;
      }
      LinkedHashMap<RevCommit, ChangeLookup> pending = new LinkedHashMap<>();
      Set<Change.Key> newChangeIds = new HashSet<>();
      int maxBatchChanges = receiveConfig.getEffectiveMaxBatchChangesLimit(user);
      int total = 0;
      int alreadyTracked = 0;
      // Only check for implicit merges when pushing a non-merge tip of ordinary new history.
      boolean rejectImplicitMerges =
          start.getParentCount() == 1
              && projectCache.get(project.getNameKey()).isRejectImplicitMerges()
              // Don't worry about implicit merges when creating changes for
              // already-merged commits; they're already in history, so it's too
              // late.
              && !magicBranch.merged;
      Set<RevCommit> mergedParents;
      if (rejectImplicitMerges) {
        mergedParents = new HashSet<>();
      } else {
        mergedParents = null;
      }
      for (; ; ) {
        RevCommit c = rp.getRevWalk().next();
        if (c == null) {
          break;
        }
        total++;
        rp.getRevWalk().parseBody(c);
        String name = c.name();
        groupCollector.visit(c);
        Collection<Ref> existingRefs = existing.get(c);
        if (rejectImplicitMerges) {
          // Accumulate parents of visited commits that are never themselves visited;
          // survivors are potential implicit-merge bases checked later.
          Collections.addAll(mergedParents, c.getParents());
          mergedParents.remove(c);
        }
        boolean commitAlreadyTracked = !existingRefs.isEmpty();
        if (commitAlreadyTracked) {
          alreadyTracked++;
          // Corner cases where an existing commit might need a new group:
          // A) Existing commit has a null group; wasn't assigned during schema
          //    upgrade, or schema upgrade is performed on a running server.
          // B) Let A<-B<-C, then:
          //      1. Push A to refs/heads/master
          //      2. Push B to refs/for/master
          //      3. Force push A~ to refs/heads/master
          //      4. Push C to refs/for/master.
          //      B will be in existing so we aren't replacing the patch set. It
          //      used to have its own group, but now needs to to be changed to
          //      A's group.
          // C) Commit is a PatchSet of a pre-existing change uploaded with a
          //    different target branch.
          for (Ref ref : existingRefs) {
            updateGroups.add(new UpdateGroupsRequest(ref, c));
          }
          if (!(newChangeForAllNotInTarget || magicBranch.base != null)) {
            continue;
          }
        }
        // The last Change-Id footer wins when multiple are present.
        List<String> idList = c.getFooterLines(CHANGE_ID);
        String idStr = !idList.isEmpty() ? idList.get(idList.size() - 1).trim() : null;
        if (idStr != null) {
          pending.put(c, new ChangeLookup(c, new Change.Key(idStr)));
        } else {
          pending.put(c, new ChangeLookup(c));
        }
        int n = pending.size() + newChanges.size();
        if (maxBatchChanges != 0 && n > maxBatchChanges) {
          logDebug("{} changes exceeds limit of {}", n, maxBatchChanges);
          reject(
              magicBranch.cmd,
              "the number of pushed changes in a batch exceeds the max limit " + maxBatchChanges);
          newChanges = Collections.emptyList();
          return;
        }
        if (commitAlreadyTracked) {
          boolean changeExistsOnDestBranch = false;
          for (ChangeData cd : pending.get(c).destChanges) {
            if (cd.change().getDest().equals(magicBranch.dest)) {
              changeExistsOnDestBranch = true;
              break;
            }
          }
          if (changeExistsOnDestBranch) {
            continue;
          }
          logDebug("Creating new change for {} even though it is already tracked", name);
        }
        if (!validCommit(rp.getRevWalk(), magicBranch.perm, magicBranch.dest, magicBranch.cmd, c)) {
          // Not a change the user can propose? Abort as early as possible.
          newChanges = Collections.emptyList();
          logDebug("Aborting early due to invalid commit");
          return;
        }
        // Don't allow merges to be uploaded in commit chain via all-not-in-target
        if (newChangeForAllNotInTarget && c.getParentCount() > 1) {
          reject(
              magicBranch.cmd,
              "Pushing merges in commit chains with 'all not in target' is not allowed,\n"
                  + "to override please set the base manually");
          logDebug("Rejecting merge commit {} with newChangeForAllNotInTarget", name);
          // TODO(dborowitz): Should we early return here?
        }
        if (idList.isEmpty()) {
          // No Change-Id footer: always a brand-new change.
          newChanges.add(new CreateRequest(c, magicBranch.dest.get()));
          continue;
        }
      }
      logDebug(
          "Finished initial RevWalk with {} commits total: {} already"
              + " tracked, {} new changes with no Change-Id, and {} deferred"
              + " lookups",
          total,
          alreadyTracked,
          newChanges.size(),
          pending.size());
      if (rejectImplicitMerges) {
        rejectImplicitMerges(mergedParents);
      }
      // Resolve deferred Change-Id lookups: decide replacement vs. new change per commit.
      for (Iterator<ChangeLookup> itr = pending.values().iterator(); itr.hasNext(); ) {
        ChangeLookup p = itr.next();
        if (p.changeKey == null) {
          continue;
        }
        if (newChangeIds.contains(p.changeKey)) {
          logDebug("Multiple commits with Change-Id {}", p.changeKey);
          reject(magicBranch.cmd, SAME_CHANGE_ID_IN_MULTIPLE_CHANGES);
          newChanges = Collections.emptyList();
          return;
        }
        List<ChangeData> changes = p.destChanges;
        if (changes.size() > 1) {
          logDebug(
              "Multiple changes in branch {} with Change-Id {}: {}",
              magicBranch.dest,
              p.changeKey,
              changes.stream().map(cd -> cd.getId().toString()).collect(joining()));
          // WTF, multiple changes in this branch have the same key?
          // Since the commit is new, the user should recreate it with
          // a different Change-Id. In practice, we should never see
          // this error message as Change-Id should be unique per branch.
          //
          reject(magicBranch.cmd, p.changeKey.get() + " has duplicates");
          newChanges = Collections.emptyList();
          return;
        }
        if (changes.size() == 1) {
          // Schedule as a replacement to this one matching change.
          //
          RevId currentPs = changes.get(0).currentPatchSet().getRevision();
          // If Commit is already current PatchSet of target Change.
          if (p.commit.name().equals(currentPs.get())) {
            if (pending.size() == 1) {
              // There are no commits left to check, all commits in pending were already
              // current PatchSet of the corresponding target changes.
              reject(magicBranch.cmd, "commit(s) already exists (as current patchset)");
            } else {
              // Commit is already current PatchSet.
              // Remove from pending and try next commit.
              itr.remove();
              continue;
            }
          }
          if (requestReplace(magicBranch.cmd, false, changes.get(0).change(), p.commit)) {
            continue;
          }
          newChanges = Collections.emptyList();
          return;
        }
        if (changes.size() == 0) {
          if (!isValidChangeId(p.changeKey.get())) {
            reject(magicBranch.cmd, "invalid Change-Id");
            newChanges = Collections.emptyList();
            return;
          }
          // In case the change look up from the index failed,
          // double check against the existing refs
          if (foundInExistingRef(existing.get(p.commit))) {
            if (pending.size() == 1) {
              reject(magicBranch.cmd, "commit(s) already exists (as current patchset)");
              newChanges = Collections.emptyList();
              return;
            }
            itr.remove();
            continue;
          }
          newChangeIds.add(p.changeKey);
        }
        newChanges.add(new CreateRequest(p.commit, magicBranch.dest.get()));
      }
      logDebug(
          "Finished deferred lookups with {} updates and {} new changes",
          replaceByChange.size(),
          newChanges.size());
    } catch (IOException e) {
      // Should never happen, the core receive process would have
      // identified the missing object earlier before we got control.
      //
      magicBranch.cmd.setResult(REJECTED_MISSING_OBJECT);
      logError("Invalid pack upload; one or more objects weren't sent", e);
      newChanges = Collections.emptyList();
      return;
    } catch (OrmException e) {
      logError("Cannot query database to locate prior changes", e);
      reject(magicBranch.cmd, "database error");
      newChanges = Collections.emptyList();
      return;
    }
    if (newChanges.isEmpty() && replaceByChange.isEmpty()) {
      reject(magicBranch.cmd, "no new changes");
      return;
    }
    if (!newChanges.isEmpty() && magicBranch.edit) {
      reject(magicBranch.cmd, "edit is not supported for new changes");
      return;
    }
    // Allocate change numbers and distribute group info to create/replace/update requests.
    try {
      SortedSetMultimap<ObjectId, String> groups = groupCollector.getGroups();
      List<Integer> newIds = seq.nextChangeIds(newChanges.size());
      for (int i = 0; i < newChanges.size(); i++) {
        CreateRequest create = newChanges.get(i);
        create.setChangeId(newIds.get(i));
        create.groups = ImmutableList.copyOf(groups.get(create.commit));
      }
      for (ReplaceRequest replace : replaceByChange.values()) {
        replace.groups = ImmutableList.copyOf(groups.get(replace.newCommitId));
      }
      for (UpdateGroupsRequest update : updateGroups) {
        update.groups = ImmutableList.copyOf((groups.get(update.commit)));
      }
      logDebug("Finished updating groups from GroupCollector");
    } catch (OrmException e) {
      logError("Error collecting groups for changes", e);
      reject(magicBranch.cmd, "internal server error");
      return;
    }
  }
private boolean foundInExistingRef(Collection<Ref> existingRefs) throws OrmException {
for (Ref ref : existingRefs) {
ChangeNotes notes =
notesFactory.create(db, project.getNameKey(), Change.Id.fromRef(ref.getName()));
Change change = notes.getChange();
if (change.getDest().equals(magicBranch.dest)) {
logDebug("Found change {} from existing refs.", change.getKey());
// Reindex the change asynchronously, ignoring errors.
@SuppressWarnings("unused")
Future<?> possiblyIgnoredError = indexer.indexAsync(project.getNameKey(), change.getId());
return true;
}
}
return false;
}
  /**
   * Configures the shared RevWalk to iterate, oldest-first, the pushed commits that are not
   * already reachable: starting at the pushed tip and excluding either the explicit %base
   * commits, the parents of a %merged tip, or all existing heads plus the destination branch.
   *
   * @return the pushed tip commit the walk starts from.
   */
  private RevCommit setUpWalkForSelectingChanges() throws IOException {
    RevWalk rw = rp.getRevWalk();
    RevCommit start = rw.parseCommit(magicBranch.cmd.getNewId());
    // TOPO + REVERSE yields parents before children, so changes are created bottom-up.
    rw.reset();
    rw.sort(RevSort.TOPO);
    rw.sort(RevSort.REVERSE, true);
    rp.getRevWalk().markStart(start);
    if (magicBranch.baseCommit != null) {
      markExplicitBasesUninteresting();
    } else if (magicBranch.merged) {
      logDebug("Marking parents of merged commit {} uninteresting", start.name());
      for (RevCommit c : start.getParents()) {
        rw.markUninteresting(c);
      }
    } else {
      markHeadsAsUninteresting(rw, magicBranch.dest != null ? magicBranch.dest.get() : null);
    }
    return start;
  }
  /**
   * Marks all explicit %base commits — and the destination branch tip, when it exists —
   * uninteresting in the walk, so only commits beyond the chosen bases are selected.
   */
  private void markExplicitBasesUninteresting() throws IOException {
    logDebug("Marking {} base commits uninteresting", magicBranch.baseCommit.size());
    for (RevCommit c : magicBranch.baseCommit) {
      rp.getRevWalk().markUninteresting(c);
    }
    Ref targetRef = allRefs().get(magicBranch.dest.get());
    if (targetRef != null) {
      logDebug(
          "Marking target ref {} ({}) uninteresting",
          magicBranch.dest.get(),
          targetRef.getObjectId().name());
      rp.getRevWalk().markUninteresting(rp.getRevWalk().parseCommit(targetRef.getObjectId()));
    }
  }
  /**
   * Rejects the push when the pushed commits would implicitly merge history that is not yet part
   * of the destination branch, listing each implicitly merged commit as an error message.
   *
   * <p>Resets {@code rp.getRevWalk()}; do not call in the middle of another walk.
   *
   * @param mergedParents parents of merge commits collected during change selection
   */
  private void rejectImplicitMerges(Set<RevCommit> mergedParents) throws IOException {
    if (!mergedParents.isEmpty()) {
      Ref targetRef = allRefs().get(magicBranch.dest.get());
      if (targetRef != null) {
        RevWalk rw = rp.getRevWalk();
        RevCommit tip = rw.parseCommit(targetRef.getObjectId());
        // An implicit merge exists only if none of the merge parents is already
        // reachable from the destination branch tip.
        boolean containsImplicitMerges = true;
        for (RevCommit p : mergedParents) {
          containsImplicitMerges &= !rw.isMergedInto(p, tip);
        }
        if (containsImplicitMerges) {
          rw.reset();
          for (RevCommit p : mergedParents) {
            rw.markStart(p);
          }
          rw.markUninteresting(tip);
          RevCommit c;
          while ((c = rw.next()) != null) {
            rw.parseBody(c);
            messages.add(
                new CommitValidationMessage(
                    "ERROR: Implicit Merge of "
                        + c.abbreviate(7).name()
                        + " "
                        + c.getShortMessage(),
                    false));
          }
          reject(magicBranch.cmd, "implicit merges detected");
        }
      }
    }
  }
  /**
   * Marks all branch heads (and optionally one additional ref) as uninteresting in the given
   * walk, so only commits not yet reachable from any branch are visited.
   *
   * @param rw the walk to configure
   * @param forRef extra ref name to also mark uninteresting, or null
   */
  private void markHeadsAsUninteresting(RevWalk rw, @Nullable String forRef) {
    int i = 0;
    for (Ref ref : allRefs().values()) {
      if ((ref.getName().startsWith(R_HEADS) || ref.getName().equals(forRef))
          && ref.getObjectId() != null) {
        try {
          rw.markUninteresting(rw.parseCommit(ref.getObjectId()));
          i++;
        } catch (IOException e) {
          // A broken ref should not abort the push; log it and continue with the rest.
          logWarn(String.format("Invalid ref %s in %s", ref.getName(), project.getName()), e);
        }
      }
    }
    logDebug("Marked {} heads as uninteresting", i);
  }
private static boolean isValidChangeId(String idStr) {
return idStr.matches("^I[0-9a-fA-F]{40}$") && !idStr.matches("^I00*$");
}
  /**
   * Pairs a pushed commit with the existing changes on the destination branch it may update,
   * looked up either by Change-Id footer or by commit SHA-1.
   */
  private class ChangeLookup {
    final RevCommit commit;
    // Change-Id key from the commit footer; null when the lookup was done by SHA-1.
    final Change.Key changeKey;
    // Existing changes on magicBranch.dest matching the key or the commit.
    final List<ChangeData> destChanges;

    ChangeLookup(RevCommit c, Change.Key key) throws OrmException {
      commit = c;
      changeKey = key;
      destChanges = queryProvider.get().byBranchKey(magicBranch.dest, key);
    }

    ChangeLookup(RevCommit c) throws OrmException {
      commit = c;
      destChanges = queryProvider.get().byBranchCommit(magicBranch.dest, c.getName());
      changeKey = null;
    }
  }
  /** A request to create a new change (and its first patch set) from a pushed commit. */
  private class CreateRequest {
    final RevCommit commit;
    private final String refName;
    Change.Id changeId;
    ReceiveCommand cmd;
    ChangeInserter ins;
    List<String> groups = ImmutableList.of();
    // Populated by a BatchUpdateOp in addOps once the change has been inserted.
    Change change;

    CreateRequest(RevCommit commit, String refName) {
      this.commit = commit;
      this.refName = refName;
    }

    /**
     * Assigns the new change's numeric id and prepares the ChangeInserter plus the ref creation
     * command for its first patch set.
     */
    private void setChangeId(int id) {
      boolean privateByDefault = projectCache.get(project.getNameKey()).isPrivateByDefault();
      changeId = new Change.Id(id);
      ins =
          changeInserterFactory
              .create(changeId, commit, refName)
              .setTopic(magicBranch.topic)
              .setPrivate(magicBranch.isPrivate || (privateByDefault && !magicBranch.removePrivate))
              .setWorkInProgress(magicBranch.workInProgress)
              // Changes already validated in validateNewCommits.
              .setValidate(false);
      if (magicBranch.draft) {
        ins.setDraft(magicBranch.draft);
      } else if (magicBranch.merged) {
        ins.setStatus(Change.Status.MERGED);
      }
      cmd = new ReceiveCommand(ObjectId.zeroId(), commit, ins.getPatchSetId().toRefName());
      if (rp.getPushCertificate() != null) {
        ins.setPushCertificate(rp.getPushCertificate().toTextWithSignature());
      }
    }

    /**
     * Adds the operations that actually insert the change into the batch update: reviewers, CCs,
     * approvals, change message, hashtags and topic.
     */
    private void addOps(BatchUpdate bu) throws RestApiException {
      checkState(changeId != null, "must call setChangeId before addOps");
      try {
        RevWalk rw = rp.getRevWalk();
        rw.parseBody(commit);
        final PatchSet.Id psId = ins.setGroups(groups).getPatchSetId();
        Account.Id me = user.getAccountId();
        List<FooterLine> footerLines = commit.getFooterLines();
        MailRecipients recipients = new MailRecipients();
        Map<String, Short> approvals = new HashMap<>();
        checkNotNull(magicBranch);
        recipients.add(magicBranch.getMailRecipients());
        approvals = magicBranch.labels;
        recipients.add(
            getRecipientsFromFooters(db, accountResolver, magicBranch.draft, footerLines));
        // The uploader is never added as reviewer/CC of their own push.
        recipients.remove(me);
        StringBuilder msg =
            new StringBuilder(
                ApprovalsUtil.renderMessageWithApprovals(
                    psId.get(), approvals, Collections.<String, PatchSetApproval>emptyMap()));
        msg.append('.');
        if (!Strings.isNullOrEmpty(magicBranch.message)) {
          msg.append("\n").append(magicBranch.message);
        }
        bu.insertChange(
            ins.setReviewers(recipients.getReviewers())
                .setExtraCC(recipients.getCcOnly())
                .setApprovals(approvals)
                .setMessage(msg.toString())
                .setNotify(magicBranch.getNotify())
                .setAccountsToNotify(magicBranch.getAccountsToNotify())
                .setRequestScopePropagator(requestScopePropagator)
                .setSendMail(true)
                .setPatchSetDescription(magicBranch.message));
        if (!magicBranch.hashtags.isEmpty()) {
          // Any change owner is allowed to add hashtags when creating a change.
          bu.addOp(
              changeId,
              hashtagsFactory.create(new HashtagsInput(magicBranch.hashtags)).setFireEvent(false));
        }
        if (!Strings.isNullOrEmpty(magicBranch.topic)) {
          bu.addOp(
              changeId,
              new BatchUpdateOp() {
                @Override
                public boolean updateChange(ChangeContext ctx) {
                  ctx.getUpdate(psId).setTopic(magicBranch.topic);
                  return true;
                }
              });
        }
        // Capture the inserted Change so later stages (e.g. submit) can read it.
        bu.addOp(
            changeId,
            new BatchUpdateOp() {
              @Override
              public boolean updateChange(ChangeContext ctx) {
                change = ctx.getChange();
                return false;
              }
            });
        bu.addOp(changeId, new ChangeProgressOp(newProgress));
      } catch (Exception e) {
        throw INSERT_EXCEPTION.apply(e);
      }
    }
  }
  /**
   * Immediately submits (merges) the tip change of the push, as requested via the %submit push
   * option. Merging the tip submits the whole series below it.
   *
   * @param create changes newly created by this push
   * @param replace changes that received a new patch set from this push
   */
  private void submit(Collection<CreateRequest> create, Collection<ReplaceRequest> replace)
      throws OrmException, RestApiException, UpdateException, IOException, ConfigInvalidException,
          PermissionBackendException {
    Map<ObjectId, Change> bySha = Maps.newHashMapWithExpectedSize(create.size() + replace.size());
    for (CreateRequest r : create) {
      checkNotNull(r.change, "cannot submit new change %s; op may not have run", r.changeId);
      bySha.put(r.commit, r.change);
    }
    for (ReplaceRequest r : replace) {
      bySha.put(r.newCommitId, r.notes.getChange());
    }
    Change tipChange = bySha.get(magicBranch.cmd.getNewId());
    checkNotNull(
        tipChange, "tip of push does not correspond to a change; found these changes: %s", bySha);
    logDebug(
        "Processing submit with tip change {} ({})", tipChange.getId(), magicBranch.cmd.getNewId());
    try (MergeOp op = mergeOpProvider.get()) {
      op.merge(db, tipChange, user, false, new SubmitInput(), false);
    }
  }
private void preparePatchSetsForReplace() {
try {
readChangesForReplace();
for (Iterator<ReplaceRequest> itr = replaceByChange.values().iterator(); itr.hasNext(); ) {
ReplaceRequest req = itr.next();
if (req.inputCommand.getResult() == NOT_ATTEMPTED) {
req.validate(false);
if (req.skip && req.cmd == null) {
itr.remove();
}
}
}
} catch (OrmException err) {
logError(
String.format(
"Cannot read database before replacement for project %s", project.getName()),
err);
for (ReplaceRequest req : replaceByChange.values()) {
if (req.inputCommand.getResult() == NOT_ATTEMPTED) {
req.inputCommand.setResult(REJECTED_OTHER_REASON, "internal server error");
}
}
} catch (IOException | PermissionBackendException err) {
logError(
String.format(
"Cannot read repository before replacement for project %s", project.getName()),
err);
for (ReplaceRequest req : replaceByChange.values()) {
if (req.inputCommand.getResult() == NOT_ATTEMPTED) {
req.inputCommand.setResult(REJECTED_OTHER_REASON, "internal server error");
}
}
}
logDebug("Read {} changes to replace", replaceByChange.size());
if (magicBranch != null && magicBranch.cmd.getResult() != NOT_ATTEMPTED) {
// Cancel creations tied to refs/for/ or refs/drafts/ command.
for (ReplaceRequest req : replaceByChange.values()) {
if (req.inputCommand == magicBranch.cmd && req.cmd != null) {
req.cmd.setResult(Result.REJECTED_OTHER_REASON, "aborted");
}
}
for (CreateRequest req : newChanges) {
req.cmd.setResult(Result.REJECTED_OTHER_REASON, "aborted");
}
}
}
  /** Bulk-loads the ChangeNotes for every pending replace request in one query. */
  private void readChangesForReplace() throws OrmException {
    Collection<ChangeNotes> allNotes =
        notesFactory.create(
            db, replaceByChange.values().stream().map(r -> r.ontoChange).collect(toList()));
    for (ChangeNotes notes : allNotes) {
      replaceByChange.get(notes.getChangeId()).notes = notes;
    }
  }
  /**
   * A request to add a new patch set to an existing change (or to create/replace a change edit),
   * built from a pushed commit whose Change-Id matches an open change.
   */
  private class ReplaceRequest {
    final Change.Id ontoChange;
    final ObjectId newCommitId;
    final ReceiveCommand inputCommand;
    final boolean checkMergedInto;
    ChangeNotes notes;
    // Bidirectional map of existing patch set commits <-> patch set ids of the change.
    BiMap<RevCommit, PatchSet.Id> revisions;
    PatchSet.Id psId;
    // Optional command deleting a stale edit ref (rebase-of-edit case).
    ReceiveCommand prev;
    ReceiveCommand cmd;
    PatchSetInfo info;
    boolean skip;
    private PatchSet.Id priorPatchSet;
    List<String> groups = ImmutableList.of();
    private ReplaceOp replaceOp;

    ReplaceRequest(
        Change.Id toChange, RevCommit newCommit, ReceiveCommand cmd, boolean checkMergedInto) {
      this.ontoChange = toChange;
      this.newCommitId = newCommit.copy();
      this.inputCommand = checkNotNull(cmd);
      this.checkMergedInto = checkMergedInto;
      revisions = HashBiMap.create();
      for (Ref ref : refs(toChange)) {
        try {
          revisions.forcePut(
              rp.getRevWalk().parseCommit(ref.getObjectId()), PatchSet.Id.fromRef(ref.getName()));
        } catch (IOException err) {
          // Skip corrupt patch set refs but keep processing the rest of the change.
          logWarn(
              String.format(
                  "Project %s contains invalid change ref %s", project.getName(), ref.getName()),
              err);
        }
      }
    }

    /**
     * Validate the new patch set commit for this change.
     *
     * <p><strong>Side effects:</strong>
     *
     * <ul>
     *   <li>May add error or warning messages to the progress monitor
     *   <li>Will reject {@code cmd} prior to returning false
     *   <li>May reset {@code rp.getRevWalk()}; do not call in the middle of a walk.
     * </ul>
     *
     * @param autoClose whether the caller intends to auto-close the change after adding a new patch
     *     set.
     * @return whether the new commit is valid
     * @throws IOException
     * @throws OrmException
     * @throws PermissionBackendException
     */
    boolean validate(boolean autoClose)
        throws IOException, OrmException, PermissionBackendException {
      if (!autoClose && inputCommand.getResult() != NOT_ATTEMPTED) {
        return false;
      } else if (notes == null) {
        reject(inputCommand, "change " + ontoChange + " not found");
        return false;
      }
      Change change = notes.getChange();
      priorPatchSet = change.currentPatchSetId();
      if (!revisions.containsValue(priorPatchSet)) {
        reject(inputCommand, "change " + ontoChange + " missing revisions");
        return false;
      }
      RevCommit newCommit = rp.getRevWalk().parseCommit(newCommitId);
      RevCommit priorCommit = revisions.inverse().get(priorPatchSet);
      try {
        permissions.change(notes).database(db).check(ChangePermission.ADD_PATCH_SET);
      } catch (AuthException no) {
        reject(inputCommand, "cannot add patch set to " + ontoChange + ".");
        return false;
      }
      if (change.getStatus().isClosed()) {
        reject(inputCommand, "change " + ontoChange + " closed");
        return false;
      } else if (revisions.containsKey(newCommit)) {
        reject(inputCommand, "commit already exists (in the change)");
        return false;
      }
      // Reject a commit that is already a patch set of some other change in the project.
      for (Ref r : rp.getRepository().getRefDatabase().getRefs("refs/changes").values()) {
        if (r.getObjectId().equals(newCommit)) {
          reject(inputCommand, "commit already exists (in the project)");
          return false;
        }
      }
      for (RevCommit prior : revisions.keySet()) {
        // Don't allow a change to directly depend upon itself. This is a
        // very common error due to users making a new commit rather than
        // amending when trying to address review comments.
        if (rp.getRevWalk().isMergedInto(prior, newCommit)) {
          reject(inputCommand, SAME_CHANGE_ID_IN_MULTIPLE_CHANGES);
          return false;
        }
      }
      PermissionBackend.ForRef perm = permissions.ref(change.getDest().get());
      if (!validCommit(rp.getRevWalk(), perm, change.getDest(), inputCommand, newCommit)) {
        return false;
      }
      rp.getRevWalk().parseBody(priorCommit);
      // Don't allow the same tree if the commit message is unmodified
      // or no parents were updated (rebase), else warn that only part
      // of the commit was modified.
      if (newCommit.getTree().equals(priorCommit.getTree())) {
        boolean messageEq = eq(newCommit.getFullMessage(), priorCommit.getFullMessage());
        boolean parentsEq = parentsEqual(newCommit, priorCommit);
        boolean authorEq = authorEqual(newCommit, priorCommit);
        ObjectReader reader = rp.getRevWalk().getObjectReader();
        if (messageEq && parentsEq && authorEq && !autoClose) {
          addMessage(
              String.format(
                  "(W) No changes between prior commit %s and new commit %s",
                  reader.abbreviate(priorCommit).name(), reader.abbreviate(newCommit).name()));
        } else {
          StringBuilder msg = new StringBuilder();
          msg.append("(I) ");
          msg.append(reader.abbreviate(newCommit).name());
          msg.append(":");
          msg.append(" no files changed");
          if (!authorEq) {
            msg.append(", author changed");
          }
          if (!messageEq) {
            msg.append(", message updated");
          }
          if (!parentsEq) {
            msg.append(", was rebased");
          }
          addMessage(msg.toString());
        }
      }
      // Only the change owner may toggle the work-in-progress bit via push options.
      if (magicBranch != null
          && (magicBranch.workInProgress || magicBranch.ready)
          && magicBranch.workInProgress != change.isWorkInProgress()
          && !user.getAccountId().equals(change.getOwner())) {
        reject(inputCommand, ONLY_OWNER_CAN_MODIFY_WIP);
        return false;
      }
      if (magicBranch != null && magicBranch.edit) {
        return newEdit();
      }
      newPatchSet();
      return true;
    }

    /**
     * Prepares ref commands that create or replace a change edit instead of a new patch set.
     *
     * @return true unless the existing edit could not be read
     */
    private boolean newEdit() {
      psId = notes.getChange().currentPatchSetId();
      Optional<ChangeEdit> edit = null;
      try {
        edit = editUtil.byChange(notes, user);
      } catch (AuthException | IOException e) {
        logError("Cannot retrieve edit", e);
        return false;
      }
      if (edit.isPresent()) {
        if (edit.get().getBasePatchSet().getId().equals(psId)) {
          // replace edit
          cmd =
              new ReceiveCommand(edit.get().getEditCommit(), newCommitId, edit.get().getRefName());
        } else {
          // delete old edit ref on rebase
          prev =
              new ReceiveCommand(
                  edit.get().getEditCommit(), ObjectId.zeroId(), edit.get().getRefName());
          createEditCommand();
        }
      } else {
        createEditCommand();
      }
      return true;
    }

    /** Builds the ref command creating a brand-new edit ref for the current patch set. */
    private void createEditCommand() {
      // create new edit
      cmd =
          new ReceiveCommand(
              ObjectId.zeroId(),
              newCommitId,
              RefNames.refsEdit(user.getAccountId(), notes.getChangeId(), psId));
    }

    /** Allocates the next patch set id and the ref command that will create it. */
    private void newPatchSet() throws IOException, OrmException {
      RevCommit newCommit = rp.getRevWalk().parseCommit(newCommitId);
      psId =
          ChangeUtil.nextPatchSetIdFromAllRefsMap(allRefs(), notes.getChange().currentPatchSetId());
      info = patchSetInfoFactory.get(rp.getRevWalk(), newCommit, psId);
      cmd = new ReceiveCommand(ObjectId.zeroId(), newCommitId, psId.toRefName());
    }

    /** Adds the edit-ref or replace-patch-set operations for this request to the batch update. */
    void addOps(BatchUpdate bu, @Nullable Task progress) throws IOException {
      if (magicBranch != null && magicBranch.edit) {
        // Edits update refs only; reindex the change but do not touch its metadata.
        bu.addOp(notes.getChangeId(), new ReindexOnlyOp());
        if (prev != null) {
          bu.addRepoOnlyOp(new UpdateOneRefOp(prev));
        }
        bu.addRepoOnlyOp(new UpdateOneRefOp(cmd));
        return;
      }
      RevWalk rw = rp.getRevWalk();
      // TODO(dborowitz): Move to ReplaceOp#updateRepo.
      RevCommit newCommit = rw.parseCommit(newCommitId);
      rw.parseBody(newCommit);
      RevCommit priorCommit = revisions.inverse().get(priorPatchSet);
      replaceOp =
          replaceOpFactory
              .create(
                  projectControl,
                  notes.getChange().getDest(),
                  checkMergedInto,
                  priorPatchSet,
                  priorCommit,
                  psId,
                  newCommit,
                  info,
                  groups,
                  magicBranch,
                  rp.getPushCertificate())
              .setRequestScopePropagator(requestScopePropagator);
      bu.addOp(notes.getChangeId(), replaceOp);
      if (progress != null) {
        bu.addOp(notes.getChangeId(), new ChangeProgressOp(progress));
      }
    }

    /** Rejection reason produced by the replace op, or null if none (or the op has not run). */
    String getRejectMessage() {
      return replaceOp != null ? replaceOp.getRejectMessage() : null;
    }
  }
  /** Updates the group membership strings of an existing patch set when they have changed. */
  private class UpdateGroupsRequest {
    private final PatchSet.Id psId;
    private final RevCommit commit;
    List<String> groups = ImmutableList.of();

    UpdateGroupsRequest(Ref ref, RevCommit commit) {
      this.psId = checkNotNull(PatchSet.Id.fromRef(ref.getName()));
      this.commit = commit;
    }

    private void addOps(BatchUpdate bu) {
      bu.addOp(
          psId.getParentKey(),
          new BatchUpdateOp() {
            @Override
            public boolean updateChange(ChangeContext ctx) throws OrmException {
              PatchSet ps = psUtil.get(ctx.getDb(), ctx.getNotes(), psId);
              List<String> oldGroups = ps.getGroups();
              // Only write when the set of groups actually differs.
              if (oldGroups == null) {
                if (groups == null) {
                  return false;
                }
              } else if (sameGroups(oldGroups, groups)) {
                return false;
              }
              psUtil.setGroups(ctx.getDb(), ctx.getUpdate(psId), ps, groups);
              return true;
            }
          });
    }

    /** Order-insensitive comparison of two group lists. */
    private boolean sameGroups(List<String> a, List<String> b) {
      return Sets.newHashSet(a).equals(Sets.newHashSet(b));
    }
  }
  /** Executes a single ref update and performs related tag-cache and project maintenance. */
  private class UpdateOneRefOp implements RepoOnlyOp {
    private final ReceiveCommand cmd;

    private UpdateOneRefOp(ReceiveCommand cmd) {
      this.cmd = checkNotNull(cmd);
    }

    @Override
    public void updateRepo(RepoContext ctx) throws IOException {
      ctx.addRefUpdate(cmd);
    }

    @Override
    public void postUpdate(Context ctx) {
      String refName = cmd.getRefName();
      if (cmd.getType() == ReceiveCommand.Type.UPDATE) { // aka fast-forward
        logDebug("Updating tag cache on fast-forward of {}", cmd.getRefName());
        tagCache.updateFastForward(project.getNameKey(), refName, cmd.getOldId(), cmd.getNewId());
      }
      if (isConfig(cmd)) {
        // refs/meta/config changed: reload the project and refresh the gitweb description.
        logDebug("Reloading project in cache");
        projectCache.evict(project);
        ProjectState ps = projectCache.get(project.getNameKey());
        try {
          logDebug("Updating project description");
          repo.setGitwebDescription(ps.getProject().getDescription());
        } catch (IOException e) {
          log.warn("cannot update description of " + project.getName(), e);
        }
      }
    }
  }
  /** No-op change update whose only purpose is to trigger reindexing of the change. */
  private static class ReindexOnlyOp implements BatchUpdateOp {
    @Override
    public boolean updateChange(ChangeContext ctx) {
      // Trigger reindexing even though change isn't actually updated.
      return true;
    }
  }
  /** Returns all patch set refs belonging to the given change. */
  private List<Ref> refs(Change.Id changeId) {
    return refsByChange().get(changeId);
  }

  /**
   * Lazily builds the lookup maps from commit id and from change id to patch set refs, scanning
   * all advertised refs once.
   */
  private void initChangeRefMaps() {
    if (refsByChange == null) {
      int estRefsPerChange = 4;
      refsById = MultimapBuilder.hashKeys().arrayListValues().build();
      refsByChange =
          MultimapBuilder.hashKeys(allRefs().size() / estRefsPerChange)
              .arrayListValues(estRefsPerChange)
              .build();
      for (Ref ref : allRefs().values()) {
        ObjectId obj = ref.getObjectId();
        if (obj != null) {
          PatchSet.Id psId = PatchSet.Id.fromRef(ref.getName());
          if (psId != null) {
            refsById.put(obj, ref);
            refsByChange.put(psId.getParentKey(), ref);
          }
        }
      }
    }
  }

  /** Map of change id to its patch set refs, built on first use. */
  private ListMultimap<Change.Id, Ref> refsByChange() {
    initChangeRefMaps();
    return refsByChange;
  }

  /** Map of commit id to the patch set refs pointing at it, built on first use. */
  private ListMultimap<ObjectId, Ref> changeRefsById() {
    initChangeRefMaps();
    return refsById;
  }
static boolean parentsEqual(RevCommit a, RevCommit b) {
if (a.getParentCount() != b.getParentCount()) {
return false;
}
for (int i = 0; i < a.getParentCount(); i++) {
if (!a.getParent(i).equals(b.getParent(i))) {
return false;
}
}
return true;
}
static boolean authorEqual(RevCommit a, RevCommit b) {
PersonIdent aAuthor = a.getAuthorIdent();
PersonIdent bAuthor = b.getAuthorIdent();
if (aAuthor == null && bAuthor == null) {
return true;
} else if (aAuthor == null || bAuthor == null) {
return false;
}
return eq(aAuthor.getName(), bAuthor.getName())
&& eq(aAuthor.getEmailAddress(), bAuthor.getEmailAddress());
}
static boolean eq(String a, String b) {
if (a == null && b == null) {
return true;
} else if (a == null || b == null) {
return false;
} else {
return a.equals(b);
}
}
  /**
   * Runs the configured ref-operation validators for the command; on failure the validation
   * messages are collected and the command is rejected.
   *
   * @return true if the ref operation is allowed
   */
  private boolean validRefOperation(ReceiveCommand cmd) {
    RefOperationValidators refValidators = refValidatorsFactory.create(getProject(), user, cmd);
    try {
      messages.addAll(refValidators.validateForRefOperation());
    } catch (RefOperationValidationException e) {
      messages.addAll(Lists.newArrayList(e.getMessages()));
      reject(cmd, e.getMessage());
      return false;
    }
    return true;
  }
  /**
   * Validates all commits newly introduced by a direct push to the given branch, short-circuiting
   * when the BYPASS_REVIEW push option is used and permitted. Also defaults the user's full name
   * from a matching committer ident if it is still empty.
   */
  private void validateNewCommits(Branch.NameKey branch, ReceiveCommand cmd)
      throws PermissionBackendException {
    PermissionBackend.ForRef perm = permissions.ref(branch.get());
    if (!RefNames.REFS_CONFIG.equals(cmd.getRefName())
        && !(MagicBranch.isMagicBranch(cmd.getRefName())
            || NEW_PATCHSET_PATTERN.matcher(cmd.getRefName()).matches())
        && pushOptions.containsKey(BYPASS_REVIEW)) {
      try {
        perm.check(RefPermission.BYPASS_REVIEW);
        if (!Iterables.isEmpty(rejectCommits)) {
          throw new AuthException("reject-commits prevents " + BYPASS_REVIEW);
        }
        logDebug("Short-circuiting new commit validation");
      } catch (AuthException denied) {
        reject(cmd, denied.getMessage());
      }
      return;
    }
    boolean defaultName = Strings.isNullOrEmpty(user.getAccount().getFullName());
    RevWalk walk = rp.getRevWalk();
    walk.reset();
    walk.sort(RevSort.NONE);
    try {
      RevObject parsedObject = walk.parseAny(cmd.getNewId());
      if (!(parsedObject instanceof RevCommit)) {
        // Annotated tags and other non-commit objects need no commit validation.
        return;
      }
      ListMultimap<ObjectId, Ref> existing = changeRefsById();
      walk.markStart((RevCommit) parsedObject);
      markHeadsAsUninteresting(walk, cmd.getRefName());
      int i = 0;
      for (RevCommit c; (c = walk.next()) != null; ) {
        i++;
        if (existing.keySet().contains(c)) {
          // Commit is already a known patch set; it was validated on upload.
          continue;
        } else if (!validCommit(walk, perm, branch, cmd, c)) {
          break;
        }
        if (defaultName && user.hasEmailAddress(c.getCommitterIdent().getEmailAddress())) {
          try {
            String committerName = c.getCommitterIdent().getName();
            Account account =
                accountsUpdate
                    .create()
                    .update(
                        user.getAccountId(),
                        a -> {
                          if (Strings.isNullOrEmpty(a.getFullName())) {
                            a.setFullName(committerName);
                          }
                        });
            if (account != null && Strings.isNullOrEmpty(account.getFullName())) {
              user.getAccount().setFullName(account.getFullName());
            }
          } catch (IOException | ConfigInvalidException e) {
            logWarn("Cannot default full_name", e);
          } finally {
            // Attempt the defaulting at most once per push.
            defaultName = false;
          }
        }
      }
      logDebug("Validated {} new commits", i);
    } catch (IOException err) {
      cmd.setResult(REJECTED_MISSING_OBJECT);
      logError("Invalid pack upload; one or more objects weren't sent", err);
    }
  }
  /**
   * Runs the commit validators on a single commit, caching successes so each commit is validated
   * at most once per receive.
   *
   * @return true if the commit passed validation; on failure the command is rejected
   */
  private boolean validCommit(
      RevWalk rw,
      PermissionBackend.ForRef perm,
      Branch.NameKey branch,
      ReceiveCommand cmd,
      ObjectId id)
      throws IOException {
    if (validCommits.contains(id)) {
      return true;
    }
    RevCommit c = rw.parseCommit(id);
    rw.parseBody(c);
    try (CommitReceivedEvent receiveEvent =
        new CommitReceivedEvent(cmd, project, branch.get(), rw.getObjectReader(), c, user)) {
      // Commits pushed with the %merged option use a reduced validator set.
      boolean isMerged =
          magicBranch != null
              && cmd.getRefName().equals(magicBranch.cmd.getRefName())
              && magicBranch.merged;
      CommitValidators validators =
          isMerged
              ? commitValidatorsFactory.forMergedCommits(perm, user.asIdentifiedUser())
              : commitValidatorsFactory.forReceiveCommits(
                  perm, branch, user.asIdentifiedUser(), sshInfo, repo, rw);
      messages.addAll(validators.validate(receiveEvent));
    } catch (CommitValidationException e) {
      logDebug("Commit validation failed on {}", c.name());
      messages.addAll(e.getMessages());
      reject(cmd, e.getMessage());
      return false;
    }
    validCommits.add(c.copy());
    return true;
  }
  /**
   * After a direct branch update, finds open changes whose commits are now contained in the
   * branch and closes them: commits that already are a patch set of a change mark that change
   * merged; commits carrying a known Change-Id footer get a new, merged patch set.
   */
  private void autoCloseChanges(ReceiveCommand cmd) {
    logDebug("Starting auto-closing of changes");
    String refName = cmd.getRefName();
    checkState(
        !MagicBranch.isMagicBranch(refName),
        "shouldn't be auto-closing changes on magic branch %s",
        refName);
    // TODO(dborowitz): Combine this BatchUpdate with the main one in
    // insertChangesAndPatchSets.
    try (BatchUpdate bu =
            batchUpdateFactory.create(
                db, projectControl.getProject().getNameKey(), user, TimeUtil.nowTs());
        ObjectInserter ins = repo.newObjectInserter();
        ObjectReader reader = ins.newReader();
        RevWalk rw = new RevWalk(reader)) {
      bu.setRepository(repo, rw, ins).updateChangesInParallel();
      bu.setRequestId(receiveId);
      // TODO(dborowitz): Teach BatchUpdate to ignore missing changes.
      RevCommit newTip = rw.parseCommit(cmd.getNewId());
      Branch.NameKey branch = new Branch.NameKey(project.getNameKey(), refName);
      rw.reset();
      rw.markStart(newTip);
      if (!ObjectId.zeroId().equals(cmd.getOldId())) {
        rw.markUninteresting(rw.parseCommit(cmd.getOldId()));
      }
      ListMultimap<ObjectId, Ref> byCommit = changeRefsById();
      Map<Change.Key, ChangeNotes> byKey = null;
      List<ReplaceRequest> replaceAndClose = new ArrayList<>();
      int existingPatchSets = 0;
      int newPatchSets = 0;
      COMMIT:
      for (RevCommit c; (c = rw.next()) != null; ) {
        rw.parseBody(c);
        for (Ref ref : byCommit.get(c.copy())) {
          // Commit is already a patch set of a change: mark that change merged.
          existingPatchSets++;
          PatchSet.Id psId = PatchSet.Id.fromRef(ref.getName());
          bu.addOp(
              psId.getParentKey(),
              mergedByPushOpFactory.create(requestScopePropagator, psId, refName));
          continue COMMIT;
        }
        for (String changeId : c.getFooterLines(CHANGE_ID)) {
          if (byKey == null) {
            // Lazily load open changes only when a Change-Id footer is first seen.
            byKey = openChangesByBranch(branch);
          }
          ChangeNotes onto = byKey.get(new Change.Key(changeId.trim()));
          if (onto != null) {
            newPatchSets++;
            // Hold onto this until we're done with the walk, as the call to
            // req.validate below calls isMergedInto which resets the walk.
            ReplaceRequest req = new ReplaceRequest(onto.getChangeId(), c, cmd, false);
            req.notes = onto;
            replaceAndClose.add(req);
            continue COMMIT;
          }
        }
      }
      for (ReplaceRequest req : replaceAndClose) {
        Change.Id id = req.notes.getChangeId();
        if (!req.validate(true)) {
          logDebug("Not closing {} because validation failed", id);
          continue;
        }
        req.addOps(bu, null);
        bu.addOp(
            id,
            mergedByPushOpFactory
                .create(requestScopePropagator, req.psId, refName)
                .setPatchSetProvider(
                    new Provider<PatchSet>() {
                      @Override
                      public PatchSet get() {
                        return req.replaceOp.getPatchSet();
                      }
                    }));
        bu.addOp(id, new ChangeProgressOp(closeProgress));
      }
      logDebug(
          "Auto-closing {} changes with existing patch sets and {} with new patch sets",
          existingPatchSets,
          newPatchSets);
      bu.execute();
    } catch (RestApiException e) {
      logError("Can't insert patchset", e);
    } catch (IOException | OrmException | UpdateException | PermissionBackendException e) {
      logError("Can't scan for changes to close", e);
    }
  }
private Map<Change.Key, ChangeNotes> openChangesByBranch(Branch.NameKey branch)
throws OrmException {
Map<Change.Key, ChangeNotes> r = new HashMap<>();
for (ChangeData cd : queryProvider.get().byBranchOpen(branch)) {
r.put(cd.change().getKey(), cd.notes());
}
return r;
}
  /** All refs advertised by the receive-pack, as captured by the all-refs watcher hook. */
  private Map<String, Ref> allRefs() {
    return allRefsWatcher.getAllRefs();
  }

  /** Marks the command as rejected with the given reason and advances the progress monitor. */
  private void reject(@Nullable ReceiveCommand cmd, String why) {
    if (cmd != null) {
      cmd.setResult(REJECTED_OTHER_REASON, why);
      commandProgress.update(1);
    }
  }
  /** Returns true if the command targets a branch under refs/heads/. */
  private static boolean isHead(ReceiveCommand cmd) {
    return cmd.getRefName().startsWith(Constants.R_HEADS);
  }

  /** Returns true if the command targets the project config ref. */
  private static boolean isConfig(ReceiveCommand cmd) {
    return cmd.getRefName().equals(RefNames.REFS_CONFIG);
  }
  // Logging helpers: prefix every message with the receive request id for log correlation.

  private void logDebug(String msg, Object... args) {
    if (log.isDebugEnabled()) {
      log.debug(receiveId + msg, args);
    }
  }

  private void logWarn(String msg, Throwable t) {
    if (log.isWarnEnabled()) {
      if (t != null) {
        log.warn(receiveId + msg, t);
      } else {
        log.warn(receiveId + msg);
      }
    }
  }

  private void logWarn(String msg) {
    logWarn(msg, null);
  }

  private void logError(String msg, Throwable t) {
    if (log.isErrorEnabled()) {
      if (t != null) {
        log.error(receiveId + msg, t);
      } else {
        log.error(receiveId + msg);
      }
    }
  }

  private void logError(String msg) {
    logError(msg, null);
  }
}
| Rename ReceiveCommits.Error since it clashes with java.lang.Error
ErrorProne reports this as an issue.
Change-Id: Ifaea16fe3f8b60232be3376a6c851be687aac488
Signed-off-by: Edwin Kempin <[email protected]>
| gerrit-server/src/main/java/com/google/gerrit/server/git/receive/ReceiveCommits.java | Rename ReceiveCommits.Error since it clashes with java.lang.Error |
|
Java | apache-2.0 | 81e3d562cfb0de4ab0028be26473ea0620e4759a | 0 | ptrd/jmeter-plugins,ptrd/jmeter-plugins,Sausageo/jmeter-plugins,ptrd/jmeter-plugins,ptrd/jmeter-plugins,Sausageo/jmeter-plugins,Sausageo/jmeter-plugins,Sausageo/jmeter-plugins,ptrd/jmeter-plugins,Sausageo/jmeter-plugins | package kg.apc.jmeter.charting;
import java.awt.AlphaComposite;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Composite;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.GradientPaint;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Stroke;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.ClipboardOwner;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.util.AbstractMap;
import java.util.Iterator;
import java.util.Map.Entry;
import javax.swing.BorderFactory;
import javax.swing.JComponent;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import javax.swing.border.BevelBorder;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.gui.NumberRenderer;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/**
 * Swing component that renders chart rows (lines, bars) with axes, grid lines,
 * a legend and optional gradient background, used by the JMeter plugin graphs.
 *
 * @author apc
 */
public class GraphPanelChart
extends JComponent
implements ClipboardOwner
{
   private static final String AD_TEXT = "http://apc.kg";
   private static final String NO_SAMPLES = "Waiting for samples...";
   // Outer margin, in pixels, around all chart elements
   private static final int spacing = 5;
   /*
    * Special type of graph where minY is forced to 0 and maxY is forced to 100
    * to display percentage charts (eg cpu monitoring)
    */
   public static final int CHART_PERCENTAGE = 0;
   public static final int CHART_DEFAULT = -1;
   private static final Logger log = LoggingManager.getLoggerForClass();
   // Layout rectangles recomputed on every paint
   private Rectangle legendRect;
   private Rectangle xAxisRect;
   private Rectangle yAxisRect;
   private Rectangle chartRect;
   private static final Rectangle zeroRect = new Rectangle();
   // Chart data: row label -> row values
   private AbstractMap<String, AbstractGraphRow> rows;
   // Displayed value ranges, recomputed from the rows before painting
   private double maxYVal;
   private double minYVal;
   private long maxXVal;
   private long minXVal;
   private long currentXVal;
   private static final int gridLinesCount = 10;
   private NumberRenderer yAxisLabelRenderer;
   private NumberRenderer xAxisLabelRenderer;
   private boolean drawStartFinalZeroingLines = false;
   private boolean drawCurrentX = false;
   // Forced lower bound for the X axis; negative means "not forced"
   private int forcedMinX = -1;
   private int chartType = CHART_DEFAULT;
   // The stroke used to paint Graph's dashed lines
   private Stroke dashStroke = new BasicStroke(
           1.0f, // Width
           BasicStroke.CAP_SQUARE, // End cap
           BasicStroke.JOIN_MITER, // Join style
           10.0f, // Miter limit
           new float[] {1.0f,4.0f}, // Dash pattern
           0.0f); // Dash phase
   // The stroke to paint thick Graph rows
   private Stroke thickStroke = new BasicStroke(
           AbstractGraphRow.LINE_THICKNESS_BIG,
           BasicStroke.CAP_BUTT,
           BasicStroke.JOIN_BEVEL);
   // Message display in graphs. Used for perfmon error messages
   private String errorMessage = null;
   // Chart's gradient background end color
   private Color gradientColor = new Color(229,236,246);
   // Chart's Axis Color. For good results, use gradient color - (30, 30, 30)
   private Color axisColor = new Color(199,206,216);
   // Draw options - these are default values if no property is entered in user.properties
   // List of possible properties (TODO: The explanation must be written in readme file)
   // jmeterPlugin.drawGradient=(true/false)
   // jmeterPlugin.neverDrawFinalZeroingLines=(true/false)
   // jmeterPlugin.optimizeYAxis=(true/false)
   private static boolean drawGradient = true;
   private static boolean neverDrawFinalZeroingLines = false;
   private static boolean optimizeYAxis = true;
   // If user entered configuration items in user.properties, override default values.
   static {
      String cfgDrawGradient = JMeterUtils.getProperty("jmeterPlugin.drawGradient");
      if(cfgDrawGradient != null) {
         GraphPanelChart.drawGradient = "true".equalsIgnoreCase(cfgDrawGradient);
      }
      String cfgNeverDrawFinalZeroingLines = JMeterUtils.getProperty("jmeterPlugin.neverDrawFinalZeroingLines");
      if(cfgNeverDrawFinalZeroingLines != null) {
         GraphPanelChart.neverDrawFinalZeroingLines = "true".equalsIgnoreCase(cfgNeverDrawFinalZeroingLines);
      }
      String cfgOptimizeYAxis = JMeterUtils.getProperty("jmeterPlugin.optimizeYAxis");
      if(cfgOptimizeYAxis != null) {
         GraphPanelChart.optimizeYAxis = "true".equalsIgnoreCase(cfgOptimizeYAxis);
      }
   }
   /**
    * Creates new chart object with default parameters: white background,
    * beveled border, "#.#" axis label formats and an empty layout.
    */
   public GraphPanelChart()
   {
      setBackground(Color.white);
      setBorder(BorderFactory.createBevelBorder(BevelBorder.LOWERED, Color.lightGray, Color.darkGray));
      yAxisLabelRenderer = new NumberRenderer("#.#");
      xAxisLabelRenderer = new NumberRenderer("#.#");
      legendRect = new Rectangle();
      yAxisRect = new Rectangle();
      xAxisRect = new Rectangle();
      chartRect = new Rectangle();
      setDefaultDimensions();
      // Context menu (e.g. copy image to clipboard)
      registerPopup();
   }
   /**
    * Sets the chart type. Percentage charts pin the Y axis to the fixed 0..100 range.
    *
    * @param type one of CHART_DEFAULT, CHART_PERCENTAGE
    */
   public void setChartType(int type) {
      chartType = type;
   }
   /**
    * Scans all rows that are drawn on the chart and computes the X/Y ranges to
    * display, applying the forced minimum X (if set) and Y axis optimization.
    * Negative Y values are clamped to 0, as only positive values are drawn.
    */
   private void getMinMaxDataValues()
   {
      maxXVal = 0L;
      maxYVal = 0L;
      minXVal = Long.MAX_VALUE;
      minYVal = Double.MAX_VALUE;
      Iterator<Entry<String, AbstractGraphRow>> it = rows.entrySet().iterator();
      Entry<String, AbstractGraphRow> row = null;
      AbstractGraphRow rowValue;
      while (it.hasNext())
      {
         row = it.next();
         rowValue = row.getValue();
         if (!rowValue.isDrawOnChart())
         {
            continue;
         }
         if (rowValue.getMaxY() > maxYVal)
         {
            maxYVal = rowValue.getMaxY();
         }
         if (rowValue.getMaxX() > maxXVal)
         {
            maxXVal = rowValue.getMaxX();
         }
         if (rowValue.getMinX() < minXVal)
         {
            minXVal = rowValue.getMinX();
         }
         if(rowValue.getMinY() < minYVal)
         {
            //we draw only positives values
            minYVal = rowValue.getMinY() >= 0 ? rowValue.getMinY() : 0;
         }
      }
      //maxYVal *= 1 + (double) 1 / (double) gridLinesCount;
      if (forcedMinX >= 0L)
      {
         minXVal = forcedMinX;
      }
      //prevent X axis not initialized in case of no row displayed
      //we use last known row
      if(minXVal == Long.MAX_VALUE && maxXVal == 0L && row != null) {
         maxXVal = row.getValue().getMaxX();
         minXVal = row.getValue().getMinX();
         minYVal = 0;
         maxYVal = 10;
      } else if(optimizeYAxis)
      {
         computeChartSteps();
      } else
      {
         minYVal = 0;
      }
   }
    /**
     * Computes minY and a grid step so the Y axis shows "nice" round labels.
     * Percentage charts are pinned to 0..100. Otherwise the method searches
     * for the smallest step of the form 10^pow * factor (factor in 0.5
     * increments above 10, integer increments below) that covers the real
     * data step, then aligns minY down to a multiple of that step.
     */
    private void computeChartSteps() {
        //percentage charts use a fixed 0..100 range
        if(chartType == GraphPanelChart.CHART_PERCENTAGE) {
            minYVal = 0;
            maxYVal = 100;
            return;
        }
        //try to find the best range...
        //first, avoid special cases where maxY is equal or close to minY
        if (maxYVal - minYVal < 0.1)
        {
            maxYVal = minYVal + 1;
        }
        //exact (unrounded) step implied by the data range
        double step = (maxYVal - minYVal) / gridLinesCount;
        int pow = -1;
        double factor = -1;
        boolean found = false;
        double testStep;
        double testFactor;
        //find the smallest "nice" step that is >= the real step
        while(!found)
        {
            pow++;
            //for small ranges (<10), don't use the .5 factor:
            //integer steps are easier to read
            if(pow > 0)
            {
                testFactor = 0.5;
            } else
            {
                testFactor = 1;
            }
            for (double f = 0; f <= 5; f = f + testFactor) {
                testStep = Math.pow(10, pow) * f;
                if(testStep >= step) {
                    factor = f;
                    found = true;
                    break;
                }
            }
        }
        //first proposal
        double foundStep = Math.pow(10, pow) * factor;
        //we shift to the closest lower minYVal to align with the step
        minYVal = minYVal - minYVal % foundStep;
        //check if the step is still big enough with minY trimmed; if not, use the next factor
        if(minYVal + foundStep * gridLinesCount < maxYVal)
        {
            foundStep = Math.pow(10, pow) * (factor + (pow > 0 ? 0.5:1));
        }
        //last visual optimization: round minYVal down to coarser multiples
        //while the full grid still covers maxYVal
        double trim = 10;
        while((minYVal-minYVal%trim) + foundStep * gridLinesCount >= maxYVal && minYVal > 0)
        {
            minYVal = minYVal-minYVal%trim;
            trim = trim*10;
        }
        //final calculation
        maxYVal = minYVal + foundStep * gridLinesCount;
    }
private void setDefaultDimensions()
{
chartRect.setBounds(spacing, spacing, getWidth() - spacing * 2, getHeight() - spacing * 2);
legendRect.setBounds(zeroRect);
xAxisRect.setBounds(zeroRect);
yAxisRect.setBounds(zeroRect);
}
private void calculateYAxisDimensions(FontMetrics fm)
{
// TODO: middle value labels often wider than max
yAxisLabelRenderer.setValue(maxYVal);
int axisWidth = fm.stringWidth(yAxisLabelRenderer.getText()) + spacing * 3;
yAxisRect.setBounds(chartRect.x, chartRect.y, axisWidth, chartRect.height);
chartRect.setBounds(chartRect.x + axisWidth, chartRect.y, chartRect.width - axisWidth, chartRect.height);
}
    /**
     * Reserves vertical room below the chart for X axis labels and trims
     * the chart's right edge so the last (centered) label fits, then keeps
     * the Y axis rectangle's height in sync with the shrunken chart.
     *
     * @param fm metrics of the font used to render axis labels
     */
    private void calculateXAxisDimensions(FontMetrics fm)
    {
        // FIXME: first value on X axis may take negative X coord,
        // we need to handle this and make Y axis wider
        int axisHeight = fm.getHeight() + spacing;
        xAxisLabelRenderer.setValue(maxXVal);
        // half the widest (last) label would stick out past the chart edge
        int axisEndSpace = fm.stringWidth(xAxisLabelRenderer.getText()) / 2;
        xAxisRect.setBounds(chartRect.x, chartRect.y + chartRect.height - axisHeight, chartRect.width, axisHeight);
        chartRect.setBounds(chartRect.x, chartRect.y, chartRect.width - axisEndSpace, chartRect.height - axisHeight);
        yAxisRect.setBounds(yAxisRect.x, yAxisRect.y, yAxisRect.width, chartRect.height);
    }
@Override
public void paintComponent(Graphics g)
{
super.paintComponent(g);
BufferedImage image = new BufferedImage(this.getWidth(), this.getHeight(), BufferedImage.TYPE_INT_ARGB);
Graphics2D g2d = image.createGraphics();
g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
drawPanel(g2d);
g.drawImage(image, 0, 0, this);
}
    /**
     * Renders the full chart into the given graphics: background (plain
     * white or gradient), watermark, then either an error message, a
     * "waiting for samples" hint, or the legend, axes and data rows.
     *
     * @param g graphics to draw on (an off-screen buffer — see paintComponent/getImage)
     */
    private void drawPanel(Graphics2D g)
    {
        g.setColor(Color.white);
        // optional vertical white-to-gradientColor background fill
        if(GraphPanelChart.drawGradient) {
            GradientPaint gdp = new GradientPaint(0,0, Color.white, 0, getHeight(), gradientColor);
            g.setPaint(gdp);
        }
        g.fillRect(0, 0, getWidth(), getHeight());
        paintAd(g);
        // an error message replaces the chart entirely, centered in red
        if (errorMessage != null) {
            g.setColor(Color.RED);
            g.drawString(errorMessage,
                    getWidth() / 2 - g.getFontMetrics(g.getFont()).stringWidth(errorMessage) / 2,
                    getHeight() / 2);
            return;
        }
        // nothing to plot yet: show the placeholder text and bail out
        if (rows.isEmpty())
        {
            g.setColor(Color.BLACK);
            g.drawString(NO_SAMPLES,
                    getWidth() / 2 - g.getFontMetrics(g.getFont()).stringWidth(NO_SAMPLES) / 2,
                    getHeight() / 2);
            return;
        }
        setDefaultDimensions();
        getMinMaxDataValues();
        // painting order matters: legend and axes carve space out of
        // chartRect before the data rows are drawn into what remains
        try
        {
            paintLegend(g);
            calculateYAxisDimensions(g.getFontMetrics(g.getFont()));
            calculateXAxisDimensions(g.getFontMetrics(g.getFont()));
            paintYAxis(g);
            paintXAxis(g);
            paintChart(g);
        }
        catch (Exception e)
        {
            log.error("Error in paintComponent", e);
        }
    }
    /**
     * Paints the legend (color box + label per visible row) across the top
     * of the chart area, wrapping to new lines on overflow, then shrinks
     * chartRect by the height the legend consumed.
     *
     * @param g graphics context to paint on
     */
    private void paintLegend(Graphics g)
    {
        FontMetrics fm = g.getFontMetrics(g.getFont());
        // legend color boxes are square, sized to the font height
        int rectH = fm.getHeight();
        int rectW = rectH;
        Iterator<Entry<String, AbstractGraphRow>> it = rows.entrySet().iterator();
        Entry<String, AbstractGraphRow> row;
        int currentX = chartRect.x;
        int currentY = chartRect.y;
        // reserve one legend line up front if there is at least one row
        int legendHeight = it.hasNext() ? rectH + spacing : 0;
        while (it.hasNext())
        {
            row = it.next();
            if (!row.getValue().isShowInLegend() || !row.getValue().isDrawOnChart())
            {
                continue;
            }
            // wrap row if overflowed
            if (currentX + rectW + spacing / 2 + fm.stringWidth(row.getKey()) > getWidth())
            {
                currentY += rectH + spacing / 2;
                legendHeight += rectH + spacing / 2;
                currentX = chartRect.x;
            }
            // draw legend color box
            g.setColor(row.getValue().getColor());
            g.fillRect(currentX, currentY, rectW, rectH);
            g.setColor(Color.black);
            g.drawRect(currentX, currentY, rectW, rectH);
            // draw legend item label next to the box
            currentX += rectW + spacing / 2;
            g.drawString(row.getKey(), currentX, (int) (currentY + rectH * 0.9));
            currentX += fm.stringWidth(row.getKey()) + spacing;
        }
        // record the legend area and push the chart area down below it
        legendRect.setBounds(chartRect.x, chartRect.y, chartRect.width, legendHeight);
        chartRect.setBounds(chartRect.x, chartRect.y + legendHeight + spacing, chartRect.width, chartRect.height - legendHeight - spacing);
    }
    /**
     * Paints the Y axis: tick markers on the chart's left edge, horizontal
     * grid lines (solid baseline, dashed and shifted for the rest) and a
     * value label per grid line, interpolated between minYVal and maxYVal.
     *
     * @param g graphics context to paint on
     */
    private void paintYAxis(Graphics g)
    {
        FontMetrics fm = g.getFontMetrics(g.getFont());
        String valueLabel;
        int labelXPos;
        int gridLineY;
        // horizontal start offset for 2nd and subsequent grid lines
        int shift = 0;
        // remember the stroke so the dashed grid style can be undone
        Stroke oldStroke = ((Graphics2D) g).getStroke();
        //draw tick markers straddling the chart's left edge
        g.setColor(axisColor);
        for (int n = 0; n <= gridLinesCount; n++)
        {
            gridLineY = chartRect.y + (int) ((gridLinesCount - n) * (double) chartRect.height / gridLinesCount);
            g.drawLine(chartRect.x - 3, gridLineY, chartRect.x + 3, gridLineY);
        }
        for (int n = 0; n <= gridLinesCount; n++)
        {
            //draw 2nd and subsequent grid lines dashed and shifted right
            if(n!=0) {
                ((Graphics2D) g).setStroke(dashStroke);
                shift = 7;
            }
            gridLineY = chartRect.y + (int) ((gridLinesCount - n) * (double) chartRect.height / gridLinesCount);
            // draw grid line with tick
            g.setColor(axisColor);
            g.drawLine(chartRect.x + shift, gridLineY, chartRect.x + chartRect.width, gridLineY);
            g.setColor(Color.black);
            // draw label: n-th interpolation between minYVal and maxYVal
            yAxisLabelRenderer.setValue((minYVal * gridLinesCount + n * (maxYVal-minYVal)) / gridLinesCount);
            valueLabel = yAxisLabelRenderer.getText();
            // right-align the label inside the Y axis strip
            labelXPos = yAxisRect.x + yAxisRect.width - fm.stringWidth(valueLabel) - spacing - spacing / 2;
            g.drawString(valueLabel, labelXPos, gridLineY + fm.getAscent() / 2);
        }
        //restore stroke
        ((Graphics2D) g).setStroke(oldStroke);
    }
    /**
     * Paints the X axis: tick markers along the chart's bottom edge,
     * vertical grid lines (solid first, dashed for the rest), a value label
     * per grid line interpolated between minXVal and maxXVal, and an
     * optional gray "current X" cursor line.
     *
     * @param g graphics context to paint on
     */
    private void paintXAxis(Graphics g)
    {
        FontMetrics fm = g.getFontMetrics(g.getFont());
        String valueLabel;
        int labelXPos;
        int gridLineX;
        // vertical start offset for 2nd and subsequent grid lines
        int shift = 0;
        // remember the stroke so the dashed grid style can be undone
        Stroke oldStroke = ((Graphics2D) g).getStroke();
        g.setColor(axisColor);
        //draw tick markers straddling the chart's bottom edge
        for (int n = 0; n <= gridLinesCount; n++)
        {
            gridLineX = chartRect.x + (int) (n * ((double) chartRect.width / gridLinesCount));
            g.drawLine(gridLineX, chartRect.y + chartRect.height - 3, gridLineX, chartRect.y + chartRect.height + 3);
        }
        for (int n = 0; n <= gridLinesCount; n++)
        {
            //draw 2nd and subsequent grid lines dashed and shifted up
            if(n!=0) {
                ((Graphics2D) g).setStroke(dashStroke);
                shift = 7;
            }
            gridLineX = chartRect.x + (int) (n * ((double) chartRect.width / gridLinesCount));
            // draw grid line with tick
            g.setColor(axisColor);
            g.drawLine(gridLineX, chartRect.y + chartRect.height - shift, gridLineX, chartRect.y);
            g.setColor(Color.black);
            // draw label; (double) cast keeps the division from truncating
            xAxisLabelRenderer.setValue(minXVal + n * (double)(maxXVal - minXVal) / gridLinesCount);
            valueLabel = xAxisLabelRenderer.getText();
            // center the label under its tick
            labelXPos = gridLineX - fm.stringWidth(valueLabel) / 2;
            g.drawString(valueLabel, labelXPos, xAxisRect.y + fm.getAscent() + spacing);
        }
        //restore stroke
        ((Graphics2D) g).setStroke(oldStroke);
        // optional vertical cursor marking the current X position
        if (drawCurrentX)
        {
            gridLineX = chartRect.x + (int) ((currentXVal - minXVal) * (double) chartRect.width / (maxXVal - minXVal));
            g.setColor(Color.GRAY);
            g.drawLine(gridLineX, chartRect.y, gridLineX, chartRect.y + chartRect.height);
            g.setColor(Color.black);
        }
    }
private void paintChart(Graphics g)
{
g.setColor(Color.yellow);
Iterator<Entry<String, AbstractGraphRow>> it = rows.entrySet().iterator();
while (it.hasNext())
{
Entry<String, AbstractGraphRow> row = it.next();
if (row.getValue().isDrawOnChart())
{
paintRow(g, row.getValue());
}
}
}
    /**
     * Paints one data row: connecting lines between consecutive points
     * (optionally thick), optional value labels and optional circular
     * markers, plus optional zeroing lines at the start/end of the series.
     *
     * @param g graphics context to paint on
     * @param row the series to render
     */
    private void paintRow(Graphics g, AbstractGraphRow row)
    {
        FontMetrics fm = g.getFontMetrics(g.getFont());
        Iterator<Entry<Long, AbstractGraphPanelChartElement>> it = row.iterator();
        Entry<Long, AbstractGraphPanelChartElement> element;
        int radius = row.getMarkerSize();
        int x, y;
        // prevX < 0 means "no previous point yet" and suppresses the first
        // segment; with zeroing lines enabled we start from the chart edge
        int prevX = drawStartFinalZeroingLines ? chartRect.x : -1;
        int prevY = chartRect.y + chartRect.height;
        // pixels per data unit on each axis (0 guards degenerate ranges)
        final double dxForDVal = (maxXVal <= minXVal) ? 0 : (double) chartRect.width / (maxXVal - minXVal);
        final double dyForDVal = (maxYVal <= minYVal) ? 0 : (double) chartRect.height / (maxYVal - minYVal);
        Stroke oldStroke = null;
        if(row.isDrawThickLines()) {
            oldStroke = ((Graphics2D) g).getStroke();
        }
        while (it.hasNext())
        {
            element = it.next();
            if (!row.isDrawOnChart())
            {
                continue;
            }
            // map the data point into chart pixel coordinates
            x = chartRect.x + (int) ((element.getKey() - minXVal) * dxForDVal);
            AbstractGraphPanelChartElement elementValue = (AbstractGraphPanelChartElement) element.getValue();
            y = chartRect.y + chartRect.height - (int) ((elementValue.getValue() - minYVal) * dyForDVal);
            if(row.isDrawThickLines()) {
                ((Graphics2D) g).setStroke(thickStroke);
            }
            // draw lines
            if (row.isDrawLine())
            {
                if (prevX > 0)
                {
                    g.setColor(row.getColor());
                    g.drawLine(prevX, prevY, x, y);
                }
                prevX = x;
                prevY = y;
            }
            if(row.isDrawThickLines()) {
                ((Graphics2D) g).setStroke(oldStroke);
            }
            if (row.isDrawValueLabel())
            {
                g.setColor(Color.DARK_GRAY);
                yAxisLabelRenderer.setValue(elementValue.getValue());
                g.drawString(yAxisLabelRenderer.getText(),
                        x + row.getMarkerSize() + spacing,
                        y + fm.getAscent() / 2);
            }
            // draw markers
            if (radius != AbstractGraphRow.MARKER_SIZE_NONE)
            {
                g.setColor(row.getColor());
                g.fillOval(x - radius, y - radius, (radius) * 2, (radius) * 2);
                //g.setColor(Color.black);
                //g.drawOval(x - radius, y - radius, radius * 2, radius * 2);
            }
        }
        // draw final zeroing line from the last point down to the baseline
        if (row.isDrawLine() && drawStartFinalZeroingLines)
        {
            if(row.isDrawThickLines()) {
                ((Graphics2D) g).setStroke(thickStroke);
            }
            g.setColor(row.getColor());
            g.drawLine(prevX, prevY, (int) (prevX + dxForDVal), chartRect.y + chartRect.height);
            if(row.isDrawThickLines()) {
                ((Graphics2D) g).setStroke(oldStroke);
            }
        }
    }
    /**
     * Supplies the data model: a map of legend label to row data that the
     * chart will render on the next repaint.
     *
     * @param aRows rows keyed by their legend label
     */
    public void setRows(AbstractMap<String, AbstractGraphRow> aRows)
    {
        rows = aRows;
    }
    /**
     * Replaces the renderer used to format Y axis labels and in-chart
     * value labels.
     *
     * @param yAxisLabelRenderer the yAxisLabelRenderer to set
     */
    public void setyAxisLabelRenderer(NumberRenderer yAxisLabelRenderer)
    {
        this.yAxisLabelRenderer = yAxisLabelRenderer;
    }
    /**
     * Replaces the renderer used to format X axis labels.
     *
     * @param xAxisLabelRenderer the xAxisLabelRenderer to set
     */
    public void setxAxisLabelRenderer(NumberRenderer xAxisLabelRenderer)
    {
        this.xAxisLabelRenderer = xAxisLabelRenderer;
    }
    /**
     * Enables the start/final zeroing lines that tie the series to the
     * chart baseline; the jmeterPlugin.neverDrawFinalZeroingLines property
     * (see static initializer) overrides this to false globally.
     *
     * @param drawFinalZeroingLines the drawFinalZeroingLines to set
     */
    public void setDrawFinalZeroingLines(boolean drawFinalZeroingLines)
    {
        this.drawStartFinalZeroingLines = drawFinalZeroingLines && !neverDrawFinalZeroingLines;
    }
    /**
     * Toggles the gray vertical cursor line marking the current X value
     * (see paintXAxis).
     *
     * @param drawCurrentX the drawCurrentX to set
     */
    public void setDrawCurrentX(boolean drawCurrentX)
    {
        this.drawCurrentX = drawCurrentX;
    }
    /**
     * Updates the X value highlighted by the cursor line when
     * {@link #setDrawCurrentX(boolean)} is enabled.
     *
     * @param currentX the currentX to set
     */
    public void setCurrentX(long currentX)
    {
        this.currentXVal = currentX;
    }
    /**
     * Forces the X axis lower bound; any non-negative value overrides the
     * minimum computed from the data (see getMinMaxDataValues).
     *
     * @param i forced minimum X value, or a negative value to disable
     */
    public void setForcedMinX(int i)
    {
        forcedMinX = i;
    }
//Paint the add the same color of the axis but with transparency
private void paintAd(Graphics2D g)
{
Font oldFont = g.getFont();
g.setFont(g.getFont().deriveFont(10F));
g.setColor(axisColor);
Composite oldComposite = g.getComposite();
g.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.7f));
g.drawString(AD_TEXT,
getWidth() - g.getFontMetrics().stringWidth(AD_TEXT) - spacing,
g.getFontMetrics().getHeight() - spacing + 1);
g.setComposite(oldComposite);
g.setFont(oldFont);
}
/*
* Clear error messages
*/
public void clearErrorMessage() {
errorMessage = null;
}
/*
* Set error message if not null and not empty
* @param msg the error message to set
*/
public void setErrorMessage(String msg) {
if(msg != null && msg.trim().length()>0) {
errorMessage = msg;
}
}
    /**
     * {@link java.awt.datatransfer.ClipboardOwner} callback invoked when
     * another application takes the clipboard; this chart keeps no
     * clipboard state, so nothing needs to happen.
     */
    @Override
    public void lostOwnership(Clipboard clipboard, Transferable contents)
    {
        // intentionally empty
    }
private Image getImage()
{
BufferedImage image = new BufferedImage(this.getWidth(), this.getHeight(), BufferedImage.TYPE_INT_ARGB);
Graphics2D g2 = image.createGraphics();
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
this.drawPanel(g2);
return image;
}
/**
* Thanks to stephane.hoblingre
*/
private void registerPopup()
{
JPopupMenu popup = new JPopupMenu();
this.setComponentPopupMenu(popup);
JMenuItem item = new JMenuItem("Copy Image to Clipboard");
item.addActionListener(new CopyAction());
popup.add(item);
}
    /**
     * Popup menu action that renders the chart to an image and places it
     * on the system clipboard as {@link DataFlavor#imageFlavor}.
     */
    private class CopyAction
            implements ActionListener
    {
        @Override
        public void actionPerformed(final ActionEvent e)
        {
            Clipboard clipboard = getToolkit().getSystemClipboard();
            // the image is rendered lazily, only when a consumer asks for it
            Transferable transferable = new Transferable()
            {
                @Override
                public Object getTransferData(DataFlavor flavor)
                {
                    if (isDataFlavorSupported(flavor))
                    {
                        return getImage();
                    }
                    return null;
                }

                @Override
                public DataFlavor[] getTransferDataFlavors()
                {
                    return new DataFlavor[]
                            {
                                DataFlavor.imageFlavor
                            };
                }

                @Override
                public boolean isDataFlavorSupported(DataFlavor flavor)
                {
                    return DataFlavor.imageFlavor.equals(flavor);
                }
            };
            // the enclosing chart is the ClipboardOwner (see lostOwnership)
            clipboard.setContents(transferable, GraphPanelChart.this);
        }
    }
}
| src/kg/apc/jmeter/charting/GraphPanelChart.java | package kg.apc.jmeter.charting;
import java.awt.AlphaComposite;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Composite;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.GradientPaint;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Stroke;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.ClipboardOwner;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.util.AbstractMap;
import java.util.Iterator;
import java.util.Map.Entry;
import javax.swing.BorderFactory;
import javax.swing.JComponent;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import javax.swing.border.BevelBorder;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.gui.NumberRenderer;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/**
*
* @author apc
*/
public class GraphPanelChart
extends JComponent
implements ClipboardOwner
{
private static final String AD_TEXT = "http://apc.kg";
private static final String NO_SAMPLES = "Waiting for samples...";
private static final int spacing = 5;
/*
* Special type of graph were minY is forced to 0 and maxY is forced to 100
* to display percentage charts (eg cpu monitoring)
*/
public static final int CHART_PERCENTAGE = 0;
public static final int CHART_DEFAULT = -1;
private static final Logger log = LoggingManager.getLoggerForClass();
private Rectangle legendRect;
private Rectangle xAxisRect;
private Rectangle yAxisRect;
private Rectangle chartRect;
private static final Rectangle zeroRect = new Rectangle();
private AbstractMap<String, AbstractGraphRow> rows;
private double maxYVal;
private double minYVal;
private long maxXVal;
private long minXVal;
private long currentXVal;
private static final int gridLinesCount = 10;
private NumberRenderer yAxisLabelRenderer;
private NumberRenderer xAxisLabelRenderer;
private boolean drawStartFinalZeroingLines = false;
private boolean drawCurrentX = false;
private int forcedMinX = -1;
private int chartType = CHART_DEFAULT;
// The stroke used to paint Graph's dashed lines
private Stroke dashStroke = new BasicStroke(
1.0f, // Width
BasicStroke.CAP_SQUARE, // End cap
BasicStroke.JOIN_MITER, // Join style
10.0f, // Miter limit
new float[] {1.0f,4.0f}, // Dash pattern
0.0f); // Dash phase
// The stroke to paint thick Graph rows
private Stroke thickStroke = new BasicStroke(
AbstractGraphRow.LINE_THICKNESS_BIG,
BasicStroke.CAP_BUTT,
BasicStroke.JOIN_BEVEL);
// Message display in graphs. Used for perfmon error messages
private String errorMessage = null;
// Chart's gradient background end color
private Color gradientColor = new Color(229,236,246);
// Chart's Axis Color. For good results, use gradient color - (30, 30, 30)
private Color axisColor = new Color(199,206,216);
// Draw options - these are default values if no property is entered in user.properties
// List of possible properties (TODO: The explaination must be written in readme file)
// jmeterPlugin.drawGradient=(true/false)
// jmeterPlugin.neverDrawFinalZeroingLines=(true/false)
// jmeterPlugin.optimizeYAxis=(true/false)
// note to Andrey: Feel free to decide the default value!
private static boolean drawGradient = true;
private static boolean neverDrawFinalZeroingLines = false;
private static boolean optimizeYAxis = true;
// If user entered configuration items in user.properties, overide default values.
static {
String cfgDrawGradient = JMeterUtils.getProperty("jmeterPlugin.drawGradient");
if(cfgDrawGradient != null) {
GraphPanelChart.drawGradient = "true".equalsIgnoreCase(cfgDrawGradient);
}
String cfgNeverDrawFinalZeroingLines = JMeterUtils.getProperty("jmeterPlugin.neverDrawFinalZeroingLines");
if(cfgNeverDrawFinalZeroingLines != null) {
GraphPanelChart.neverDrawFinalZeroingLines = "true".equalsIgnoreCase(cfgNeverDrawFinalZeroingLines);
}
String cfgOptimizeYAxis = JMeterUtils.getProperty("jmeterPlugin.optimizeYAxis");
if(cfgOptimizeYAxis != null) {
GraphPanelChart.optimizeYAxis = "true".equalsIgnoreCase(cfgOptimizeYAxis);
}
}
/**
* Creates new chart object with default parameters
*/
public GraphPanelChart()
{
setBackground(Color.white);
setBorder(BorderFactory.createBevelBorder(BevelBorder.LOWERED, Color.lightGray, Color.darkGray));
yAxisLabelRenderer = new NumberRenderer("#.#");
xAxisLabelRenderer = new NumberRenderer("#.#");
legendRect = new Rectangle();
yAxisRect = new Rectangle();
xAxisRect = new Rectangle();
chartRect = new Rectangle();
setDefaultDimensions();
registerPopup();
}
public void setChartType(int type) {
chartType = type;
}
private void getMinMaxDataValues()
{
maxXVal = 0L;
maxYVal = 0L;
minXVal = Long.MAX_VALUE;
minYVal = Double.MAX_VALUE;
Iterator<Entry<String, AbstractGraphRow>> it = rows.entrySet().iterator();
Entry<String, AbstractGraphRow> row = null;
AbstractGraphRow rowValue;
while (it.hasNext())
{
row = it.next();
rowValue = row.getValue();
if (!rowValue.isDrawOnChart())
{
continue;
}
if (rowValue.getMaxY() > maxYVal)
{
maxYVal = rowValue.getMaxY();
}
if (rowValue.getMaxX() > maxXVal)
{
maxXVal = rowValue.getMaxX();
}
if (rowValue.getMinX() < minXVal)
{
minXVal = rowValue.getMinX();
}
if(rowValue.getMinY() < minYVal)
{
//we draw only positives values
minYVal = rowValue.getMinY() >= 0 ? rowValue.getMinY() : 0;
}
}
//maxYVal *= 1 + (double) 1 / (double) gridLinesCount;
if (forcedMinX >= 0L)
{
minXVal = forcedMinX;
}
//prevent X axis not initialized in case of no row displayed
//we use last known row
if(minXVal == Long.MAX_VALUE && maxXVal == 0L && row != null) {
maxXVal = row.getValue().getMaxX();
minXVal = row.getValue().getMinX();
minYVal = 0;
maxYVal = 10;
} else if(optimizeYAxis)
{
computeChartSteps();
} else
{
minYVal = 0;
}
}
/**
* compute minY and step value to have better readable charts
*/
private void computeChartSteps() {
//if special type
if(chartType == GraphPanelChart.CHART_PERCENTAGE) {
minYVal = 0;
maxYVal = 100;
return;
}
//try to find the best range...
//first, avoid special cases where maxY equal or close to minY
if (maxYVal - minYVal < 0.1)
{
maxYVal = minYVal + 1;
}
//real step
double step = (maxYVal - minYVal) / gridLinesCount;
int pow = -1;
double factor = -1;
boolean found = false;
double testStep;
double testFactor;
//find a step close to the real one
while(!found)
{
pow++;
//for small range (<10), don't use .5 factor.
//we try to find integer steps as it is more easy to read
if(pow > 0)
{
testFactor = 0.5;
} else
{
testFactor = 1;
}
for (double f = 0; f <= 5; f = f + testFactor) {
testStep = Math.pow(10, pow) * f;
if(testStep >= step) {
factor = f;
found = true;
break;
}
}
}
//first proposal
double foundStep = Math.pow(10, pow) * factor;
//we shit to the closest lower minval to align with the step
minYVal = minYVal - minYVal % foundStep;
//check if step is still good with minY trimed. If not, use next factor.
if(minYVal + foundStep * gridLinesCount < maxYVal)
{
foundStep = Math.pow(10, pow) * (factor + (pow > 0 ? 0.5:1));
}
//last visual optimization: find the optimal minYVal
double trim = 10;
while((minYVal-minYVal%trim) + foundStep * gridLinesCount >= maxYVal && minYVal > 0)
{
minYVal = minYVal-minYVal%trim;
trim = trim*10;
}
//final calculation
maxYVal = minYVal + foundStep * gridLinesCount;
}
private void setDefaultDimensions()
{
chartRect.setBounds(spacing, spacing, getWidth() - spacing * 2, getHeight() - spacing * 2);
legendRect.setBounds(zeroRect);
xAxisRect.setBounds(zeroRect);
yAxisRect.setBounds(zeroRect);
}
private void calculateYAxisDimensions(FontMetrics fm)
{
// TODO: middle value labels often wider than max
yAxisLabelRenderer.setValue(maxYVal);
int axisWidth = fm.stringWidth(yAxisLabelRenderer.getText()) + spacing * 3;
yAxisRect.setBounds(chartRect.x, chartRect.y, axisWidth, chartRect.height);
chartRect.setBounds(chartRect.x + axisWidth, chartRect.y, chartRect.width - axisWidth, chartRect.height);
}
private void calculateXAxisDimensions(FontMetrics fm)
{
// FIXME: first value on X axis may take negative X coord,
// we need to handle this and make Y axis wider
int axisHeight = fm.getHeight() + spacing;
xAxisLabelRenderer.setValue(maxXVal);
int axisEndSpace = fm.stringWidth(xAxisLabelRenderer.getText()) / 2;
xAxisRect.setBounds(chartRect.x, chartRect.y + chartRect.height - axisHeight, chartRect.width, axisHeight);
chartRect.setBounds(chartRect.x, chartRect.y, chartRect.width - axisEndSpace, chartRect.height - axisHeight);
yAxisRect.setBounds(yAxisRect.x, yAxisRect.y, yAxisRect.width, chartRect.height);
}
@Override
public void paintComponent(Graphics g)
{
super.paintComponent(g);
BufferedImage image = new BufferedImage(this.getWidth(), this.getHeight(), BufferedImage.TYPE_INT_ARGB);
Graphics2D g2d = image.createGraphics();
g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
drawPanel(g2d);
g.drawImage(image, 0, 0, this);
}
private void drawPanel(Graphics2D g)
{
g.setColor(Color.white);
if(GraphPanelChart.drawGradient) {
GradientPaint gdp = new GradientPaint(0,0, Color.white, 0, getHeight(), gradientColor);
g.setPaint(gdp);
}
g.fillRect(0, 0, getWidth(), getHeight());
paintAd(g);
if (errorMessage != null) {
g.setColor(Color.RED);
g.drawString(errorMessage,
getWidth() / 2 - g.getFontMetrics(g.getFont()).stringWidth(errorMessage) / 2,
getHeight() / 2);
return;
}
if (rows.isEmpty())
{
g.setColor(Color.BLACK);
g.drawString(NO_SAMPLES,
getWidth() / 2 - g.getFontMetrics(g.getFont()).stringWidth(NO_SAMPLES) / 2,
getHeight() / 2);
return;
}
setDefaultDimensions();
getMinMaxDataValues();
try
{
paintLegend(g);
calculateYAxisDimensions(g.getFontMetrics(g.getFont()));
calculateXAxisDimensions(g.getFontMetrics(g.getFont()));
paintYAxis(g);
paintXAxis(g);
paintChart(g);
}
catch (Exception e)
{
log.error("Error in paintComponent", e);
}
}
private void paintLegend(Graphics g)
{
FontMetrics fm = g.getFontMetrics(g.getFont());
int rectH = fm.getHeight();
int rectW = rectH;
Iterator<Entry<String, AbstractGraphRow>> it = rows.entrySet().iterator();
Entry<String, AbstractGraphRow> row;
int currentX = chartRect.x;
int currentY = chartRect.y;
int legendHeight = it.hasNext() ? rectH + spacing : 0;
while (it.hasNext())
{
row = it.next();
if (!row.getValue().isShowInLegend() || !row.getValue().isDrawOnChart())
{
continue;
}
// wrap row if overflowed
if (currentX + rectW + spacing / 2 + fm.stringWidth(row.getKey()) > getWidth())
{
currentY += rectH + spacing / 2;
legendHeight += rectH + spacing / 2;
currentX = chartRect.x;
}
// draw legend color box
g.setColor(row.getValue().getColor());
g.fillRect(currentX, currentY, rectW, rectH);
g.setColor(Color.black);
g.drawRect(currentX, currentY, rectW, rectH);
// draw legend item label
currentX += rectW + spacing / 2;
g.drawString(row.getKey(), currentX, (int) (currentY + rectH * 0.9));
currentX += fm.stringWidth(row.getKey()) + spacing;
}
legendRect.setBounds(chartRect.x, chartRect.y, chartRect.width, legendHeight);
chartRect.setBounds(chartRect.x, chartRect.y + legendHeight + spacing, chartRect.width, chartRect.height - legendHeight - spacing);
}
private void paintYAxis(Graphics g)
{
FontMetrics fm = g.getFontMetrics(g.getFont());
String valueLabel;
int labelXPos;
int gridLineY;
// shift 2nd and more lines
int shift = 0;
// for strokes swapping
Stroke oldStroke = ((Graphics2D) g).getStroke();
//draw markers
g.setColor(axisColor);
for (int n = 0; n <= gridLinesCount; n++)
{
gridLineY = chartRect.y + (int) ((gridLinesCount - n) * (double) chartRect.height / gridLinesCount);
g.drawLine(chartRect.x - 3, gridLineY, chartRect.x + 3, gridLineY);
}
for (int n = 0; n <= gridLinesCount; n++)
{
//draw 2nd and more axis dashed and shifted
if(n!=0) {
((Graphics2D) g).setStroke(dashStroke);
shift = 7;
}
gridLineY = chartRect.y + (int) ((gridLinesCount - n) * (double) chartRect.height / gridLinesCount);
// draw grid line with tick
g.setColor(axisColor);
g.drawLine(chartRect.x + shift, gridLineY, chartRect.x + chartRect.width, gridLineY);
g.setColor(Color.black);
// draw label
yAxisLabelRenderer.setValue((minYVal * gridLinesCount + n * (maxYVal-minYVal)) / gridLinesCount);
valueLabel = yAxisLabelRenderer.getText();
labelXPos = yAxisRect.x + yAxisRect.width - fm.stringWidth(valueLabel) - spacing - spacing / 2;
g.drawString(valueLabel, labelXPos, gridLineY + fm.getAscent() / 2);
}
//restore stroke
((Graphics2D) g).setStroke(oldStroke);
}
private void paintXAxis(Graphics g)
{
FontMetrics fm = g.getFontMetrics(g.getFont());
String valueLabel;
int labelXPos;
int gridLineX;
// shift 2nd and more lines
int shift = 0;
// for strokes swapping
Stroke oldStroke = ((Graphics2D) g).getStroke();
g.setColor(axisColor);
//draw markers
for (int n = 0; n <= gridLinesCount; n++)
{
gridLineX = chartRect.x + (int) (n * ((double) chartRect.width / gridLinesCount));
g.drawLine(gridLineX, chartRect.y + chartRect.height - 3, gridLineX, chartRect.y + chartRect.height + 3);
}
for (int n = 0; n <= gridLinesCount; n++)
{
//draw 2nd and more axis dashed and shifted
if(n!=0) {
((Graphics2D) g).setStroke(dashStroke);
shift = 7;
}
gridLineX = chartRect.x + (int) (n * ((double) chartRect.width / gridLinesCount));
// draw grid line with tick
g.setColor(axisColor);
g.drawLine(gridLineX, chartRect.y + chartRect.height - shift, gridLineX, chartRect.y);
g.setColor(Color.black);
// draw label
xAxisLabelRenderer.setValue(minXVal + n * (maxXVal - minXVal) / gridLinesCount);
valueLabel = xAxisLabelRenderer.getText();
labelXPos = gridLineX - fm.stringWidth(valueLabel) / 2;
g.drawString(valueLabel, labelXPos, xAxisRect.y + fm.getAscent() + spacing);
}
//restore stroke
((Graphics2D) g).setStroke(oldStroke);
if (drawCurrentX)
{
gridLineX = chartRect.x + (int) ((currentXVal - minXVal) * (double) chartRect.width / (maxXVal - minXVal));
g.setColor(Color.GRAY);
g.drawLine(gridLineX, chartRect.y, gridLineX, chartRect.y + chartRect.height);
g.setColor(Color.black);
}
}
private void paintChart(Graphics g)
{
g.setColor(Color.yellow);
Iterator<Entry<String, AbstractGraphRow>> it = rows.entrySet().iterator();
while (it.hasNext())
{
Entry<String, AbstractGraphRow> row = it.next();
if (row.getValue().isDrawOnChart())
{
paintRow(g, row.getValue());
}
}
}
private void paintRow(Graphics g, AbstractGraphRow row)
{
FontMetrics fm = g.getFontMetrics(g.getFont());
Iterator<Entry<Long, AbstractGraphPanelChartElement>> it = row.iterator();
Entry<Long, AbstractGraphPanelChartElement> element;
int radius = row.getMarkerSize();
int x, y;
int prevX = drawStartFinalZeroingLines ? chartRect.x : -1;
int prevY = chartRect.y + chartRect.height;
final double dxForDVal = (maxXVal <= minXVal) ? 0 : (double) chartRect.width / (maxXVal - minXVal);
final double dyForDVal = (maxYVal <= minYVal) ? 0 : (double) chartRect.height / (maxYVal - minYVal);
Stroke oldStroke = null;
if(row.isDrawThickLines()) {
oldStroke = ((Graphics2D) g).getStroke();
}
while (it.hasNext())
{
element = it.next();
if (!row.isDrawOnChart())
{
continue;
}
x = chartRect.x + (int) ((element.getKey() - minXVal) * dxForDVal);
AbstractGraphPanelChartElement elementValue = (AbstractGraphPanelChartElement) element.getValue();
y = chartRect.y + chartRect.height - (int) ((elementValue.getValue() - minYVal) * dyForDVal);
if(row.isDrawThickLines()) {
((Graphics2D) g).setStroke(thickStroke);
}
// draw lines
if (row.isDrawLine())
{
if (prevX > 0)
{
g.setColor(row.getColor());
g.drawLine(prevX, prevY, x, y);
}
prevX = x;
prevY = y;
}
if(row.isDrawThickLines()) {
((Graphics2D) g).setStroke(oldStroke);
}
if (row.isDrawValueLabel())
{
g.setColor(Color.DARK_GRAY);
yAxisLabelRenderer.setValue(elementValue.getValue());
g.drawString(yAxisLabelRenderer.getText(),
x + row.getMarkerSize() + spacing,
y + fm.getAscent() / 2);
}
// draw markers
if (radius != AbstractGraphRow.MARKER_SIZE_NONE)
{
g.setColor(row.getColor());
g.fillOval(x - radius, y - radius, (radius) * 2, (radius) * 2);
//g.setColor(Color.black);
//g.drawOval(x - radius, y - radius, radius * 2, radius * 2);
}
}
// draw final lines
if (row.isDrawLine() && drawStartFinalZeroingLines)
{
if(row.isDrawThickLines()) {
((Graphics2D) g).setStroke(thickStroke);
}
g.setColor(row.getColor());
g.drawLine(prevX, prevY, (int) (prevX + dxForDVal), chartRect.y + chartRect.height);
if(row.isDrawThickLines()) {
((Graphics2D) g).setStroke(oldStroke);
}
}
}
/**
*
* @param aRows
*/
public void setRows(AbstractMap<String, AbstractGraphRow> aRows)
{
rows = aRows;
}
/**
* @param yAxisLabelRenderer the yAxisLabelRenderer to set
*/
public void setyAxisLabelRenderer(NumberRenderer yAxisLabelRenderer)
{
this.yAxisLabelRenderer = yAxisLabelRenderer;
}
/**
* @param xAxisLabelRenderer the xAxisLabelRenderer to set
*/
public void setxAxisLabelRenderer(NumberRenderer xAxisLabelRenderer)
{
this.xAxisLabelRenderer = xAxisLabelRenderer;
}
/**
* @param drawFinalZeroingLines the drawFinalZeroingLines to set
*/
public void setDrawFinalZeroingLines(boolean drawFinalZeroingLines)
{
this.drawStartFinalZeroingLines = drawFinalZeroingLines && !neverDrawFinalZeroingLines;
}
/**
* @param drawCurrentX the drawCurrentX to set
*/
public void setDrawCurrentX(boolean drawCurrentX)
{
this.drawCurrentX = drawCurrentX;
}
/**
* @param currentX the currentX to set
*/
public void setCurrentX(long currentX)
{
this.currentXVal = currentX;
}
/**
*
* @param i
*/
public void setForcedMinX(int i)
{
forcedMinX = i;
}
//Paint the add the same color of the axis but with transparency
private void paintAd(Graphics2D g)
{
Font oldFont = g.getFont();
g.setFont(g.getFont().deriveFont(10F));
g.setColor(axisColor);
Composite oldComposite = g.getComposite();
g.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.7f));
g.drawString(AD_TEXT,
getWidth() - g.getFontMetrics().stringWidth(AD_TEXT) - spacing,
g.getFontMetrics().getHeight() - spacing + 1);
g.setComposite(oldComposite);
g.setFont(oldFont);
}
/*
* Clear error messages
*/
public void clearErrorMessage() {
errorMessage = null;
}
/*
* Set error message if not null and not empty
* @param msg the error message to set
*/
public void setErrorMessage(String msg) {
if(msg != null && msg.trim().length()>0) {
errorMessage = msg;
}
}
    // Adding a popup menu to copy image in clipboard
    /**
     * {@link java.awt.datatransfer.ClipboardOwner} callback invoked when
     * another application takes ownership of the clipboard. Intentionally a
     * no-op: the copied chart image needs no cleanup when ownership is lost.
     *
     * @param clipboard the clipboard this panel no longer owns
     * @param contents the contents this panel had placed on the clipboard
     */
    @Override
    public void lostOwnership(Clipboard clipboard, Transferable contents)
    {
        // do nothing
    }
private Image getImage()
{
BufferedImage image = new BufferedImage(this.getWidth(), this.getHeight(), BufferedImage.TYPE_INT_ARGB);
Graphics2D g2 = image.createGraphics();
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
this.drawPanel(g2);
return image;
}
/**
* Thanks to stephane.hoblingre
*/
private void registerPopup()
{
JPopupMenu popup = new JPopupMenu();
this.setComponentPopupMenu(popup);
JMenuItem item = new JMenuItem("Copy Image to Clipboard");
item.addActionListener(new CopyAction());
popup.add(item);
}
    /**
     * Action behind the "Copy Image to Clipboard" popup menu item: renders
     * the chart to an image and publishes it on the system clipboard as an
     * image-flavored {@link Transferable}.
     */
    private class CopyAction
            implements ActionListener
    {
        @Override
        public void actionPerformed(final ActionEvent e)
        {
            Clipboard clipboard = getToolkit().getSystemClipboard();
            // anonymous Transferable exposing only the image flavor; the
            // image is rendered lazily when the paste target requests it
            Transferable transferable = new Transferable()
            {
                @Override
                public Object getTransferData(DataFlavor flavor)
                {
                    if (isDataFlavorSupported(flavor))
                    {
                        return getImage();
                    }
                    return null;
                }

                @Override
                public DataFlavor[] getTransferDataFlavors()
                {
                    return new DataFlavor[]
                        {
                            DataFlavor.imageFlavor
                        };
                }

                @Override
                public boolean isDataFlavorSupported(DataFlavor flavor)
                {
                    return DataFlavor.imageFlavor.equals(flavor);
                }
            };
            // the enclosing panel acts as ClipboardOwner (lostOwnership no-op)
            clipboard.setContents(transferable, GraphPanelChart.this);
        }
    }
}
| Fix bug of precision loss for x axis labels do to integer implicit cast
| src/kg/apc/jmeter/charting/GraphPanelChart.java | Fix bug of precision loss for x axis labels do to integer implicit cast |
|
Java | apache-2.0 | 630aff69cbedcf0f48254506769414d6701d45c5 | 0 | jollygeorge/camel,tlehoux/camel,lasombra/camel,onders86/camel,cunningt/camel,josefkarasek/camel,sabre1041/camel,atoulme/camel,allancth/camel,noelo/camel,sirlatrom/camel,haku/camel,lowwool/camel,iweiss/camel,MrCoder/camel,veithen/camel,cunningt/camel,noelo/camel,davidwilliams1978/camel,rparree/camel,neoramon/camel,pkletsko/camel,bdecoste/camel,stravag/camel,erwelch/camel,dsimansk/camel,FingolfinTEK/camel,trohovsky/camel,rmarting/camel,CandleCandle/camel,dmvolod/camel,mike-kukla/camel,christophd/camel,prashant2402/camel,skinzer/camel,w4tson/camel,acartapanis/camel,joakibj/camel,jpav/camel,jmandawg/camel,ekprayas/camel,sverkera/camel,jamesnetherton/camel,Thopap/camel,lburgazzoli/camel,sverkera/camel,chanakaudaya/camel,grange74/camel,veithen/camel,jollygeorge/camel,onders86/camel,jarst/camel,lowwool/camel,partis/camel,mcollovati/camel,dvankleef/camel,rmarting/camel,punkhorn/camel-upstream,isururanawaka/camel,jameszkw/camel,iweiss/camel,alvinkwekel/camel,jollygeorge/camel,stalet/camel,snurmine/camel,CandleCandle/camel,bhaveshdt/camel,nicolaferraro/camel,mgyongyosi/camel,objectiser/camel,lasombra/camel,royopa/camel,johnpoth/camel,jarst/camel,grgrzybek/camel,bfitzpat/camel,mzapletal/camel,prashant2402/camel,pkletsko/camel,grange74/camel,onders86/camel,oscerd/camel,johnpoth/camel,qst-jdc-labs/camel,curso007/camel,allancth/camel,allancth/camel,ekprayas/camel,manuelh9r/camel,isururanawaka/camel,gautric/camel,logzio/camel,coderczp/camel,erwelch/camel,nboukhed/camel,driseley/camel,grgrzybek/camel,dsimansk/camel,neoramon/camel,lburgazzoli/apache-camel,pplatek/camel,qst-jdc-labs/camel,nboukhed/camel,tkopczynski/camel,alvinkwekel/camel,duro1/camel,nikvaessen/camel,Thopap/camel,maschmid/camel,hqstevenson/camel,sebi-hgdata/camel,onders86/camel,Thopap/camel,tkopczynski/camel,YMartsynkevych/camel,curso007/camel,oalles/camel,rparree/camel,trohovsky/camel,bdecoste/camel,rmarting/camel,dmvolod/camel,royopa/camel,YoshikiHi
go/camel,stalet/camel,jlpedrosa/camel,eformat/camel,igarashitm/camel,pmoerenhout/camel,oalles/camel,bgaudaen/camel,borcsokj/camel,woj-i/camel,josefkarasek/camel,stravag/camel,nikvaessen/camel,jollygeorge/camel,NickCis/camel,mnki/camel,askannon/camel,YMartsynkevych/camel,nicolaferraro/camel,davidwilliams1978/camel,tarilabs/camel,jonmcewen/camel,bdecoste/camel,tarilabs/camel,Fabryprog/camel,neoramon/camel,christophd/camel,sebi-hgdata/camel,maschmid/camel,apache/camel,acartapanis/camel,eformat/camel,anoordover/camel,CodeSmell/camel,scranton/camel,logzio/camel,oscerd/camel,haku/camel,dkhanolkar/camel,koscejev/camel,snurmine/camel,adessaigne/camel,MohammedHammam/camel,veithen/camel,erwelch/camel,drsquidop/camel,sirlatrom/camel,jmandawg/camel,mzapletal/camel,iweiss/camel,gilfernandes/camel,MohammedHammam/camel,NickCis/camel,jameszkw/camel,kevinearls/camel,erwelch/camel,snadakuduru/camel,alvinkwekel/camel,yury-vashchyla/camel,maschmid/camel,sabre1041/camel,edigrid/camel,jamesnetherton/camel,satishgummadelli/camel,gnodet/camel,atoulme/camel,yuruki/camel,sverkera/camel,jlpedrosa/camel,partis/camel,FingolfinTEK/camel,CandleCandle/camel,akhettar/camel,snadakuduru/camel,tarilabs/camel,MohammedHammam/camel,arnaud-deprez/camel,royopa/camel,skinzer/camel,borcsokj/camel,brreitme/camel,drsquidop/camel,jlpedrosa/camel,dvankleef/camel,anton-k11/camel,YMartsynkevych/camel,w4tson/camel,jameszkw/camel,sverkera/camel,jonmcewen/camel,curso007/camel,coderczp/camel,satishgummadelli/camel,chirino/camel,mike-kukla/camel,gautric/camel,askannon/camel,gyc567/camel,bgaudaen/camel,igarashitm/camel,mcollovati/camel,NickCis/camel,christophd/camel,duro1/camel,isavin/camel,pkletsko/camel,pplatek/camel,satishgummadelli/camel,oscerd/camel,jmandawg/camel,iweiss/camel,kevinearls/camel,partis/camel,rparree/camel,neoramon/camel,grgrzybek/camel,askannon/camel,allancth/camel,koscejev/camel,ekprayas/camel,tlehoux/camel,veithen/camel,NetNow/camel,edigrid/camel,ge0ffrey/camel,yogamaha/camel,MrCoder/camel,grgrzybe
k/camel,brreitme/camel,gnodet/camel,grgrzybek/camel,nikvaessen/camel,mnki/camel,ekprayas/camel,jonmcewen/camel,borcsokj/camel,oalles/camel,anton-k11/camel,ramonmaruko/camel,haku/camel,dkhanolkar/camel,josefkarasek/camel,tdiesler/camel,scranton/camel,kevinearls/camel,veithen/camel,curso007/camel,MohammedHammam/camel,nikhilvibhav/camel,pplatek/camel,mnki/camel,YoshikiHigo/camel,NetNow/camel,ullgren/camel,punkhorn/camel-upstream,nicolaferraro/camel,MrCoder/camel,alvinkwekel/camel,ekprayas/camel,nikhilvibhav/camel,noelo/camel,sebi-hgdata/camel,tadayosi/camel,snadakuduru/camel,dpocock/camel,chirino/camel,bhaveshdt/camel,cunningt/camel,nboukhed/camel,ssharma/camel,zregvart/camel,isavin/camel,tarilabs/camel,jpav/camel,erwelch/camel,lburgazzoli/camel,eformat/camel,jamesnetherton/camel,snurmine/camel,grange74/camel,jamesnetherton/camel,pmoerenhout/camel,bdecoste/camel,partis/camel,kevinearls/camel,YoshikiHigo/camel,lburgazzoli/apache-camel,coderczp/camel,jarst/camel,yury-vashchyla/camel,bfitzpat/camel,woj-i/camel,haku/camel,satishgummadelli/camel,qst-jdc-labs/camel,dmvolod/camel,atoulme/camel,ssharma/camel,JYBESSON/camel,brreitme/camel,mnki/camel,dsimansk/camel,mike-kukla/camel,jarst/camel,ssharma/camel,duro1/camel,akhettar/camel,gilfernandes/camel,mike-kukla/camel,w4tson/camel,yury-vashchyla/camel,davidkarlsen/camel,anton-k11/camel,trohovsky/camel,DariusX/camel,oalles/camel,dsimansk/camel,jkorab/camel,dkhanolkar/camel,askannon/camel,duro1/camel,nikhilvibhav/camel,coderczp/camel,scranton/camel,mike-kukla/camel,jkorab/camel,coderczp/camel,mohanaraosv/camel,prashant2402/camel,davidkarlsen/camel,gautric/camel,adessaigne/camel,RohanHart/camel,trohovsky/camel,bfitzpat/camel,stravag/camel,snadakuduru/camel,hqstevenson/camel,JYBESSON/camel,yuruki/camel,drsquidop/camel,prashant2402/camel,tdiesler/camel,CodeSmell/camel,tdiesler/camel,objectiser/camel,mzapletal/camel,duro1/camel,jlpedrosa/camel,jollygeorge/camel,FingolfinTEK/camel,jlpedrosa/camel,ge0ffrey/camel,stravag/camel,stalet/ca
mel,christophd/camel,manuelh9r/camel,dsimansk/camel,ge0ffrey/camel,gyc567/camel,bgaudaen/camel,snurmine/camel,josefkarasek/camel,tlehoux/camel,tadayosi/camel,rmarting/camel,pax95/camel,lasombra/camel,ge0ffrey/camel,FingolfinTEK/camel,apache/camel,ge0ffrey/camel,jameszkw/camel,YMartsynkevych/camel,anton-k11/camel,yuruki/camel,noelo/camel,gautric/camel,joakibj/camel,chirino/camel,skinzer/camel,lowwool/camel,adessaigne/camel,mcollovati/camel,atoulme/camel,salikjan/camel,grange74/camel,ssharma/camel,haku/camel,ramonmaruko/camel,pmoerenhout/camel,koscejev/camel,davidwilliams1978/camel,erwelch/camel,tkopczynski/camel,dmvolod/camel,driseley/camel,tkopczynski/camel,skinzer/camel,ullgren/camel,davidkarlsen/camel,nikhilvibhav/camel,RohanHart/camel,jonmcewen/camel,iweiss/camel,maschmid/camel,rparree/camel,sverkera/camel,mnki/camel,Thopap/camel,dpocock/camel,pmoerenhout/camel,igarashitm/camel,bhaveshdt/camel,driseley/camel,mnki/camel,dkhanolkar/camel,jmandawg/camel,acartapanis/camel,isavin/camel,mohanaraosv/camel,oscerd/camel,pax95/camel,eformat/camel,edigrid/camel,partis/camel,sirlatrom/camel,lowwool/camel,qst-jdc-labs/camel,mgyongyosi/camel,logzio/camel,chirino/camel,tdiesler/camel,noelo/camel,jkorab/camel,tadayosi/camel,edigrid/camel,partis/camel,davidwilliams1978/camel,arnaud-deprez/camel,mcollovati/camel,isavin/camel,sirlatrom/camel,NetNow/camel,CodeSmell/camel,tkopczynski/camel,bgaudaen/camel,bfitzpat/camel,borcsokj/camel,NetNow/camel,koscejev/camel,ullgren/camel,nikvaessen/camel,isururanawaka/camel,prashant2402/camel,maschmid/camel,logzio/camel,haku/camel,eformat/camel,tdiesler/camel,johnpoth/camel,davidkarlsen/camel,skinzer/camel,woj-i/camel,arnaud-deprez/camel,tlehoux/camel,arnaud-deprez/camel,YoshikiHigo/camel,dpocock/camel,tadayosi/camel,pplatek/camel,pmoerenhout/camel,lburgazzoli/camel,isavin/camel,anoordover/camel,akhettar/camel,mgyongyosi/camel,JYBESSON/camel,apache/camel,jpav/camel,qst-jdc-labs/camel,royopa/camel,lburgazzoli/apache-camel,isavin/camel,sverkera/cam
el,objectiser/camel,Fabryprog/camel,allancth/camel,gyc567/camel,akhettar/camel,anoordover/camel,atoulme/camel,rparree/camel,NickCis/camel,joakibj/camel,drsquidop/camel,yuruki/camel,gyc567/camel,pax95/camel,tlehoux/camel,akhettar/camel,pkletsko/camel,nikvaessen/camel,driseley/camel,neoramon/camel,stalet/camel,gnodet/camel,jlpedrosa/camel,ramonmaruko/camel,ssharma/camel,joakibj/camel,bdecoste/camel,jamesnetherton/camel,jkorab/camel,cunningt/camel,RohanHart/camel,YMartsynkevych/camel,pplatek/camel,lasombra/camel,NickCis/camel,chanakaudaya/camel,MohammedHammam/camel,eformat/camel,chanakaudaya/camel,isururanawaka/camel,qst-jdc-labs/camel,joakibj/camel,hqstevenson/camel,acartapanis/camel,veithen/camel,tarilabs/camel,w4tson/camel,arnaud-deprez/camel,kevinearls/camel,tdiesler/camel,woj-i/camel,CandleCandle/camel,tadayosi/camel,jpav/camel,w4tson/camel,jpav/camel,borcsokj/camel,curso007/camel,oalles/camel,kevinearls/camel,adessaigne/camel,bfitzpat/camel,anton-k11/camel,coderczp/camel,bfitzpat/camel,tadayosi/camel,lasombra/camel,pplatek/camel,jkorab/camel,edigrid/camel,lburgazzoli/apache-camel,mgyongyosi/camel,johnpoth/camel,lasombra/camel,RohanHart/camel,yogamaha/camel,jarst/camel,ramonmaruko/camel,sirlatrom/camel,Fabryprog/camel,dmvolod/camel,dpocock/camel,dvankleef/camel,lowwool/camel,anton-k11/camel,akhettar/camel,Thopap/camel,FingolfinTEK/camel,CandleCandle/camel,gilfernandes/camel,chirino/camel,joakibj/camel,mgyongyosi/camel,ramonmaruko/camel,jpav/camel,nboukhed/camel,jameszkw/camel,neoramon/camel,DariusX/camel,zregvart/camel,stalet/camel,dsimansk/camel,grange74/camel,tkopczynski/camel,chanakaudaya/camel,yogamaha/camel,ssharma/camel,ekprayas/camel,koscejev/camel,anoordover/camel,hqstevenson/camel,adessaigne/camel,edigrid/camel,nboukhed/camel,acartapanis/camel,ramonmaruko/camel,noelo/camel,christophd/camel,zregvart/camel,oscerd/camel,manuelh9r/camel,adessaigne/camel,chanakaudaya/camel,YoshikiHigo/camel,igarashitm/camel,lburgazzoli/camel,pax95/camel,gyc567/camel,YoshikiHig
o/camel,snadakuduru/camel,brreitme/camel,mzapletal/camel,anoordover/camel,arnaud-deprez/camel,pkletsko/camel,nicolaferraro/camel,mohanaraosv/camel,maschmid/camel,DariusX/camel,nikvaessen/camel,stalet/camel,jmandawg/camel,snurmine/camel,bgaudaen/camel,yuruki/camel,stravag/camel,ge0ffrey/camel,jameszkw/camel,CodeSmell/camel,RohanHart/camel,driseley/camel,yuruki/camel,anoordover/camel,woj-i/camel,allancth/camel,gilfernandes/camel,NetNow/camel,brreitme/camel,manuelh9r/camel,bhaveshdt/camel,isururanawaka/camel,satishgummadelli/camel,cunningt/camel,apache/camel,Thopap/camel,askannon/camel,MrCoder/camel,grange74/camel,rmarting/camel,gnodet/camel,yogamaha/camel,bhaveshdt/camel,igarashitm/camel,dmvolod/camel,punkhorn/camel-upstream,sebi-hgdata/camel,acartapanis/camel,JYBESSON/camel,nboukhed/camel,satishgummadelli/camel,objectiser/camel,tarilabs/camel,atoulme/camel,drsquidop/camel,YMartsynkevych/camel,royopa/camel,Fabryprog/camel,yogamaha/camel,FingolfinTEK/camel,onders86/camel,sabre1041/camel,stravag/camel,jonmcewen/camel,snurmine/camel,brreitme/camel,josefkarasek/camel,salikjan/camel,josefkarasek/camel,lowwool/camel,dpocock/camel,jkorab/camel,NetNow/camel,DariusX/camel,RohanHart/camel,dpocock/camel,oscerd/camel,mohanaraosv/camel,MrCoder/camel,trohovsky/camel,davidwilliams1978/camel,hqstevenson/camel,oalles/camel,iweiss/camel,woj-i/camel,MrCoder/camel,JYBESSON/camel,mohanaraosv/camel,CandleCandle/camel,bdecoste/camel,yury-vashchyla/camel,drsquidop/camel,sebi-hgdata/camel,onders86/camel,mike-kukla/camel,bhaveshdt/camel,tlehoux/camel,gautric/camel,duro1/camel,grgrzybek/camel,curso007/camel,christophd/camel,igarashitm/camel,royopa/camel,lburgazzoli/apache-camel,apache/camel,pplatek/camel,mohanaraosv/camel,dvankleef/camel,jonmcewen/camel,sabre1041/camel,pax95/camel,dvankleef/camel,borcsokj/camel,logzio/camel,chirino/camel,logzio/camel,MohammedHammam/camel,dkhanolkar/camel,snadakuduru/camel,davidwilliams1978/camel,hqstevenson/camel,lburgazzoli/camel,manuelh9r/camel,jmandawg/camel
,gyc567/camel,lburgazzoli/apache-camel,yogamaha/camel,zregvart/camel,pkletsko/camel,driseley/camel,NickCis/camel,apache/camel,koscejev/camel,w4tson/camel,jollygeorge/camel,rmarting/camel,scranton/camel,jarst/camel,askannon/camel,lburgazzoli/camel,gilfernandes/camel,JYBESSON/camel,mzapletal/camel,sirlatrom/camel,mzapletal/camel,pax95/camel,dvankleef/camel,trohovsky/camel,gautric/camel,pmoerenhout/camel,punkhorn/camel-upstream,gnodet/camel,skinzer/camel,johnpoth/camel,sebi-hgdata/camel,sabre1041/camel,yury-vashchyla/camel,scranton/camel,logzio/camel,gilfernandes/camel,scranton/camel,manuelh9r/camel,bgaudaen/camel,cunningt/camel,dkhanolkar/camel,prashant2402/camel,johnpoth/camel,jamesnetherton/camel,rparree/camel,sabre1041/camel,ullgren/camel,mgyongyosi/camel,isururanawaka/camel,chanakaudaya/camel,yury-vashchyla/camel | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.karaf.commands;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.Route;
import org.apache.camel.spi.ManagementAgent;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.karaf.shell.console.OsgiCommandSupport;
import org.apache.karaf.util.StringEscapeUtils;
/**
 * Command to display detailed information about a Camel context.
 */
@Command(scope = "camel", name = "context-info", description = "Display detailed information about a Camel context.")
public class ContextInfo extends OsgiCommandSupport {

    @Argument(index = 0, name = "name", description = "The name of the Camel context", required = true, multiValued = false)
    String name;

    @Argument(index = 1, name = "mode", description = "Allows for different display modes (--verbose, etc)", required = false, multiValued = false)
    String mode;

    private CamelController camelController;

    public void setCamelController(CamelController camelController) {
        this.camelController = camelController;
    }

    /**
     * Prints detailed information (identity, JMX statistics, properties,
     * components, dataformats, languages, routes) about the named Camel
     * context to the console.
     *
     * @return always null (output goes to System.out / System.err)
     * @throws Exception if querying the MBean server fails
     */
    public Object doExecute() throws Exception {
        CamelContext camelContext = camelController.getCamelContext(name);

        if (camelContext == null) {
            System.err.println("Camel context " + name + " not found.");
            return null;
        }

        System.out.println(StringEscapeUtils.unescapeJava("\u001B[1m\u001B[33mCamel Context " + name + "\u001B[0m"));
        System.out.println(StringEscapeUtils.unescapeJava("\tName: " + camelContext.getName()));
        System.out.println(StringEscapeUtils.unescapeJava("\tManagementName: " + camelContext.getManagementName()));
        System.out.println(StringEscapeUtils.unescapeJava("\tVersion: " + camelContext.getVersion()));
        System.out.println(StringEscapeUtils.unescapeJava("\tStatus: " + camelContext.getStatus()));
        System.out.println(StringEscapeUtils.unescapeJava("\tUptime: " + camelContext.getUptime()));

        // the statistics are in the mbeans
        System.out.println("");
        System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mStatistics\u001B[0m"));
        ObjectName contextMBean = null;
        ManagementAgent agent = camelContext.getManagementStrategy().getManagementAgent();
        if (agent != null) {
            MBeanServer mBeanServer = agent.getMBeanServer();

            Set<ObjectName> set = mBeanServer.queryNames(new ObjectName(agent.getMBeanObjectDomainName() + ":type=context,name=\"" + name + "\",*"), null);
            Iterator<ObjectName> iterator = set.iterator();
            if (iterator.hasNext()) {
                contextMBean = iterator.next();
            }

            // guard against an empty query result: isRegistered(null) would
            // throw a RuntimeOperationsException instead of skipping cleanly
            if (contextMBean != null && mBeanServer.isRegistered(contextMBean)) {
                Long exchangesTotal = (Long) mBeanServer.getAttribute(contextMBean, "ExchangesTotal");
                System.out.println(StringEscapeUtils.unescapeJava("\tExchanges Total: " + exchangesTotal));
                Long exchangesCompleted = (Long) mBeanServer.getAttribute(contextMBean, "ExchangesCompleted");
                System.out.println(StringEscapeUtils.unescapeJava("\tExchanges Completed: " + exchangesCompleted));
                Long exchangesFailed = (Long) mBeanServer.getAttribute(contextMBean, "ExchangesFailed");
                System.out.println(StringEscapeUtils.unescapeJava("\tExchanges Failed: " + exchangesFailed));
                Long minProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "MinProcessingTime");
                System.out.println(StringEscapeUtils.unescapeJava("\tMin Processing Time: " + minProcessingTime + "ms"));
                Long maxProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "MaxProcessingTime");
                System.out.println(StringEscapeUtils.unescapeJava("\tMax Processing Time: " + maxProcessingTime + "ms"));
                Long meanProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "MeanProcessingTime");
                System.out.println(StringEscapeUtils.unescapeJava("\tMean Processing Time: " + meanProcessingTime + "ms"));
                Long totalProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "TotalProcessingTime");
                System.out.println(StringEscapeUtils.unescapeJava("\tTotal Processing Time: " + totalProcessingTime + "ms"));
                Long lastProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "LastProcessingTime");
                System.out.println(StringEscapeUtils.unescapeJava("\tLast Processing Time: " + lastProcessingTime + "ms"));
                Long deltaProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "DeltaProcessingTime");
                System.out.println(StringEscapeUtils.unescapeJava("\tDelta Processing Time: " + deltaProcessingTime + "ms"));

                String load01 = (String) mBeanServer.getAttribute(contextMBean, "Load01");
                String load05 = (String) mBeanServer.getAttribute(contextMBean, "Load05");
                String load15 = (String) mBeanServer.getAttribute(contextMBean, "Load15");
                System.out.println(StringEscapeUtils.unescapeJava("\tLoad Avg: " + load01 + ", " + load05 + ", " + load15));

                // Test for null to see if a any exchanges have been processed first to avoid NPE
                Object resetTimestampObj = mBeanServer.getAttribute(contextMBean, "ResetTimestamp");
                SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                if (resetTimestampObj == null) {
                    // Print an empty value for scripting
                    System.out.println(StringEscapeUtils.unescapeJava("\tReset Statistics Date:"));
                } else {
                    Date firstExchangeTimestamp = (Date) resetTimestampObj;
                    System.out.println(StringEscapeUtils.unescapeJava("\tReset Statistics Date: " + format.format(firstExchangeTimestamp)));
                }

                // Test for null to see if a any exchanges have been processed first to avoid NPE
                Object firstExchangeTimestampObj = mBeanServer.getAttribute(contextMBean, "FirstExchangeCompletedTimestamp");
                if (firstExchangeTimestampObj == null) {
                    // Print an empty value for scripting
                    System.out.println(StringEscapeUtils.unescapeJava("\tFirst Exchange Date:"));
                } else {
                    Date firstExchangeTimestamp = (Date) firstExchangeTimestampObj;
                    System.out.println(StringEscapeUtils.unescapeJava("\tFirst Exchange Date: " + format.format(firstExchangeTimestamp)));
                }

                // Again, check for null to avoid NPE
                Object lastExchangeCompletedTimestampObj = mBeanServer.getAttribute(contextMBean, "LastExchangeCompletedTimestamp");
                if (lastExchangeCompletedTimestampObj == null) {
                    // Print an empty value for scripting
                    System.out.println(StringEscapeUtils.unescapeJava("\tLast Exchange Completed Date:"));
                } else {
                    Date lastExchangeCompletedTimestamp = (Date) lastExchangeCompletedTimestampObj;
                    System.out.println(StringEscapeUtils.unescapeJava("\tLast Exchange Completed Date: " + format.format(lastExchangeCompletedTimestamp)));
                }

                // add type converter statistics if enabled
                if (camelContext.getTypeConverterRegistry().getStatistics().isStatisticsEnabled()) {
                    System.out.println(StringEscapeUtils.unescapeJava(String.format("\tTypeConverterRegistry utilization: [attempts=%s, hits=%s, misses=%s, failures=%s]",
                            camelContext.getTypeConverterRegistry().getStatistics().getAttemptCounter(),
                            camelContext.getTypeConverterRegistry().getStatistics().getHitCounter(),
                            camelContext.getTypeConverterRegistry().getStatistics().getMissCounter(),
                            camelContext.getTypeConverterRegistry().getStatistics().getFailedCounter())));
                }

                // add stream caching details if enabled
                if (camelContext.getStreamCachingStrategy().isEnabled()) {
                    System.out.println(StringEscapeUtils.unescapeJava(String.format("\tStreamCachingStrategy: [spoolDirectory=%s, spoolChiper=%s, spoolThreshold=%s, spoolUsedHeapMemoryThreshold=%s, anySpoolRules=%s, bufferSize=%s, removeSpoolDirectoryWhenStopping=%s, statisticsEnabled=%s]",
                            camelContext.getStreamCachingStrategy().getSpoolDirectory(),
                            camelContext.getStreamCachingStrategy().getSpoolChiper(),
                            camelContext.getStreamCachingStrategy().getSpoolThreshold(),
                            camelContext.getStreamCachingStrategy().getSpoolUsedHeapMemoryThreshold(),
                            camelContext.getStreamCachingStrategy().isAnySpoolRules(),
                            camelContext.getStreamCachingStrategy().getBufferSize(),
                            camelContext.getStreamCachingStrategy().isRemoveSpoolDirectoryWhenStopping(),
                            camelContext.getStreamCachingStrategy().getStatistics().isStatisticsEnabled())));
                    if (camelContext.getStreamCachingStrategy().getStatistics().isStatisticsEnabled()) {
                        System.out.println(StringEscapeUtils.unescapeJava(String.format("\t                       [cacheMemoryCounter=%s, cacheMemorySize=%s, cacheMemoryAverageSize=%s, cacheSpoolCounter=%s, cacheSpoolSize=%s, cacheSpoolAverageSize=%s]",
                                camelContext.getStreamCachingStrategy().getStatistics().getCacheMemoryCounter(),
                                camelContext.getStreamCachingStrategy().getStatistics().getCacheMemorySize(),
                                camelContext.getStreamCachingStrategy().getStatistics().getCacheMemoryAverageSize(),
                                camelContext.getStreamCachingStrategy().getStatistics().getCacheSpoolCounter(),
                                camelContext.getStreamCachingStrategy().getStatistics().getCacheSpoolSize(),
                                camelContext.getStreamCachingStrategy().getStatistics().getCacheSpoolAverageSize())));
                    }
                }

                long activeRoutes = 0;
                long inactiveRoutes = 0;
                List<Route> routeList = camelContext.getRoutes();
                for (Route route : routeList) {
                    if (camelContext.getRouteStatus(route.getId()).isStarted()) {
                        activeRoutes++;
                    } else {
                        inactiveRoutes++;
                    }
                }
                System.out.println(StringEscapeUtils.unescapeJava("\tNumber of running routes: " + activeRoutes));
                System.out.println(StringEscapeUtils.unescapeJava("\tNumber of not running routes: " + inactiveRoutes));
            }
        } else {
            System.out.println("");
            System.out.println(StringEscapeUtils.unescapeJava("\u001B[31mJMX Agent of Camel is not reachable. Maybe it has been disabled on the Camel context"));
            System.out.println(StringEscapeUtils.unescapeJava("In consequence, some statistics are not available.\u001B[0m"));
        }

        System.out.println("");
        System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mMiscellaneous\u001B[0m"));
        System.out.println(StringEscapeUtils.unescapeJava("\tAuto Startup: " + camelContext.isAutoStartup()));
        System.out.println(StringEscapeUtils.unescapeJava("\tStarting Routes: " + camelContext.isStartingRoutes()));
        System.out.println(StringEscapeUtils.unescapeJava("\tSuspended: " + camelContext.isSuspended()));
        System.out.println(StringEscapeUtils.unescapeJava("\tMessage History: " + camelContext.isMessageHistory()));
        System.out.println(StringEscapeUtils.unescapeJava("\tTracing: " + camelContext.isTracing()));

        System.out.println("");
        System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mProperties\u001B[0m"));
        for (String property : camelContext.getProperties().keySet()) {
            System.out.println(StringEscapeUtils.unescapeJava("\t" + property + " = " + camelContext.getProperty(property)));
        }

        System.out.println("");
        System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mAdvanced\u001B[0m"));
        System.out.println(StringEscapeUtils.unescapeJava("\tClassResolver: " + camelContext.getClassResolver()));
        System.out.println(StringEscapeUtils.unescapeJava("\tPackageScanClassResolver: " + camelContext.getPackageScanClassResolver()));
        System.out.println(StringEscapeUtils.unescapeJava("\tApplicationContextClassLoader: " + camelContext.getApplicationContextClassLoader()));

        System.out.println("");
        System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mComponents\u001B[0m"));
        for (String component : camelContext.getComponentNames()) {
            System.out.println(StringEscapeUtils.unescapeJava("\t" + component));
        }

        System.out.println("");
        System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mDataformats\u001B[0m"));
        for (String names : camelContext.getDataFormats().keySet()) {
            System.out.println(StringEscapeUtils.unescapeJava("\t" + names));
        }

        System.out.println("");
        System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mLanguages\u001B[0m"));
        for (String language : camelContext.getLanguageNames()) {
            System.out.println(StringEscapeUtils.unescapeJava("\t" + language));
        }

        // endpoints are only listed in verbose mode as the list can be long
        if (mode != null && mode.equals("--verbose")) {
            System.out.println("");
            System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mEndpoints\u001B[0m"));
            for (Endpoint endpoint : camelContext.getEndpoints()) {
                System.out.println(StringEscapeUtils.unescapeJava("\t" + endpoint.getEndpointUri()));
            }
        }

        System.out.println("");
        System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mRoutes\u001B[0m"));
        for (Route route : camelContext.getRoutes()) {
            System.out.println(StringEscapeUtils.unescapeJava("\t" + route.getId()));
        }

        return null;
    }
}
| platforms/karaf/commands/src/main/java/org/apache/camel/karaf/commands/ContextInfo.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.karaf.commands;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.Route;
import org.apache.camel.spi.ManagementAgent;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.karaf.shell.console.OsgiCommandSupport;
import org.apache.karaf.util.StringEscapeUtils;
/**
* Command to display detailed information about a Camel context.
*/
@Command(scope = "camel", name = "context-info", description = "Display detailed information about a Camel context.")
public class ContextInfo extends OsgiCommandSupport {
@Argument(index = 0, name = "name", description = "The name of the Camel context", required = true, multiValued = false)
String name;
@Argument(index = 1, name = "mode", description = "Allows for different display modes (--verbose, etc)", required = false, multiValued = false)
String mode;
private CamelController camelController;
public void setCamelController(CamelController camelController) {
this.camelController = camelController;
}
public Object doExecute() throws Exception {
CamelContext camelContext = camelController.getCamelContext(name);
if (camelContext == null) {
System.err.println("Camel context " + name + " not found.");
return null;
}
System.out.println(StringEscapeUtils.unescapeJava("\u001B[1m\u001B[33mCamel Context " + name + "\u001B[0m"));
System.out.println(StringEscapeUtils.unescapeJava("\tName: " + camelContext.getName()));
System.out.println(StringEscapeUtils.unescapeJava("\tVersion: " + camelContext.getVersion()));
System.out.println(StringEscapeUtils.unescapeJava("\tStatus: " + camelContext.getStatus()));
System.out.println(StringEscapeUtils.unescapeJava("\tUptime: " + camelContext.getUptime()));
// the statistics are in the mbeans
System.out.println("");
System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mStatistics\u001B[0m"));
ObjectName contextMBean = null;
ManagementAgent agent = camelContext.getManagementStrategy().getManagementAgent();
if (agent != null) {
MBeanServer mBeanServer = agent.getMBeanServer();
Set<ObjectName> set = mBeanServer.queryNames(new ObjectName(agent.getMBeanObjectDomainName() + ":type=context,name=\"" + name + "\",*"), null);
Iterator<ObjectName> iterator = set.iterator();
if (iterator.hasNext()) {
contextMBean = iterator.next();
}
if (mBeanServer.isRegistered(contextMBean)) {
Long exchangesTotal = (Long) mBeanServer.getAttribute(contextMBean, "ExchangesTotal");
System.out.println(StringEscapeUtils.unescapeJava("\tExchanges Total: " + exchangesTotal));
Long exchangesCompleted = (Long) mBeanServer.getAttribute(contextMBean, "ExchangesCompleted");
System.out.println(StringEscapeUtils.unescapeJava("\tExchanges Completed: " + exchangesCompleted));
Long exchangesFailed = (Long) mBeanServer.getAttribute(contextMBean, "ExchangesFailed");
System.out.println(StringEscapeUtils.unescapeJava("\tExchanges Failed: " + exchangesFailed));
Long minProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "MinProcessingTime");
System.out.println(StringEscapeUtils.unescapeJava("\tMin Processing Time: " + minProcessingTime + "ms"));
Long maxProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "MaxProcessingTime");
System.out.println(StringEscapeUtils.unescapeJava("\tMax Processing Time: " + maxProcessingTime + "ms"));
Long meanProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "MeanProcessingTime");
System.out.println(StringEscapeUtils.unescapeJava("\tMean Processing Time: " + meanProcessingTime + "ms"));
Long totalProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "TotalProcessingTime");
System.out.println(StringEscapeUtils.unescapeJava("\tTotal Processing Time: " + totalProcessingTime + "ms"));
Long lastProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "LastProcessingTime");
System.out.println(StringEscapeUtils.unescapeJava("\tLast Processing Time: " + lastProcessingTime + "ms"));
Long deltaProcessingTime = (Long) mBeanServer.getAttribute(contextMBean, "DeltaProcessingTime");
System.out.println(StringEscapeUtils.unescapeJava("\tDelta Processing Time: " + deltaProcessingTime + "ms"));
String load01 = (String) mBeanServer.getAttribute(contextMBean, "Load01");
String load05 = (String) mBeanServer.getAttribute(contextMBean, "Load05");
String load15 = (String) mBeanServer.getAttribute(contextMBean, "Load15");
System.out.println(StringEscapeUtils.unescapeJava("\tLoad Avg: " + load01 + ", " + load05 + ", " + load15));
// Test for null to see if a any exchanges have been processed first to avoid NPE
Object resetTimestampObj = mBeanServer.getAttribute(contextMBean, "ResetTimestamp");
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
if (resetTimestampObj == null) {
// Print an empty value for scripting
System.out.println(StringEscapeUtils.unescapeJava("\tReset Statistics Date:"));
} else {
Date firstExchangeTimestamp = (Date) resetTimestampObj;
System.out.println(StringEscapeUtils.unescapeJava("\tReset Statistics Date: " + format.format(firstExchangeTimestamp)));
}
// Test for null to see if a any exchanges have been processed first to avoid NPE
Object firstExchangeTimestampObj = mBeanServer.getAttribute(contextMBean, "FirstExchangeCompletedTimestamp");
if (firstExchangeTimestampObj == null) {
// Print an empty value for scripting
System.out.println(StringEscapeUtils.unescapeJava("\tFirst Exchange Date:"));
} else {
Date firstExchangeTimestamp = (Date) firstExchangeTimestampObj;
System.out.println(StringEscapeUtils.unescapeJava("\tFirst Exchange Date: " + format.format(firstExchangeTimestamp)));
}
// Again, check for null to avoid NPE
Object lastExchangeCompletedTimestampObj = mBeanServer.getAttribute(contextMBean, "LastExchangeCompletedTimestamp");
if (lastExchangeCompletedTimestampObj == null) {
// Print an empty value for scripting
System.out.println(StringEscapeUtils.unescapeJava("\tLast Exchange Completed Date:"));
} else {
Date lastExchangeCompletedTimestamp = (Date) lastExchangeCompletedTimestampObj;
System.out.println(StringEscapeUtils.unescapeJava("\tLast Exchange Completed Date: " + format.format(lastExchangeCompletedTimestamp)));
}
// add type converter statistics if enabled
if (camelContext.getTypeConverterRegistry().getStatistics().isStatisticsEnabled()) {
System.out.println(StringEscapeUtils.unescapeJava(String.format("\tTypeConverterRegistry utilization: [attempts=%s, hits=%s, misses=%s, failures=%s]",
camelContext.getTypeConverterRegistry().getStatistics().getAttemptCounter(),
camelContext.getTypeConverterRegistry().getStatistics().getHitCounter(),
camelContext.getTypeConverterRegistry().getStatistics().getMissCounter(),
camelContext.getTypeConverterRegistry().getStatistics().getFailedCounter())));
}
// add stream caching details if enabled
if (camelContext.getStreamCachingStrategy().isEnabled()) {
System.out.println(StringEscapeUtils.unescapeJava(String.format("\tStreamCachingStrategy: [spoolDirectory=%s, spoolChiper=%s, spoolThreshold=%s, spoolUsedHeapMemoryThreshold=%s, anySpoolRules=%s, bufferSize=%s, removeSpoolDirectoryWhenStopping=%s, statisticsEnabled=%s]",
camelContext.getStreamCachingStrategy().getSpoolDirectory(),
camelContext.getStreamCachingStrategy().getSpoolChiper(),
camelContext.getStreamCachingStrategy().getSpoolThreshold(),
camelContext.getStreamCachingStrategy().getSpoolUsedHeapMemoryThreshold(),
camelContext.getStreamCachingStrategy().isAnySpoolRules(),
camelContext.getStreamCachingStrategy().getBufferSize(),
camelContext.getStreamCachingStrategy().isRemoveSpoolDirectoryWhenStopping(),
camelContext.getStreamCachingStrategy().getStatistics().isStatisticsEnabled())));
if (camelContext.getStreamCachingStrategy().getStatistics().isStatisticsEnabled()) {
System.out.println(StringEscapeUtils.unescapeJava(String.format("\t [cacheMemoryCounter=%s, cacheMemorySize=%s, cacheMemoryAverageSize=%s, cacheSpoolCounter=%s, cacheSpoolSize=%s, cacheSpoolAverageSize=%s]",
camelContext.getStreamCachingStrategy().getStatistics().getCacheMemoryCounter(),
camelContext.getStreamCachingStrategy().getStatistics().getCacheMemorySize(),
camelContext.getStreamCachingStrategy().getStatistics().getCacheMemoryAverageSize(),
camelContext.getStreamCachingStrategy().getStatistics().getCacheSpoolCounter(),
camelContext.getStreamCachingStrategy().getStatistics().getCacheSpoolSize(),
camelContext.getStreamCachingStrategy().getStatistics().getCacheSpoolAverageSize())));
}
}
long activeRoutes = 0;
long inactiveRoutes = 0;
List<Route> routeList = camelContext.getRoutes();
for (Route route : routeList) {
if (camelContext.getRouteStatus(route.getId()).isStarted()) {
activeRoutes++;
} else {
inactiveRoutes++;
}
}
System.out.println(StringEscapeUtils.unescapeJava("\tNumber of running routes: " + activeRoutes));
System.out.println(StringEscapeUtils.unescapeJava("\tNumber of not running routes: " + inactiveRoutes));
}
} else {
System.out.println("");
System.out.println(StringEscapeUtils.unescapeJava("\u001B[31mJMX Agent of Camel is not reachable. Maybe it has been disabled on the Camel context"));
System.out.println(StringEscapeUtils.unescapeJava("In consequence, some statistics are not available.\u001B[0m"));
}
System.out.println("");
System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mAdvanced\u001B[0m"));
System.out.println(StringEscapeUtils.unescapeJava("\tAuto Startup: " + camelContext.isAutoStartup()));
System.out.println(StringEscapeUtils.unescapeJava("\tStarting Routes: " + camelContext.isStartingRoutes()));
System.out.println(StringEscapeUtils.unescapeJava("\tSuspended: " + camelContext.isSuspended()));
System.out.println(StringEscapeUtils.unescapeJava("\tMessage History: " + camelContext.isMessageHistory()));
System.out.println(StringEscapeUtils.unescapeJava("\tTracing: " + camelContext.isTracing()));
System.out.println("");
System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mProperties\u001B[0m"));
for (String property : camelContext.getProperties().keySet()) {
System.out.println(StringEscapeUtils.unescapeJava("\t" + property + " = " + camelContext.getProperty(property)));
}
System.out.println("");
System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mComponents\u001B[0m"));
for (String component : camelContext.getComponentNames()) {
System.out.println(StringEscapeUtils.unescapeJava("\t" + component));
}
if (mode != null && mode.equals("--verbose")) {
System.out.println("");
System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mEndpoints\u001B[0m"));
for (Endpoint endpoint : camelContext.getEndpoints()) {
System.out.println(StringEscapeUtils.unescapeJava("\t" + endpoint.getEndpointUri()));
}
}
System.out.println("");
System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mRoutes\u001B[0m"));
for (Route route : camelContext.getRoutes()) {
System.out.println(StringEscapeUtils.unescapeJava("\t" + route.getId()));
}
System.out.println("");
System.out.println(StringEscapeUtils.unescapeJava("\u001B[1mUsed Languages\u001B[0m"));
for (String language : camelContext.getLanguageNames()) {
System.out.println(StringEscapeUtils.unescapeJava("\t" + language));
}
return null;
}
}
| Polished camel:context-info command to show more details.
| platforms/karaf/commands/src/main/java/org/apache/camel/karaf/commands/ContextInfo.java | Polished camel:context-info command to show more details. |
|
Java | apache-2.0 | b36bd9d92350576e4f02aa57ae30b944144379f5 | 0 | Danny-Hazelcast/hazelcast-stabilizer,hasancelik/hazelcast-stabilizer,hazelcast/hazelcast-simulator,hazelcast/hazelcast-simulator,hasancelik/hazelcast-stabilizer,Danny-Hazelcast/hazelcast-stabilizer,jerrinot/hazelcast-stabilizer,eminn/hazelcast-simulator,fengshao0907/hazelcast-simulator,fengshao0907/hazelcast-simulator,Donnerbart/hazelcast-simulator,hazelcast/hazelcast-simulator,gAmUssA/hazelcast-simulator,gAmUssA/hazelcast-simulator,pveentjer/hazelcast-simulator,Donnerbart/hazelcast-simulator,pveentjer/hazelcast-simulator,eminn/hazelcast-simulator,jerrinot/hazelcast-stabilizer | package com.hazelcast.stabilizer.agent.workerjvm;
import com.hazelcast.stabilizer.Utils;
import com.hazelcast.stabilizer.agent.Agent;
import com.hazelcast.stabilizer.agent.SpawnWorkerFailedException;
import com.hazelcast.stabilizer.worker.ClientWorker;
import com.hazelcast.stabilizer.worker.MemberWorker;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import static com.hazelcast.stabilizer.Utils.getHostAddress;
import static com.hazelcast.stabilizer.Utils.getStablizerHome;
import static com.hazelcast.stabilizer.Utils.writeText;
import static java.lang.String.format;
import static java.util.Arrays.asList;
/**
 * Responsible for launching the worker JVMs (member, client and mixed mode) for a single
 * test-suite run of one agent.
 * <p/>
 * For every worker a home directory is created under the test-suite directory, a
 * 'worker.sh' start script is generated inside that directory, and the script is executed
 * with the worker home as working directory. Startup is considered successful once the
 * worker has written its member address to a 'worker.address' file in its home directory.
 */
public class WorkerJvmLauncher {

    private final static Logger log = Logger.getLogger(WorkerJvmLauncher.class);

    private final static String CLASSPATH = System.getProperty("java.class.path");
    private final static File STABILIZER_HOME = getStablizerHome();
    private final static String CLASSPATH_SEPARATOR = System.getProperty("path.separator");
    // Worker ids must be unique within this agent JVM, hence a shared counter.
    private final static AtomicLong WORKER_ID_GENERATOR = new AtomicLong();

    // Guards against logging java.home more than once per launcher instance.
    private final AtomicBoolean javaHomePrinted = new AtomicBoolean();

    private final WorkerJvmSettings settings;
    private final Agent agent;
    private final ConcurrentMap<String, WorkerJvm> workerJvms;
    private final List<WorkerJvm> workersInProgress = new LinkedList<WorkerJvm>();

    private File hzFile;
    private File clientHzFile;
    private File testSuiteDir;

    public WorkerJvmLauncher(Agent agent, ConcurrentMap<String, WorkerJvm> workerJvms, WorkerJvmSettings settings) {
        this.settings = settings;
        this.workerJvms = workerJvms;
        this.agent = agent;
    }

    /**
     * Launches all worker JVMs (server, client and mixed) as configured in the settings.
     *
     * @throws Exception if a worker could not be spawned or did not start within the
     *                   configured startup timeout
     */
    public void launch() throws Exception {
        hzFile = createHzConfigFile();
        clientHzFile = createClientHzConfigFile();

        testSuiteDir = agent.getTestSuiteDir();
        if (!testSuiteDir.exists()) {
            if (!testSuiteDir.mkdirs()) {
                throw new SpawnWorkerFailedException("Couldn't create testSuiteDir: " + testSuiteDir.getAbsolutePath());
            }
        }

        log.info("Spawning Worker JVM using settings: " + settings);
        spawn(settings.memberWorkerCount, "server");
        spawn(settings.clientWorkerCount, "client");
        spawn(settings.mixedWorkerCount, "mixed");
    }

    // Starts 'count' workers of the given mode and blocks until all of them have
    // reported their address or the startup timeout expires.
    private void spawn(int count, String mode) throws Exception {
        log.info(format("Starting %s %s worker Java Virtual Machines", count, mode));

        for (int k = 0; k < count; k++) {
            WorkerJvm worker = startWorkerJvm(mode);
            workersInProgress.add(worker);
        }

        log.info(format("Finished starting %s %s worker Java Virtual Machines", count, mode));

        waitForWorkersStartup(workersInProgress, settings.workerStartupTimeout);
        workersInProgress.clear();
    }

    // Writes the member Hazelcast configuration to a temp file that is passed to the workers.
    private File createHzConfigFile() throws IOException {
        File hzConfigFile = File.createTempFile("hazelcast", "xml");
        hzConfigFile.deleteOnExit();
        writeText(settings.hzConfig, hzConfigFile);
        return hzConfigFile;
    }

    // Writes the client Hazelcast configuration to a temp file that is passed to the workers.
    private File createClientHzConfigFile() throws IOException {
        File clientHzConfigFile = File.createTempFile("client-hazelcast", "xml");
        clientHzConfigFile.deleteOnExit();
        writeText(settings.clientHzConfig, clientHzConfigFile);
        return clientHzConfigFile;
    }

    private String getJavaHome(String javaVendor, String javaVersion) {
        // NOTE(review): javaVendor/javaVersion are currently ignored and the agent's own
        // JVM is reused for the workers - TODO confirm whether vendor/version selection
        // should be implemented here.
        String javaHome = System.getProperty("java.home");
        if (javaHomePrinted.compareAndSet(false, true)) {
            log.info("java.home=" + javaHome);
        }
        return javaHome;
    }

    // Creates the worker home, generates its start script and launches the process.
    private WorkerJvm startWorkerJvm(String mode) throws IOException {
        String workerId = "worker-" + getHostAddress() + "-" + WORKER_ID_GENERATOR.incrementAndGet() + "-" + mode;
        File workerHome = new File(testSuiteDir, workerId);
        if (!workerHome.exists()) {
            if (!workerHome.mkdir()) {
                throw new SpawnWorkerFailedException("Could not create workerhome: " + workerHome.getAbsolutePath());
            }
        }

        String javaHome = getJavaHome(settings.javaVendor, settings.javaVersion);

        WorkerJvm workerJvm = new WorkerJvm(workerId);
        workerJvm.workerHome = workerHome;
        generateWorkerStartScript(mode, workerJvm);

        // Each argument must be a separate element; a single string like "bash worker.sh"
        // would be interpreted as the name of the executable itself.
        ProcessBuilder processBuilder = new ProcessBuilder("bash", "worker.sh")
                .directory(workerHome)
                .redirectErrorStream(true);

        Map<String, String> environment = processBuilder.environment();
        String path = javaHome + File.pathSeparator + "bin:" + environment.get("PATH");
        environment.put("PATH", path);
        environment.put("JAVA_HOME", javaHome);

        Process process = processBuilder.start();
        File logFile = new File(workerHome, "out.log");
        new WorkerJvmProcessOutputGobbler(process.getInputStream(), new FileOutputStream(logFile)).start();
        workerJvm.process = process;
        workerJvm.mode = WorkerJvm.Mode.valueOf(mode.toUpperCase());
        workerJvms.put(workerId, workerJvm);
        return workerJvm;
    }

    private void generateWorkerStartScript(String mode, WorkerJvm workerJvm) {
        String[] args = buildArgs(workerJvm, mode);
        // The script must be written into the worker home directory: the worker process is
        // started with that directory as working directory and runs 'bash worker.sh'
        // relative to it. Writing to a bare "worker.sh" would place the script in the
        // agent's current working directory instead.
        File startScript = new File(workerJvm.workerHome, "worker.sh");

        StringBuilder sb = new StringBuilder("#!/bin/bash");
        sb.append("\n");
        for (String arg : args) {
            sb.append(arg).append(" ");
        }
        sb.append("\n");

        Utils.writeText(sb.toString(), startScript);
    }

    // Worker classpath: the agent's own classpath plus every jar uploaded into the
    // test-suite lib directory.
    private String getClasspath() {
        File libDir = new File(agent.getTestSuiteDir(), "lib");
        return CLASSPATH + CLASSPATH_SEPARATOR + new File(libDir, "*").getAbsolutePath();
    }

    // Splits the configured JVM options (client or member, depending on mode) on whitespace.
    private List<String> getJvmOptions(WorkerJvmSettings settings, String mode) {
        String workerVmOptions;
        if ("client".equals(mode)) {
            workerVmOptions = settings.clientVmOptions;
        } else {
            workerVmOptions = settings.vmOptions;
        }

        String[] vmOptionsArray = new String[]{};
        if (workerVmOptions != null && !workerVmOptions.trim().isEmpty()) {
            vmOptionsArray = workerVmOptions.split("\\s+");
        }
        return asList(vmOptionsArray);
    }

    /**
     * Builds the full command line for a worker, taking the configured profiler
     * (perf, yourkit, hprof or none) into account.
     */
    private String[] buildArgs(WorkerJvm workerJvm, String mode) {
        List<String> args = new LinkedList<String>();

        String profiler = settings.profiler;
        if ("perf".equals(profiler)) {
            String[] perfSettings = settings.perfSettings.split("\\s+");
            // perf command always need to be in front of the java command.
            args.addAll(asList(perfSettings));
            args.add("java");
        } else if ("yourkit".equals(profiler)) {
            args.add("java");
            String agentSetting = settings.yourkitConfig
                    .replace("${STABILIZER_HOME}", STABILIZER_HOME.getAbsolutePath())
                    .replace("${WORKER_HOME}", workerJvm.workerHome.getAbsolutePath());
            args.add(agentSetting);
        } else if ("hprof".equals(profiler)) {
            args.add("java");
            args.add(settings.hprofSettings);
        } else {
            args.add("java");
        }

        // Marker file so an OOME can be detected by the agent afterwards.
        args.add("-XX:OnOutOfMemoryError=\"\"touch worker.oome\"\"");
        args.add("-DSTABILIZER_HOME=" + STABILIZER_HOME);
        args.add("-Dhazelcast.logging.type=log4j");
        args.add("-DworkerId=" + workerJvm.id);
        args.add("-DworkerMode=" + mode);
        args.add("-Dlog4j.configuration=file:" + STABILIZER_HOME + File.separator + "conf" + File.separator + "worker-log4j.xml");
        args.add("-classpath");
        args.add(getClasspath());
        args.addAll(getJvmOptions(settings, mode));

        // if it is a client, we start the ClientWorker.
        if ("client".equals(mode)) {
            args.add(ClientWorker.class.getName());
        } else {
            args.add(MemberWorker.class.getName());
        }
        args.add(hzFile.getAbsolutePath());
        args.add(clientHzFile.getAbsolutePath());

        log.info("worker args:" + args);
        return args.toArray(new String[args.size()]);
    }

    // exitValue() throws IllegalThreadStateException while the process is still alive.
    private boolean hasExited(WorkerJvm workerJvm) {
        try {
            workerJvm.process.exitValue();
            return true;
        } catch (IllegalThreadStateException e) {
            return false;
        }
    }

    /**
     * Polls once per second until every worker has written its 'worker.address' file.
     *
     * @throws SpawnWorkerFailedException if a worker process died during startup or the
     *                                    timeout expires before all workers reported in
     */
    private void waitForWorkersStartup(List<WorkerJvm> workers, int workerTimeoutSec) throws InterruptedException {
        List<WorkerJvm> todo = new ArrayList<WorkerJvm>(workers);

        for (int l = 0; l < workerTimeoutSec; l++) {
            for (Iterator<WorkerJvm> it = todo.iterator(); it.hasNext(); ) {
                WorkerJvm jvm = it.next();

                if (hasExited(jvm)) {
                    String message = format("Startup failure: worker on host %s failed during startup, " +
                                    "check '%s/out.log' for more info",
                            getHostAddress(), jvm.workerHome
                    );
                    throw new SpawnWorkerFailedException(message);
                }

                String address = readAddress(jvm);
                if (address != null) {
                    jvm.memberAddress = address;
                    it.remove();
                    log.info(format("Worker: %s Started %s of %s",
                            jvm.id, workers.size() - todo.size(), workers.size()));
                }
            }

            if (todo.isEmpty()) {
                return;
            }

            Utils.sleepSeconds(1);
        }

        workerTimeout(workerTimeoutSec, todo);
    }

    // Builds a readable list of the worker ids that failed to start and throws.
    private void workerTimeout(int workerTimeoutSec, List<WorkerJvm> todo) {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        sb.append(todo.get(0).id);
        for (int l = 1; l < todo.size(); l++) {
            sb.append(",").append(todo.get(l).id);
        }
        sb.append("]");
        throw new SpawnWorkerFailedException(format("Timeout: workers %s of testsuite %s on host %s didn't start within %s seconds",
                sb, agent.getTestSuite().id, getHostAddress(),
                workerTimeoutSec));
    }

    // Reads the address a worker wrote to its 'worker.address' file, or null when the
    // worker has not reported in yet. The file is deleted once it has been consumed.
    private String readAddress(WorkerJvm jvm) {
        File file = new File(jvm.workerHome, "worker.address");
        if (!file.exists()) {
            return null;
        }

        String address = Utils.readObject(file);
        file.delete();
        return address;
    }
}
| stabilizer/src/main/java/com/hazelcast/stabilizer/agent/workerjvm/WorkerJvmLauncher.java | package com.hazelcast.stabilizer.agent.workerjvm;
import com.hazelcast.stabilizer.Utils;
import com.hazelcast.stabilizer.agent.Agent;
import com.hazelcast.stabilizer.agent.SpawnWorkerFailedException;
import com.hazelcast.stabilizer.worker.ClientWorker;
import com.hazelcast.stabilizer.worker.MemberWorker;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import static com.hazelcast.stabilizer.Utils.getHostAddress;
import static com.hazelcast.stabilizer.Utils.getStablizerHome;
import static com.hazelcast.stabilizer.Utils.writeText;
import static java.lang.String.format;
import static java.util.Arrays.asList;
/**
 * Responsible for launching the worker JVMs (member, client and mixed mode) for a single
 * test-suite run of one agent.
 * <p/>
 * For every worker a home directory is created under the test-suite directory, a
 * 'worker.sh' start script is generated inside that directory, and the script is executed
 * with the worker home as working directory. Startup is considered successful once the
 * worker has written its member address to a 'worker.address' file in its home directory.
 */
public class WorkerJvmLauncher {

    private final static Logger log = Logger.getLogger(WorkerJvmLauncher.class);

    private final static String CLASSPATH = System.getProperty("java.class.path");
    private final static File STABILIZER_HOME = getStablizerHome();
    private final static String CLASSPATH_SEPARATOR = System.getProperty("path.separator");
    // Worker ids must be unique within this agent JVM, hence a shared counter.
    private final static AtomicLong WORKER_ID_GENERATOR = new AtomicLong();

    // Guards against logging java.home more than once per launcher instance.
    private final AtomicBoolean javaHomePrinted = new AtomicBoolean();

    private final WorkerJvmSettings settings;
    private final Agent agent;
    private final ConcurrentMap<String, WorkerJvm> workerJvms;
    private final List<WorkerJvm> workersInProgress = new LinkedList<WorkerJvm>();

    private File hzFile;
    private File clientHzFile;
    private File testSuiteDir;

    public WorkerJvmLauncher(Agent agent, ConcurrentMap<String, WorkerJvm> workerJvms, WorkerJvmSettings settings) {
        this.settings = settings;
        this.workerJvms = workerJvms;
        this.agent = agent;
    }

    /**
     * Launches all worker JVMs (server, client and mixed) as configured in the settings.
     *
     * @throws Exception if a worker could not be spawned or did not start within the
     *                   configured startup timeout
     */
    public void launch() throws Exception {
        hzFile = createHzConfigFile();
        clientHzFile = createClientHzConfigFile();

        testSuiteDir = agent.getTestSuiteDir();
        if (!testSuiteDir.exists()) {
            if (!testSuiteDir.mkdirs()) {
                throw new SpawnWorkerFailedException("Couldn't create testSuiteDir: " + testSuiteDir.getAbsolutePath());
            }
        }

        log.info("Spawning Worker JVM using settings: " + settings);
        spawn(settings.memberWorkerCount, "server");
        spawn(settings.clientWorkerCount, "client");
        spawn(settings.mixedWorkerCount, "mixed");
    }

    // Starts 'count' workers of the given mode and blocks until all of them have
    // reported their address or the startup timeout expires.
    private void spawn(int count, String mode) throws Exception {
        log.info(format("Starting %s %s worker Java Virtual Machines", count, mode));

        for (int k = 0; k < count; k++) {
            WorkerJvm worker = startWorkerJvm(mode);
            workersInProgress.add(worker);
        }

        log.info(format("Finished starting %s %s worker Java Virtual Machines", count, mode));

        waitForWorkersStartup(workersInProgress, settings.workerStartupTimeout);
        workersInProgress.clear();
    }

    // Writes the member Hazelcast configuration to a temp file that is passed to the workers.
    private File createHzConfigFile() throws IOException {
        File hzConfigFile = File.createTempFile("hazelcast", "xml");
        hzConfigFile.deleteOnExit();
        writeText(settings.hzConfig, hzConfigFile);
        return hzConfigFile;
    }

    // Writes the client Hazelcast configuration to a temp file that is passed to the workers.
    private File createClientHzConfigFile() throws IOException {
        File clientHzConfigFile = File.createTempFile("client-hazelcast", "xml");
        clientHzConfigFile.deleteOnExit();
        writeText(settings.clientHzConfig, clientHzConfigFile);
        return clientHzConfigFile;
    }

    private String getJavaHome(String javaVendor, String javaVersion) {
        // NOTE(review): javaVendor/javaVersion are currently ignored and the agent's own
        // JVM is reused for the workers - TODO confirm whether vendor/version selection
        // should be implemented here.
        String javaHome = System.getProperty("java.home");
        if (javaHomePrinted.compareAndSet(false, true)) {
            log.info("java.home=" + javaHome);
        }
        return javaHome;
    }

    // Creates the worker home, generates its start script and launches the process.
    private WorkerJvm startWorkerJvm(String mode) throws IOException {
        String workerId = "worker-" + getHostAddress() + "-" + WORKER_ID_GENERATOR.incrementAndGet() + "-" + mode;
        File workerHome = new File(testSuiteDir, workerId);
        if (!workerHome.exists()) {
            if (!workerHome.mkdir()) {
                throw new SpawnWorkerFailedException("Could not create workerhome: " + workerHome.getAbsolutePath());
            }
        }

        String javaHome = getJavaHome(settings.javaVendor, settings.javaVersion);

        WorkerJvm workerJvm = new WorkerJvm(workerId);
        workerJvm.workerHome = workerHome;
        generateWorkerStartScript(mode, workerJvm);

        // Each argument must be a separate element; the previous single-string form
        // new ProcessBuilder("bash worker.sh") would be interpreted as the name of an
        // executable called "bash worker.sh" and fail to start.
        ProcessBuilder processBuilder = new ProcessBuilder("bash", "worker.sh")
                .directory(workerHome)
                .redirectErrorStream(true);

        Map<String, String> environment = processBuilder.environment();
        String path = javaHome + File.pathSeparator + "bin:" + environment.get("PATH");
        environment.put("PATH", path);
        environment.put("JAVA_HOME", javaHome);

        Process process = processBuilder.start();
        File logFile = new File(workerHome, "out.log");
        new WorkerJvmProcessOutputGobbler(process.getInputStream(), new FileOutputStream(logFile)).start();
        workerJvm.process = process;
        workerJvm.mode = WorkerJvm.Mode.valueOf(mode.toUpperCase());
        workerJvms.put(workerId, workerJvm);
        return workerJvm;
    }

    private void generateWorkerStartScript(String mode, WorkerJvm workerJvm) {
        String[] args = buildArgs(workerJvm, mode);
        // The script must be written into the worker home directory: the worker process is
        // started with that directory as working directory and runs 'bash worker.sh'
        // relative to it. Writing to a bare "worker.sh" would place the script in the
        // agent's current working directory instead.
        File startScript = new File(workerJvm.workerHome, "worker.sh");

        StringBuilder sb = new StringBuilder("#!/bin/bash");
        sb.append("\n");
        for (String arg : args) {
            sb.append(arg).append(" ");
        }
        sb.append("\n");

        Utils.writeText(sb.toString(), startScript);
    }

    // Worker classpath: the agent's own classpath plus every jar uploaded into the
    // test-suite lib directory.
    private String getClasspath() {
        File libDir = new File(agent.getTestSuiteDir(), "lib");
        return CLASSPATH + CLASSPATH_SEPARATOR + new File(libDir, "*").getAbsolutePath();
    }

    // Splits the configured JVM options (client or member, depending on mode) on whitespace.
    private List<String> getJvmOptions(WorkerJvmSettings settings, String mode) {
        String workerVmOptions;
        if ("client".equals(mode)) {
            workerVmOptions = settings.clientVmOptions;
        } else {
            workerVmOptions = settings.vmOptions;
        }

        String[] vmOptionsArray = new String[]{};
        if (workerVmOptions != null && !workerVmOptions.trim().isEmpty()) {
            vmOptionsArray = workerVmOptions.split("\\s+");
        }
        return asList(vmOptionsArray);
    }

    /**
     * Builds the full command line for a worker, taking the configured profiler
     * (perf, yourkit, hprof or none) into account.
     */
    private String[] buildArgs(WorkerJvm workerJvm, String mode) {
        List<String> args = new LinkedList<String>();

        String profiler = settings.profiler;
        if ("perf".equals(profiler)) {
            String[] perfSettings = settings.perfSettings.split("\\s+");
            // perf command always need to be in front of the java command.
            args.addAll(asList(perfSettings));
            args.add("java");
        } else if ("yourkit".equals(profiler)) {
            args.add("java");
            String agentSetting = settings.yourkitConfig
                    .replace("${STABILIZER_HOME}", STABILIZER_HOME.getAbsolutePath())
                    .replace("${WORKER_HOME}", workerJvm.workerHome.getAbsolutePath());
            args.add(agentSetting);
        } else if ("hprof".equals(profiler)) {
            args.add("java");
            args.add(settings.hprofSettings);
        } else {
            args.add("java");
        }

        // Marker file so an OOME can be detected by the agent afterwards.
        args.add("-XX:OnOutOfMemoryError=\"\"touch worker.oome\"\"");
        args.add("-DSTABILIZER_HOME=" + STABILIZER_HOME);
        args.add("-Dhazelcast.logging.type=log4j");
        args.add("-DworkerId=" + workerJvm.id);
        args.add("-DworkerMode=" + mode);
        args.add("-Dlog4j.configuration=file:" + STABILIZER_HOME + File.separator + "conf" + File.separator + "worker-log4j.xml");
        args.add("-classpath");
        args.add(getClasspath());
        args.addAll(getJvmOptions(settings, mode));

        // if it is a client, we start the ClientWorker.
        if ("client".equals(mode)) {
            args.add(ClientWorker.class.getName());
        } else {
            args.add(MemberWorker.class.getName());
        }
        args.add(hzFile.getAbsolutePath());
        args.add(clientHzFile.getAbsolutePath());

        log.info("worker args:" + args);
        return args.toArray(new String[args.size()]);
    }

    // exitValue() throws IllegalThreadStateException while the process is still alive.
    private boolean hasExited(WorkerJvm workerJvm) {
        try {
            workerJvm.process.exitValue();
            return true;
        } catch (IllegalThreadStateException e) {
            return false;
        }
    }

    /**
     * Polls once per second until every worker has written its 'worker.address' file.
     *
     * @throws SpawnWorkerFailedException if a worker process died during startup or the
     *                                    timeout expires before all workers reported in
     */
    private void waitForWorkersStartup(List<WorkerJvm> workers, int workerTimeoutSec) throws InterruptedException {
        List<WorkerJvm> todo = new ArrayList<WorkerJvm>(workers);

        for (int l = 0; l < workerTimeoutSec; l++) {
            for (Iterator<WorkerJvm> it = todo.iterator(); it.hasNext(); ) {
                WorkerJvm jvm = it.next();

                if (hasExited(jvm)) {
                    String message = format("Startup failure: worker on host %s failed during startup, " +
                                    "check '%s/out.log' for more info",
                            getHostAddress(), jvm.workerHome
                    );
                    throw new SpawnWorkerFailedException(message);
                }

                String address = readAddress(jvm);
                if (address != null) {
                    jvm.memberAddress = address;
                    it.remove();
                    log.info(format("Worker: %s Started %s of %s",
                            jvm.id, workers.size() - todo.size(), workers.size()));
                }
            }

            if (todo.isEmpty()) {
                return;
            }

            Utils.sleepSeconds(1);
        }

        workerTimeout(workerTimeoutSec, todo);
    }

    // Builds a readable list of the worker ids that failed to start and throws.
    private void workerTimeout(int workerTimeoutSec, List<WorkerJvm> todo) {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        sb.append(todo.get(0).id);
        for (int l = 1; l < todo.size(); l++) {
            sb.append(",").append(todo.get(l).id);
        }
        sb.append("]");
        throw new SpawnWorkerFailedException(format("Timeout: workers %s of testsuite %s on host %s didn't start within %s seconds",
                sb, agent.getTestSuite().id, getHostAddress(),
                workerTimeoutSec));
    }

    // Reads the address a worker wrote to its 'worker.address' file, or null when the
    // worker has not reported in yet. The file is deleted once it has been consumed.
    private String readAddress(WorkerJvm jvm) {
        File file = new File(jvm.workerHome, "worker.address");
        if (!file.exists()) {
            return null;
        }

        String address = Utils.readObject(file);
        file.delete();
        return address;
    }
}
| Worker created using bash script
| stabilizer/src/main/java/com/hazelcast/stabilizer/agent/workerjvm/WorkerJvmLauncher.java | Worker created using bash script |
|
Java | apache-2.0 | 7cf2a9f6a5ae634bdea75c528442d0d26d1fe0bd | 0 | OpenConext/OpenConext-api,OpenConext/OpenConext-api | /*
* Copyright 2012 SURFnet bv, The Netherlands
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.surfnet.coin.janus.domain;
import java.util.Map;
/**
* Subset of the response you get for "getEntity" in the Janus Rest API
* <p/>
* Full response:
* <pre>Entity {eid=1087, entityid=http://mujina-sp-1087, revision=0, parent=null,
* revisionnote=No revision note, type=saml20-sp, allowedall=no, workflow=prodaccepted,
* metadataurl=null, prettyname=http://mujina-sp-1087, arp=3184, user=0}</pre>
*/
/**
 * Subset of the response you get for "getEntity" in the Janus Rest API.
 * <p/>
 * Full response:
 * <pre>Entity {eid=1087, entityid=http://mujina-sp-1087, revision=0, parent=null,
 * revisionnote=No revision note, type=saml20-sp, allowedall=no, workflow=prodaccepted,
 * metadataurl=null, prettyname=http://mujina-sp-1087, arp=3184, user=0}</pre>
 */
public class JanusEntity {

    private String entityId;
    private int revision;
    private String workflowStatus;
    private String type;
    private boolean allowAll;
    private int eid;
    private String prettyName;

    /** Only used by {@link #fromJanusResponse(Map)}. */
    private JanusEntity() {
    }

    public JanusEntity(int eid, int revision) {
        this.eid = eid;
        this.revision = revision;
    }

    public JanusEntity(int eid, String entityId) {
        this.eid = eid;
        this.entityId = entityId;
    }

    public JanusEntity(int eid, int revision, String entityId) {
        this.eid = eid;
        this.revision = revision;
        this.entityId = entityId;
    }

    /**
     * Builds a JanusEntity from the raw key/value map of a Janus "getEntity" response.
     * The "revision" and "eid" values arrive as strings and are parsed to ints here;
     * "allowedall" is the string "yes" or "no".
     */
    public static JanusEntity fromJanusResponse(Map<String, Object> janusResponse) {
        JanusEntity entity = new JanusEntity();
        entity.entityId = (String) janusResponse.get("entityid");
        entity.revision = Integer.parseInt((String) janusResponse.get("revision"));
        entity.type = (String) janusResponse.get("type");
        entity.workflowStatus = (String) janusResponse.get("workflow");
        entity.allowAll = "yes".equals((String) janusResponse.get("allowedall"));
        entity.eid = Integer.parseInt((String) janusResponse.get("eid"));
        entity.prettyName = (String) janusResponse.get("prettyname");
        return entity;
    }

    public String getEntityId() {
        return entityId;
    }

    public void setEntityId(String entityId) {
        this.entityId = entityId;
    }

    public int getRevision() {
        return revision;
    }

    public void setRevision(int revision) {
        this.revision = revision;
    }

    public String getWorkflowStatus() {
        return workflowStatus;
    }

    public void setWorkflowStatus(String workflowStatus) {
        this.workflowStatus = workflowStatus;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public boolean isAllowAll() {
        return allowAll;
    }

    public void setAllowAll(boolean allowAll) {
        this.allowAll = allowAll;
    }

    public int getEid() {
        return eid;
    }

    public void setEid(int eid) {
        this.eid = eid;
    }

    public String getPrettyName() {
        return prettyName;
    }

    public void setPrettyName(String prettyName) {
        this.prettyName = prettyName;
    }
}
| coin-api-serviceregistry-client/src/main/java/nl/surfnet/coin/janus/domain/JanusEntity.java | /*
* Copyright 2012 SURFnet bv, The Netherlands
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.surfnet.coin.janus.domain;
import java.util.Map;
/**
* Subset of the response you get for "getEntity" in the Janus Rest API
* <p/>
* Full response:
* <pre>Entity {eid=1087, entityid=http://mujina-sp-1087, revision=0, parent=null,
* revisionnote=No revision note, type=saml20-sp, allowedall=no, workflow=prodaccepted,
* metadataurl=null, prettyname=http://mujina-sp-1087, arp=3184, user=0}</pre>
*/
/**
 * Subset of the response you get for "getEntity" in the Janus Rest API.
 * <p/>
 * Full response:
 * <pre>Entity {eid=1087, entityid=http://mujina-sp-1087, revision=0, parent=null,
 * revisionnote=No revision note, type=saml20-sp, allowedall=no, workflow=prodaccepted,
 * metadataurl=null, prettyname=http://mujina-sp-1087, arp=3184, user=0}</pre>
 */
public class JanusEntity {

    private String entityId;
    private int revision;
    private String workflowStatus;
    private String type;
    private boolean allowAll;
    private int eid;
    private String prettyName;

    /** Only used by {@link #fromJanusResponse(Map)}. */
    private JanusEntity() {
    }

    public JanusEntity(int eid, int revision) {
        this.eid = eid;
        this.revision = revision;
    }

    public JanusEntity(int eid, String entityId) {
        super();
        this.eid = eid;
        this.entityId = entityId;
    }

    /**
     * Convenience constructor that sets eid, revision and entityId in one go.
     */
    public JanusEntity(int eid, int revision, String entityId) {
        this.eid = eid;
        this.revision = revision;
        this.entityId = entityId;
    }

    /**
     * Builds a JanusEntity from the raw key/value map of a Janus "getEntity" response.
     * The "revision" and "eid" values arrive as strings and are parsed to ints; the
     * "allowedall" value is the string "yes" or "no".
     */
    public static JanusEntity fromJanusResponse(Map<String, Object> janusResponse) {
        JanusEntity janusEntity = new JanusEntity();
        janusEntity.setEntityId((String) janusResponse.get("entityid"));
        janusEntity.setRevision((String) janusResponse.get("revision"));
        janusEntity.setType((String) janusResponse.get("type"));
        janusEntity.setWorkflowStatus((String) janusResponse.get("workflow"));
        janusEntity.setAllowAll((String) janusResponse.get("allowedall"));
        janusEntity.setEid((String) janusResponse.get("eid"));
        janusEntity.setPrettyName((String) janusResponse.get("prettyname"));
        return janusEntity;
    }

    public String getEntityId() {
        return entityId;
    }

    public void setEntityId(String entityId) {
        this.entityId = entityId;
    }

    public int getRevision() {
        return revision;
    }

    public void setRevision(int revision) {
        this.revision = revision;
    }

    // Parses the string form used in the Janus REST response.
    private void setRevision(String revision) {
        this.revision = Integer.parseInt(revision);
    }

    public String getWorkflowStatus() {
        return workflowStatus;
    }

    public void setWorkflowStatus(String workflowStatus) {
        this.workflowStatus = workflowStatus;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public boolean isAllowAll() {
        return allowAll;
    }

    public void setAllowAll(boolean allowAll) {
        this.allowAll = allowAll;
    }

    // Janus transmits the flag as "yes"/"no"; anything other than "yes" means false.
    private void setAllowAll(String allowedall) {
        this.allowAll = "yes".equals(allowedall);
    }

    public int getEid() {
        return eid;
    }

    public void setEid(int eid) {
        this.eid = eid;
    }

    // Parses the string form used in the Janus REST response.
    private void setEid(String eid) {
        this.eid = Integer.parseInt(eid);
    }

    public String getPrettyName() {
        return prettyName;
    }

    public void setPrettyName(String prettyName) {
        this.prettyName = prettyName;
    }
}
| Convenience constructor for JanusEntity
| coin-api-serviceregistry-client/src/main/java/nl/surfnet/coin/janus/domain/JanusEntity.java | Convenience constructor for JanusEntity |
|
Java | apache-2.0 | e0e9f83b160444798ede9572a22c7fb4ec31b0d1 | 0 | web-education/web-utils,web-education/web-utils | /*
* Copyright © WebServices pour l'Éducation, 2014
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.wseduc.webutils.security;
import org.vertx.java.core.MultiMap;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
/**
 * Blacklist-based XSS stripping helpers: removes script tags,
 * {@code javascript:}/{@code vbscript:} schemes, {@code eval}/{@code expression}
 * calls and quoted {@code on*} event-handler attributes from strings.
 */
public final class XSSUtils {

    private XSSUtils() {}

    // Fragments removed by stripXSS(). The last pattern only matches event
    // handlers preceded by a quote character, i.e. attribute-value position.
    // NOTE(review): the alternative "after| before" carries a leading space and
    // "before" already appears earlier in the alternation — looks unintentional;
    // confirm before changing.
    private static final Pattern[] patterns = new Pattern[]{
            Pattern.compile("<script>(.*?)</script>", Pattern.CASE_INSENSITIVE),
//            Pattern.compile("src[\r\n]*=[\r\n]*\\\'(.*?)\\\'", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
//            Pattern.compile("src[\r\n]*=[\r\n]*\\\"(.*?)\\\"", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("<script>", Pattern.CASE_INSENSITIVE),
            Pattern.compile("</script>", Pattern.CASE_INSENSITIVE),
            Pattern.compile("<script(.*?)>", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("eval\\((.*?)\\)", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("expression\\((.*?)\\)", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("javascript:", Pattern.CASE_INSENSITIVE),
            Pattern.compile("vbscript:", Pattern.CASE_INSENSITIVE),
            Pattern.compile("('|\")\\s*on(click|context|mouse|dblclick|key|abort|error|before|hash|load|page|" +
                    "resize|scroll|unload|blur|change|focus|in|reset|search|select|submit|drag|drop|copy|cut|paste|" +
                    "after| before|can|end|duration|emp|play|progress|seek|stall|sus|time|volume|waiting|message|open|touch|" +
                    "on|off|pop|show|storage|toggle|wheel)(.*?)=",
                    Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
    };

    /**
     * Sanitizes every value of the given map in place with {@link #stripXSS(String)}.
     *
     * @param m the multimap to sanitize; it is mutated and returned
     * @return the same multimap instance
     */
    public static MultiMap safeMultiMap(MultiMap m) {
        for (String paramName : m.names()) {
            List<String> rawValues = m.getAll(paramName);
            if (rawValues == null) continue;
            List<String> sanitized = new ArrayList<>();
            for (String rawValue : rawValues) {
                sanitized.add(stripXSS(rawValue));
            }
            m.set(paramName, sanitized);
        }
        return m;
    }

    /**
     * Removes NUL characters and every blacklist match from {@code value}.
     *
     * @param value input string, may be null
     * @return the stripped string, or null when the input was null
     */
    public static String stripXSS(String value) {
        if (value == null) {
            return null;
        }
        //value = ESAPI.encoder().canonicalize(value);
        String cleaned = value.replaceAll("\0", "");
        for (int i = 0; i < patterns.length; i++) {
            cleaned = patterns[i].matcher(cleaned).replaceAll("");
        }
        return cleaned;
    }
}
| src/main/java/fr/wseduc/webutils/security/XSSUtils.java | /*
* Copyright © WebServices pour l'Éducation, 2014
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.wseduc.webutils.security;
import org.vertx.java.core.MultiMap;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
/**
 * Blacklist-based XSS stripping helpers: removes script tags,
 * {@code javascript:}/{@code vbscript:} schemes, {@code eval}/{@code expression}
 * calls and bare {@code on*} event-handler attributes from strings.
 */
public final class XSSUtils {

    private XSSUtils() {}

    // Fragments that stripXSS() deletes from its input.
    private static final Pattern[] patterns = new Pattern[]{
            Pattern.compile("<script>(.*?)</script>", Pattern.CASE_INSENSITIVE),
//            Pattern.compile("src[\r\n]*=[\r\n]*\\\'(.*?)\\\'", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
//            Pattern.compile("src[\r\n]*=[\r\n]*\\\"(.*?)\\\"", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("<script>", Pattern.CASE_INSENSITIVE),
            Pattern.compile("</script>", Pattern.CASE_INSENSITIVE),
            Pattern.compile("<script(.*?)>", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("eval\\((.*?)\\)", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("expression\\((.*?)\\)", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
            Pattern.compile("javascript:", Pattern.CASE_INSENSITIVE),
            Pattern.compile("vbscript:", Pattern.CASE_INSENSITIVE),
            Pattern.compile("on(click|context|mouse|dblclick|key|abort|error|before|hash|load|page|" +
                    "resize|scroll|unload|blur|change|focus|in|reset|se|submit|drag|drop|copy|cut|paste|" +
                    "after| before|can|end|duration|emp|p|seek|stall|sus|time|volume|waiting|message|open|touch|" +
                    "on|off|pop|show|storage|toggle|wheel)(.*?)=",
                    Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL),
    };

    /**
     * Sanitizes every value of the given map in place with {@link #stripXSS(String)}.
     *
     * @param m the multimap to sanitize; it is mutated and returned
     * @return the same multimap instance
     */
    public static MultiMap safeMultiMap(MultiMap m) {
        for (String key : m.names()) {
            List<String> currentValues = m.getAll(key);
            if (currentValues == null) {
                continue;
            }
            List<String> replacement = new ArrayList<>();
            for (String candidate : currentValues) {
                replacement.add(stripXSS(candidate));
            }
            m.set(key, replacement);
        }
        return m;
    }

    /**
     * Removes NUL characters and every blacklist match from {@code value}.
     *
     * @param value input string, may be null
     * @return the stripped string, or null when the input was null
     */
    public static String stripXSS(String value) {
        if (value == null) {
            return null;
        }
        //value = ESAPI.encoder().canonicalize(value);
        String result = value.replaceAll("\0", "");
        for (Pattern blacklisted : patterns) {
            result = blacklisted.matcher(result).replaceAll("");
        }
        return result;
    }
}
| update xss filter rules
| src/main/java/fr/wseduc/webutils/security/XSSUtils.java | update xss filter rules |
|
Java | apache-2.0 | d93d2160455f4d53a1f6be907a5c37f343d4d909 | 0 | jimma/xerces,jimma/xerces,RackerWilliams/xercesj,ronsigal/xerces,ronsigal/xerces,RackerWilliams/xercesj,RackerWilliams/xercesj,jimma/xerces,ronsigal/xerces | /*
* The Apache Software License, Version 1.1
*
*
* Copyright (c) 1999-2002 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Xerces" and "Apache Software Foundation" must
* not be used to endorse or promote products derived from this
* software without prior written permission. For written
* permission, please contact [email protected].
*
* 5. Products derived from this software may not be called "Apache",
* nor may "Apache" appear in their name, without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation and was
* originally based on software copyright (c) 1999, International
* Business Machines, Inc., http://www.apache.org. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package dom.dom3;
import org.apache.xerces.dom.*;
import org.w3c.dom.*;
import org.w3c.dom.ls.*;
import java.io.Reader;
import java.io.StringReader;
import dom.util.Assertion;
/**
 * The program tests various DOM Level 3 functionality
*/
public class Test implements DOMErrorHandler {

    /**
     * Runs the DOM Level 3 test sections in sequence:
     * #1 namespace lookup, #2 normalizeDocument() with validation,
     * #3 namespace fixup in the core DOM, #4 namespace fixup during
     * serialization, #5 Text.getWholeText()/replaceWholeText().
     * Failures are reported via {@link Assertion} or printed stack traces.
     */
    public static void main(String[] argv) {
        try {
            boolean namespaces = true;
            System.out.println("Running dom.dom3.Test...");

            System.setProperty(DOMImplementationRegistry.PROPERTY, "org.apache.xerces.dom.DOMImplementationSourceImpl");

            DOMImplementationLS impl = (DOMImplementationLS) DOMImplementationRegistry.newInstance().getDOMImplementation("LS-Load");
            Assertion.assert(impl != null, "domImplementation != null");

            DOMBuilder builder = impl.createDOMBuilder(DOMImplementationLS.MODE_SYNCHRONOUS, null);
            DOMWriter writer = impl.createDOMWriter();
            builder.setFeature("http://xml.org/sax/features/namespaces", namespaces);
            builder.setFeature("http://xml.org/sax/features/validation", false);

            //************************
            // TEST: lookupNamespacePrefix
            //       isDefaultNamespace
            //       lookupNamespaceURI
            //************************
            System.out.println("TEST #1: lookupNamespacePrefix, isDefaultNamespace, lookupNamespaceURI, input: tests/dom/dom3/input.xml");
            {
                Document doc = builder.parseURI("tests/dom/dom3/input.xml");
                NodeList ls = doc.getElementsByTagName("a:elem_a");

                NodeImpl elem = (NodeImpl) ls.item(0);
                if (namespaces) {
                    Assertion.assert(elem.lookupNamespacePrefix(
                            "http://www.example.com", true) == null,
                            "[a:elem_a].lookupNamespacePrefix(http://www.example.com)==null");

                    Assertion.assert(elem.isDefaultNamespace("http://www.example.com") == true,
                            "[a:elem_a].isDefaultNamespace(http://www.example.com)==true");

                    Assertion.assert(elem.lookupNamespacePrefix(
                            "http://www.example.com", false).equals("ns1"),
                            "[a:elem_a].lookupNamespacePrefix(http://www.example.com)==ns1");

                    Assertion.assert(elem.lookupNamespaceURI("xsi").equals("http://www.w3.org/2001/XMLSchema-instance"),
                            "[a:elem_a].lookupNamespaceURI('xsi') == 'http://www.w3.org/2001/XMLSchema-instance'");
                } else {
                    Assertion.assert(elem.lookupNamespacePrefix(
                            "http://www.example.com", false) == null, "lookupNamespacePrefix(http://www.example.com)==null");
                }

                ls = doc.getElementsByTagName("bar:leaf");
                elem = (NodeImpl) ls.item(0);
                Assertion.assert(elem.lookupNamespacePrefix("url1:", false).equals("foo"),
                        "[bar:leaf].lookupNamespacePrefix('url1:', false) == foo");

                // Create p:baz with namespace 'b:' and a decl xmlns:x='b:'
                ls = doc.getElementsByTagName("baz");
                elem = (NodeImpl) ls.item(0);
                ls = doc.getElementsByTagName("elem8");
                elem = (NodeImpl) ls.item(0);
                Element e1 = doc.createElementNS("b:", "p:baz");
                e1.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:x", "b:");
                elem.appendChild(e1);
                Assertion.assert(((NodeImpl) e1).lookupNamespacePrefix("b:", false).equals("p"),
                        "[p:baz].lookupNamespacePrefix('b:', false) == p");

                Assertion.assert(elem.lookupNamespaceURI("xsi").equals("http://www.w3.org/2001/XMLSchema-instance"),
                        "[bar:leaf].lookupNamespaceURI('xsi') == 'http://www.w3.org/2001/XMLSchema-instance'");
            }

            //************************
            //* Test normalizeDocument()
            //************************
            System.out.println("TEST #2: normalizeDocument() - 3 errors, input: tests/dom/dom3/schema.xml");
            {
                builder.setFeature("validate", true);
                DocumentImpl core = (DocumentImpl) builder.parseURI("tests/dom/dom3/schema.xml");

                NodeList ls2 = core.getElementsByTagName("decVal");
                Element testElem = (Element) ls2.item(0);
                testElem.removeAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns");

                ls2 = core.getElementsByTagName("v02:decVal");
                testElem = (Element) ls2.item(0);
                testElem.setPrefix("myPrefix");

                Element root = core.getDocumentElement();
                Element newElem = core.createElementNS(null, "decVal");
                // "string" is type-invalid for decVal and should be reported;
                // flip the condition to feed the type-valid "4.5" instead.
                String data = "4.5";
                if (true) {
                    data = "string";
                }
                newElem.appendChild(core.createTextNode(data));
                root.insertBefore(newElem, testElem);

                newElem = core.createElementNS(null, "notInSchema");
                newElem.appendChild(core.createTextNode("added new element"));
                root.insertBefore(newElem, testElem);

                root.appendChild(core.createElementNS("UndefinedNamespace", "NS1:foo"));

                core.setErrorHandler(new Test());
                core.setNormalizationFeature("validate", true);
                core.normalizeDocument();

                // second pass without validation, stripping comments
                core.setNormalizationFeature("validate", false);
                core.setNormalizationFeature("comments", false);
                core.normalizeDocument();

                builder.setFeature("validate", false);
            }

            //************************
            //* Test normalizeDocument(): namespace fixup in the core DOM
            //************************
            System.out.println("TEST #3: normalizeDocument() core");
            {
                Document doc = createNSFixupDocument();
                doc.normalizeDocument();

                // make sure the fixup algorithm works correctly
                Element root = doc.getDocumentElement();
                Element child1 = (Element) root.getFirstChild();
                Element child2 = (Element) child1.getNextSibling();
                Element child3 = (Element) child2.getNextSibling();
                checkNSFixup(root, child1, child3);
            }

            //************************
            //* Test namespace fixup during serialization
            //************************
            System.out.println("TEST #4: namespace fixup during serialization");
            {
                Document doc = createNSFixupDocument();

                // serialize and re-parse: the writer must apply the same fixup
                String xmlData = writer.writeToString(doc);
                Reader r = new StringReader(xmlData);
                DOMInputSource in = impl.createDOMInputSource();
                in.setCharacterStream(r);
                doc = builder.parse(in);

                Element root = doc.getDocumentElement();
                Element child1 = (Element) root.getFirstChild();
                Element child2 = (Element) child1.getNextSibling();
                Element child3 = (Element) child2.getNextSibling();
                checkNSFixup(root, child1, child3);
            }

            //************************
            // TEST: replaceWholeText()
            //       getWholeText()
            //************************
            System.out.println("TEST #5: wholeText, input: tests/dom/dom3/wholeText.xml");
            {
                builder.setFeature("validate", false);
                builder.setFeature("entities", true);
                DocumentImpl doc = (DocumentImpl) builder.parseURI("tests/dom/dom3/wholeText.xml");
                Element test = (Element) doc.getElementsByTagName("elem").item(0);
                test.appendChild(doc.createTextNode("Address: "));
                test.appendChild(doc.createEntityReference("ent2"));
                test.appendChild(doc.createTextNode("City: "));
                test.appendChild(doc.createEntityReference("ent1"));

                // getWholeText() must concatenate text across entity references
                // (assumes ent1/ent2 expand to the address fragments below —
                // TODO confirm against tests/dom/dom3/wholeText.xml)
                NodeList ls = test.getChildNodes();
                Assertion.assert(ls.getLength() == 5, "List length");
                String compare1 = "Home Address: 1900 Dallas Road (East) City: Dallas. California. USA PO #5668";
                Assertion.assert(((TextImpl) ls.item(0)).getWholeText().equals(compare1), "Compare1");
                String compare2 = "Address: 1900 Dallas Road (East) City: Dallas. California. USA PO #5668";
                Assertion.assert(((TextImpl) ls.item(1)).getWholeText().equals(compare2), "Compare2");

                // TEST replaceWholeText(): replacing a read-only node must
                // create a new node and collapse the logical text run
                ((NodeImpl) ls.item(0)).setReadOnly(true, true);
                TextImpl original = (TextImpl) ls.item(0);
                Node newNode = original.replaceWholeText("Replace with this text");
                ls = test.getChildNodes();
                Assertion.assert(ls.getLength() == 1, "Length == 1");
                Assertion.assert(ls.item(0).getNodeValue().equals("Replace with this text"), "Replacement works");
                Assertion.assert(newNode != original, "New node created");

                // replace text for a node which is not yet attached to the tree
                Text text = doc.createTextNode("readonly");
                ((NodeImpl) text).setReadOnly(true, true);
                text = text.replaceWholeText("Data");
                Assertion.assert(text.getNodeValue().equals("Data"), "New value 'Data'");

                // test with second child whose content can't be replaced
                test = (Element) doc.getElementsByTagName("elem").item(1);
                try {
                    ((Text) test.getFirstChild()).replaceWholeText("can't replace");
                } catch (DOMException e) {
                    Assertion.assert(e != null);
                }
                String compare3 = "Test: The Content ends here. ";
                // NOTE(review): disabled expectation kept for reference — re-enable
                // once getWholeText() behaves as expected for this fixture.
                //Assertion.assert(((Text)test.getFirstChild()).getWholeText().equals(compare3), "Compare3");
            }

        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Builds the document shared by the namespace-fixup tests (#3 and #4):
     * an xsl:stylesheet root with three children carrying conflicting and
     * undeclared namespace prefixes that normalization must repair.
     */
    private static Document createNSFixupDocument() {
        Document doc = new DocumentImpl();
        Element root = doc.createElementNS("http://www.w3.org/1999/XSL/Transform", "xsl:stylesheet");
        doc.appendChild(root);
        root.setAttributeNS("http://attr1", "xsl:attr1", "");

        Element child1 = doc.createElementNS("http://child1", "NS2:child1");
        child1.setAttributeNS("http://attr2", "NS2:attr2", "");
        root.appendChild(child1);

        Element child2 = doc.createElementNS("http://child2", "NS4:child2");
        child2.setAttributeNS("http://attr3", "attr3", "");
        root.appendChild(child2);

        Element child3 = doc.createElementNS("http://www.w3.org/1999/XSL/Transform", "xsl:child3");
        child3.setAttributeNS("http://a1", "attr1", "");
        child3.setAttributeNS("http://a2", "xsl:attr2", "");
        child3.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:a1", "http://a1");
        child3.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:xsl", "http://a2");

        Element child4 = doc.createElementNS(null, "child4");
        child4.setAttributeNS("http://a1", "xsl:attr1", "");
        child4.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns", "default");
        child3.appendChild(child4);

        root.appendChild(child3);
        return doc;
    }

    /**
     * Asserts the namespace declarations that the fixup algorithm is
     * expected to synthesize on the document built by
     * {@link #createNSFixupDocument()}.
     */
    private static void checkNSFixup(Element root, Element child1, Element child3) {
        // xsl:stylesheet should include 2 namespace declarations
        String name = root.getNodeName();
        Assertion.assert(name.equals("xsl:stylesheet"), "xsl:stylesheet");
        String value = root.getAttributeNS("http://www.w3.org/2000/xmlns/", "xsl");
        Assertion.assert(value != null, "xmlns:xsl != null");
        Assertion.assert(value.equals("http://www.w3.org/1999/XSL/Transform"), "xmlns:xsl=" + value);
        value = root.getAttributeNS("http://www.w3.org/2000/xmlns/", "NS1");
        Assertion.assert(value != null &&
                value.equals("http://attr1"), "xmlns:NS1=" + value);

        // child1 includes 2 namespace declarations
        Assertion.assert(child1.getNodeName().equals("NS2:child1"), "NS2:child1");
        value = child1.getAttributeNS("http://www.w3.org/2000/xmlns/", "NS2");
        Assertion.assert(value != null &&
                value.equals("http://child1"), "xmlns:NS2=" + value);
        value = child1.getAttributeNS("http://www.w3.org/2000/xmlns/", "NS3");
        Assertion.assert(value != null &&
                value.equals("http://attr2"), "xmlns:NS3=" + value);

        // child3
        Assertion.assert(child3.getNodeName().equals("xsl:child3"), "xsl:child3");
        value = child3.getAttributeNS("http://www.w3.org/2000/xmlns/", "NS6");
        Assertion.assert(value != null &&
                value.equals("http://a2"), "xmlns:NS6=" + value);
        value = child3.getAttributeNS("http://www.w3.org/2000/xmlns/", "a1");
        Assertion.assert(value != null &&
                value.equals("http://a1"), "xmlns:a1=" + value);
        value = child3.getAttributeNS("http://www.w3.org/2000/xmlns/", "xsl");
        Assertion.assert(value != null &&
                value.equals("http://www.w3.org/1999/XSL/Transform"), "xmlns:xsl=" + value);
        Attr attr = child3.getAttributeNodeNS("http://a2", "attr2");
        Assertion.assert(attr != null, "NS6:attr2 !=null");
        Assertion.assert(child3.getAttributes().getLength() == 5, "xsl:child3 has 5 attrs");
    }

    /**
     * DOMErrorHandler callback: prints errors (bare) and warnings (prefixed)
     * and always tells the processor to continue.
     */
    public boolean handleError(DOMError error) {
        short severity = error.getSeverity();
        if (severity == DOMError.SEVERITY_ERROR) {
            System.out.println(error.getMessage());
        }

        if (severity == DOMError.SEVERITY_WARNING) {
            System.out.println("[Warning]: " + error.getMessage());
        }

        return true;
    }
}
| tests/dom/dom3/Test.java | /*
* The Apache Software License, Version 1.1
*
*
* Copyright (c) 1999-2002 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Xerces" and "Apache Software Foundation" must
* not be used to endorse or promote products derived from this
* software without prior written permission. For written
* permission, please contact [email protected].
*
* 5. Products derived from this software may not be called "Apache",
* nor may "Apache" appear in their name, without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation and was
* originally based on software copyright (c) 1999, International
* Business Machines, Inc., http://www.apache.org. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package dom.dom3;
import org.apache.xerces.dom.*;
import org.w3c.dom.*;
import org.w3c.dom.ls.*;
import dom.util.Assertion;
/**
 * The program tests various DOM Level 3 functionality
*/
public class Test implements DOMErrorHandler{

    /**
     * Runs three DOM Level 3 test sections in sequence:
     * #1 namespace lookup methods, #2 normalizeDocument() with validation
     * (4 reported errors are expected), #3 Text.getWholeText()/replaceWholeText().
     * Failures surface via {@link Assertion} or a printed stack trace.
     */
    public static void main( String[] argv) {
        try {
            boolean namespaces = true;
            System.out.println("Test DOM Level 3...");

            // Pick the Xerces DOM implementation through the registry.
            System.setProperty(DOMImplementationRegistry.PROPERTY,"org.apache.xerces.dom.DOMImplementationSourceImpl");

            DOMImplementationLS impl = (DOMImplementationLS)DOMImplementationRegistry.getDOMImplementation("LS-Load");

            Assertion.assert(impl!=null, "domImplementation != null");

            // Synchronous builder without a schema type; validation off by default.
            DOMBuilder builder = impl.createDOMBuilder(DOMImplementationLS.MODE_SYNCHRONOUS,
                                                       null);
            builder.setFeature("http://xml.org/sax/features/namespaces",namespaces);
            builder.setFeature("http://xml.org/sax/features/validation",false);

            //************************
            // TEST: lookupNamespacePrefix
            //       isDefaultNamespace
            //       lookupNamespaceURI
            //************************
            System.out.println("TEST #1: lookupNamespacePrefix, isDefaultNamespace, lookupNamespaceURI, input: tests/dom/dom3/input.xml");
            {
                Document doc = builder.parseURI("tests/dom/dom3/input.xml");
                NodeList ls = doc.getElementsByTagName("a:elem_a");

                NodeImpl elem = (NodeImpl)ls.item(0);
                if (namespaces) {
                    // With namespaces on, the default-namespace URI must not map
                    // to any prefix, but must be reported as the default.
                    //System.out.println("[a:elem_a].lookupNamespacePrefix('http://www.example.com', true) == null");
                    Assertion.assert(elem.lookupNamespacePrefix(
                        "http://www.example.com", true) == null,
                        "[a:elem_a].lookupNamespacePrefix(http://www.example.com)==null");

                    //System.out.println("[a:elem_a].isDefaultNamespace('http://www.example.com') == true");
                    Assertion.assert(elem.isDefaultNamespace("http://www.example.com") == true,
                        "[a:elem_a].isDefaultNamespace(http://www.example.com)==true");

                    //System.out.println("[a:elem_a].lookupNamespacePrefix('http://www.example.com', false) == ns1");
                    Assertion.assert(elem.lookupNamespacePrefix(
                        "http://www.example.com", false).equals("ns1"),
                        "[a:elem_a].lookupNamespacePrefix(http://www.example.com)==ns1");

                    Assertion.assert(elem.lookupNamespaceURI("xsi").equals("http://www.w3.org/2001/XMLSchema-instance"),
                        "[a:elem_a].lookupNamespaceURI('xsi') == 'http://www.w3.org/2001/XMLSchema-instance'" );
                } else {
                    // Without namespace support no prefix can be resolved.
                    Assertion.assert( elem.lookupNamespacePrefix(
                        "http://www.example.com", false) == null,"lookupNamespacePrefix(http://www.example.com)==null");
                }

                ls = doc.getElementsByTagName("bar:leaf");
                elem = (NodeImpl)ls.item(0);
                Assertion.assert(elem.lookupNamespacePrefix("url1:",false).equals("foo"),
                    "[bar:leaf].lookupNamespacePrefix('url1:', false) == foo");
                //System.out.println("[bar:leaf].lookupNamespacePrefix('url1:', false) == "+ );

                // Create p:baz bound to namespace 'b:' with an extra decl xmlns:x='b:';
                // the element's own prefix 'p' must win the lookup.
                //System.out.println("==>Create b:baz with namespace 'b:' and xmlns:x='b:'");
                ls = doc.getElementsByTagName("baz");
                elem = (NodeImpl)ls.item(0);
                ls = doc.getElementsByTagName("elem8");
                elem = (NodeImpl)ls.item(0);
                Element e1 = doc.createElementNS("b:","p:baz");
                e1.setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:x", "b:");
                elem.appendChild(e1);
                Assertion.assert(((NodeImpl)e1).lookupNamespacePrefix("b:",false).equals("p"),
                    "[p:baz].lookupNamespacePrefix('b:', false) == p");

                //System.out.println("[p:baz].lookupNamespacePrefix('b:', false) == "+ ((NodeImpl)e1).lookupNamespacePrefix("b:",false));

                Assertion.assert(elem.lookupNamespaceURI("xsi").equals("http://www.w3.org/2001/XMLSchema-instance"),
                    "[bar:leaf].lookupNamespaceURI('xsi') == 'http://www.w3.org/2001/XMLSchema-instance'" );
            }

            //************************
            //* Test normalizeDocument()
            //************************
            System.out.println("TEST #2: normalizeDocumention() (4 errors expected), input: tests/dom/dom3/schema.xml");
            {
                builder.setFeature("validate", true);
                DocumentImpl core = (DocumentImpl)builder.parseURI("tests/dom/dom3/schema.xml");

                // Break the document in several ways so normalizeDocument()
                // must report validation errors:
                // 1) drop the default namespace declaration
                NodeList ls2 = core.getElementsByTagName("decVal");
                Element testElem = (Element)ls2.item(0);
                testElem.removeAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns");

                // 2) change a prefix to one that is not declared
                ls2 = core.getElementsByTagName("v02:decVal");
                testElem = (Element)ls2.item(0);
                testElem.setPrefix("myPrefix");

                // 3) insert a decVal element with type-invalid content
                Element root = core.getDocumentElement();
                Element newElem = core.createElementNS(null, "decVal");
                String data="4.5";
                if (true) {
                    data = "string";
                }
                newElem.appendChild(core.createTextNode(data));
                root.insertBefore(newElem, testElem);

                // 4) insert an element that the schema does not declare
                newElem = core.createElementNS(null, "notInSchema");
                newElem.appendChild(core.createTextNode("added new element"));
                root.insertBefore(newElem, testElem);

                // First pass: validating normalization, errors go to handleError().
                core.setErrorHandler(new Test());
                core.setNormalizationFeature("validate", true);
                core.normalizeDocument();

                // Second pass: non-validating, but the name "not:well:formed"
                // is not well-formed and should still be reported.
                core.setNormalizationFeature("validate", false);
                root.appendChild(core.createElement("not:well:formed"));
                core.normalizeDocument();

                // Third pass: also strip comments.
                core.setNormalizationFeature("comments", false);
                core.normalizeDocument();

            }

            //************************
            // TEST: replaceWholeText()
            //       getWholeText()
            //************************
            System.out.println("TEST #3: wholeText, input: tests/dom/dom3/wholeText.xml");
            {

            builder.setFeature("validate", false);
            DocumentImpl doc = (DocumentImpl)builder.parseURI("tests/dom/dom3/wholeText.xml");

            Element root = doc.getDocumentElement();
            Element test = (Element)doc.getElementsByTagName("elem").item(0);

            // Append text interleaved with entity references; getWholeText()
            // must concatenate the logically adjacent text, expanding entities.
            // (Assumes ent1/ent2 expand to the address fragments asserted
            // below — TODO confirm against tests/dom/dom3/wholeText.xml.)
            test.appendChild(doc.createTextNode("Address: "));
            test.appendChild(doc.createEntityReference("ent2"));
            test.appendChild(doc.createTextNode("City: "));
            test.appendChild(doc.createEntityReference("ent1"));

            NodeList ls = test.getChildNodes();
            Assertion.assert(ls.getLength()==5, "List length");
            String compare1 = "Home Address: 1900 Dallas Road (East) City: Dallas. California. USA PO #5668";
            Assertion.assert(((Text)ls.item(0)).getWholeText().equals(compare1), "Compare1");
            String compare2 = "Address: 1900 Dallas Road (East) City: Dallas. California. USA PO #5668";
            Assertion.assert(((Text)ls.item(1)).getWholeText().equals(compare2), "Compare2");

            //TEST replaceWholeText(): a read-only node must be replaced by
            // a newly created node, and the text run collapsed to one child.
            ((NodeImpl)ls.item(0)).setReadOnly(true, true);

            Text original = (Text)ls.item(0);
            Node newNode = original.replaceWholeText("Replace with this text");
            ls = test.getChildNodes();
            Assertion.assert(ls.getLength() == 1, "Length == 1");
            Assertion.assert(ls.item(0).getNodeValue().equals("Replace with this text"), "Replacement works");
            Assertion.assert(newNode != original, "New node created");

            // replace text for node which is not yet attached to the tree
            Text text = doc.createTextNode("readonly");
            ((NodeImpl)text).setReadOnly(true, true);
            text = text.replaceWholeText("Data");
            Assertion.assert(text.getNodeValue().equals("Data"), "New value 'Data'");

            // test with second child that does not have any content
            test = (Element)doc.getElementsByTagName("elem").item(1);
            try {
                ((Text)test.getFirstChild()).replaceWholeText("can't replace");
            } catch (DOMException e){
                // expected: content backed by an entity reference can't be replaced
                Assertion.assert(e !=null);
            }
            String compare3 = "Test: The Content ends here. ";
            Assertion.assert(((Text)test.getFirstChild()).getWholeText().equals(compare3), "Compare3");

            }

        } catch ( Exception ex ) {
            ex.printStackTrace();
        }
    }

    /**
     * DOMErrorHandler callback: prints errors (bare) and warnings (prefixed)
     * and always asks the processor to continue.
     */
    public boolean handleError(DOMError error){
        short severity = error.getSeverity();
        if (severity == error.SEVERITY_ERROR) {
            System.out.println(error.getMessage());
        }

        if (severity == error.SEVERITY_WARNING) {
            System.out.println("[Warning]: "+error.getMessage());
        }

        return true;

    }
}
| Add testcases to verify namespace fixup algorithm.
git-svn-id: 21df804813e9d3638e43477f308dd0be51e5f30f@318601 13f79535-47bb-0310-9956-ffa450edef68
| tests/dom/dom3/Test.java | Add testcases to verify namespace fixup algorithm. |
|
Java | apache-2.0 | 082edca5de1ffc47d1e8729967644bf3f0b94ec1 | 0 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.ex;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.stream.JsonWriter;
import com.intellij.codeInspection.DefaultInspectionToolResultExporter;
import com.intellij.codeInspection.InspectionsReportConverter;
import com.intellij.codeInspection.InspectionsResultUtil;
import com.intellij.openapi.util.io.FileUtil;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import static com.intellij.codeInspection.DefaultInspectionToolResultExporter.INSPECTION_RESULTS_LANGUAGE;
import static com.intellij.codeInspection.reference.SmartRefElementPointerImpl.*;
/**
 * Converts the XML result files of an offline inspection run into JSON files. For each input
 * file a JSON file with the same base name is written to the output directory. Three input
 * shapes are recognized by file name: the inspection descriptions file, the
 * {@code DuplicatedCode} aggregate, and (for everything else) a per-inspection problems file.
 */
public class JsonInspectionsReportConverter implements InspectionsReportConverter {
  @NonNls private static final String FORMAT_NAME = "json";
  @NonNls private static final String JSON_EXTENSION = ".json";
  // XML child-element / JSON field names shared by the input and output formats.
  @NonNls private static final String FILE = "file";
  @NonNls private static final String LINE = "line";
  @NonNls private static final String OFFSET = "offset";
  @NonNls private static final String LENGTH = "length";
  @NonNls private static final String MODULE = "module";
  @NonNls private static final String PACKAGE = "package";
  @NonNls protected static final String PROBLEM = "problem";
  @NonNls protected static final String PROBLEMS = "problems";
  @NonNls private static final String DESCRIPTION = "description";
  @NonNls private static final String SEVERITY_ATTR = "severity";
  @NonNls private static final String ATTRIBUTE_KEY_ATTR = "attribute_key";
  @NonNls private static final String HINT = "hint";
  @NonNls private static final String HINTS = "hints";
  @NonNls private static final String DISPLAY_NAME = "displayName";
  @NonNls private static final String DEFAULT_SEVERITY = "defaultSeverity";
  @NonNls private static final String SHORT_NAME = "shortName";
  @NonNls private static final String ENABLED = "enabled";
  @NonNls private static final String NAME = "name";
  @NonNls private static final String ID = "id";
  @NonNls private static final String VALUE = "value";
  @NonNls private static final String GROUP = "group";
  @NonNls private static final String GROUPS = "groups";
  @NonNls private static final String INSPECTION = "inspection";
  @NonNls private static final String HIGHLIGHTED_ELEMENT = "highlighted_element";
  // Base file name of the aggregate produced by the DuplicatedCode inspection.
  @NonNls private static final String DUPLICATED_CODE_AGGREGATE = "DuplicatedCode" + InspectionsResultUtil.AGGREGATE;
  /** @return the identifier of this output format ({@code "json"}). */
  @Override
  public String getFormatName() {
    return FORMAT_NAME;
  }
  /** Requests that raw XML results be written to a temporary directory before conversion. */
  @Override
  public boolean useTmpDirForRawData() {
    return true;
  }
  /**
   * Converts every XML file in {@code inspectionsResults} to a same-named {@code .json} file
   * under {@code outputPath}, creating the output directory if needed.
   *
   * @param rawDataDirectoryPath directory holding the raw inspection data (unused here)
   * @param outputPath           directory to write JSON files to; must not be {@code null}
   * @param tools                inspection tools of the run (unused here)
   * @param inspectionsResults   XML result files to convert
   * @throws ConversionException if the output path is missing/uncreatable or a file fails to convert
   */
  @Override
  public void convert(@NotNull String rawDataDirectoryPath,
                      @Nullable String outputPath,
                      @NotNull Map<String, Tools> tools,
                      @NotNull List<? extends File> inspectionsResults) throws ConversionException {
    if (outputPath == null) {
      throw new ConversionException("Output path isn't specified");
    }
    try {
      Files.createDirectories(new File(outputPath).toPath());
    }
    catch (IOException e) {
      throw new ConversionException("Cannot create dirs in output path: " + outputPath + " error: " + e.getMessage());
    }
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    SAXBuilder builder = new SAXBuilder();
    for (File inspectionDataFile : inspectionsResults) {
      String fileNameWithoutExt = FileUtil.getNameWithoutExtension(inspectionDataFile);
      File jsonFile = new File(outputPath, fileNameWithoutExt + JSON_EXTENSION);
      try (Writer writer = Files.newBufferedWriter(jsonFile.toPath(), StandardCharsets.UTF_8);
           JsonWriter jsonWriter = gson.newJsonWriter(writer)) {
        Document doc = builder.build(inspectionDataFile);
        // Dispatch on the well-known file names; anything else is a per-inspection problems file.
        if (InspectionsResultUtil.DESCRIPTIONS.equals(fileNameWithoutExt)) {
          convertDescriptions(jsonWriter, doc);
        }
        else if (DUPLICATED_CODE_AGGREGATE.equals(fileNameWithoutExt)) {
          convertDuplicatedCode(jsonWriter, doc);
        }
        else {
          convertProblems(jsonWriter, doc);
        }
      }
      catch (IOException | JDOMException e) {
        throw new ConversionException("Cannot convert file: " + inspectionDataFile.getPath() + " error: " + e.getMessage());
      }
    }
  }
  /**
   * Writes the DuplicatedCode aggregate as {@code {"problems": [[fragment...], ...]}} where each
   * inner array holds the fragments of one duplicate group.
   */
  private static void convertDuplicatedCode(@NotNull JsonWriter jsonWriter, @NotNull Document problems) throws IOException {
    jsonWriter.beginObject();
    jsonWriter.name(PROBLEMS);
    jsonWriter.beginArray();
    for (Element duplicates : problems.getRootElement().getChildren("duplicate")) {
      jsonWriter.beginArray();
      for (Element fragment : duplicates.getChildren("fragment")) {
        convertDuplicateFragment(jsonWriter, fragment);
      }
      jsonWriter.endArray();
    }
    jsonWriter.endArray();
    jsonWriter.endObject();
  }
  /** Writes one duplicate fragment: file plus numeric line/start/end taken from XML attributes. */
  private static void convertDuplicateFragment(@NotNull JsonWriter jsonWriter, Element fragment) throws IOException {
    jsonWriter.beginObject();
    jsonWriter.name(FILE).value(fragment.getAttributeValue(FILE));
    String line = fragment.getAttributeValue(LINE);
    String start = fragment.getAttributeValue("start");
    String end = fragment.getAttributeValue("end");
    // These attributes are expected to always be present in the aggregate XML.
    assert line != null;
    assert start != null;
    assert end != null;
    jsonWriter.name(LINE).value(Integer.parseInt(line));
    jsonWriter.name("start").value(Integer.parseInt(start));
    jsonWriter.name("end").value(Integer.parseInt(end));
    jsonWriter.endObject();
  }
  /** Writes a problems file as {@code {"problems": [problem, ...]}}. */
  private static void convertProblems(@NotNull JsonWriter jsonWriter, @NotNull Document problems) throws IOException {
    jsonWriter.beginObject();
    jsonWriter.name(PROBLEMS);
    jsonWriter.beginArray();
    for (Element problem : problems.getRootElement().getChildren(PROBLEM)) {
      convertProblem(jsonWriter, problem);
    }
    jsonWriter.endArray();
    jsonWriter.endObject();
  }
  /**
   * Writes one problem element: location fields, optional problem-class / entry-point / hints
   * sub-objects, then the highlighted element, language, and description.
   */
  public static void convertProblem(@NotNull JsonWriter writer, @NotNull Element problem) throws IOException {
    writer.beginObject();
    writer.name(FILE).value(problem.getChildText(FILE));
    writeInt(writer, problem, LINE);
    writeInt(writer, problem, OFFSET);
    writeInt(writer, problem, LENGTH);
    writer.name(MODULE).value(problem.getChildText(MODULE));
    writer.name(PACKAGE).value(problem.getChildText(PACKAGE));
    Element problemClassElement = problem.getChild(DefaultInspectionToolResultExporter.INSPECTION_RESULTS_PROBLEM_CLASS_ELEMENT);
    if (problemClassElement != null) {
      convertProblemClass(writer, problemClassElement);
    }
    Element entryPoint = problem.getChild(ENTRY_POINT);
    if (entryPoint != null) {
      convertEntryPoint(writer, entryPoint);
    }
    Element hints = problem.getChild(HINTS);
    if (hints != null) {
      convertHints(writer, hints);
    }
    writer.name(HIGHLIGHTED_ELEMENT).value(problem.getChildText(HIGHLIGHTED_ELEMENT));
    writer.name(INSPECTION_RESULTS_LANGUAGE).value(problem.getChildText(INSPECTION_RESULTS_LANGUAGE));
    writer.name(DESCRIPTION).value(problem.getChildText(DESCRIPTION));
    writer.endObject();
  }
  /** Writes {@code elementName} as an int field, or a JSON null when the text is not a valid int. */
  private static void writeInt(@NotNull JsonWriter writer, @NotNull Element problem, @NotNull String elementName) throws IOException {
    try {
      int intValue = Integer.parseInt(problem.getChildText(elementName));
      writer.name(elementName).value(intValue);
    }
    catch (NumberFormatException e) {
      writer.name(elementName).nullValue();
    }
  }
  /** Writes the problem-class object: name, optional inspection id, severity and attribute key. */
  private static void convertProblemClass(@NotNull JsonWriter writer, @NotNull Element problemClass) throws IOException {
    writer.name(DefaultInspectionToolResultExporter.INSPECTION_RESULTS_PROBLEM_CLASS_ELEMENT);
    writer.beginObject()
      .name(NAME).value(problemClass.getText());
    String inspectionId = problemClass.getAttributeValue(DefaultInspectionToolResultExporter.INSPECTION_RESULTS_ID_ATTRIBUTE);
    if (inspectionId != null) {
      writer.name(ID).value(inspectionId);
    }
    writer
      .name(SEVERITY_ATTR).value(problemClass.getAttributeValue(SEVERITY_ATTR))
      .name(ATTRIBUTE_KEY_ATTR).value(problemClass.getAttributeValue(ATTRIBUTE_KEY_ATTR))
      .endObject();
  }
  /** Writes the entry-point object (type and fully qualified name attributes). */
  private static void convertEntryPoint(@NotNull JsonWriter writer, @NotNull Element entryPoint) throws IOException {
    writer.name(ENTRY_POINT);
    writer.beginObject()
      .name(TYPE_ATTR).value(entryPoint.getAttributeValue(TYPE_ATTR))
      .name(FQNAME_ATTR).value(entryPoint.getAttributeValue(FQNAME_ATTR))
      .endObject();
  }
  /** Writes the quick-fix hints as a flat JSON array of their {@code value} attributes. */
  private static void convertHints(@NotNull JsonWriter writer, @NotNull Element hints) throws IOException {
    writer.name(HINTS);
    writer.beginArray();
    for (Element hint : hints.getChildren(HINT)) {
      writer.value(hint.getAttributeValue(VALUE));
    }
    writer.endArray();
  }
  /** Writes the full descriptions document without any inspection filtering. */
  private static void convertDescriptions(@NotNull JsonWriter writer, @NotNull Document descriptions) throws IOException {
    writer.beginObject();
    convertDescriptionsContents(writer, descriptions, null);
    writer.endObject();
  }
  /**
   * Writes the profile name and the inspection groups. When {@code inspectionFilter} is non-null,
   * only inspections whose short name passes the filter (and only groups containing at least one
   * such inspection) are emitted.
   */
  protected static void convertDescriptionsContents(@NotNull JsonWriter writer,
                                                    @NotNull Document descriptions,
                                                    @Nullable Predicate<? super String> inspectionFilter) throws IOException {
    Element inspectionsElement = descriptions.getRootElement();
    writer.name(InspectionsResultUtil.PROFILE).value(inspectionsElement.getAttributeValue(InspectionsResultUtil.PROFILE));
    writer.name(GROUPS);
    writer.beginArray();
    for (Element group : inspectionsElement.getChildren(GROUP)) {
      convertGroup(writer, group, inspectionFilter);
    }
    writer.endArray();
  }
  /** Writes one group object; skipped entirely if no contained inspection passes the filter. */
  private static void convertGroup(@NotNull JsonWriter writer, @NotNull Element group, @Nullable Predicate<? super String> inspectionFilter) throws IOException {
    if (inspectionFilter != null) {
      boolean anyInspectionsInFilter = false;
      for (Element inspection : group.getChildren(INSPECTION)) {
        if (inspectionFilter.test(inspection.getAttributeValue(SHORT_NAME))) {
          anyInspectionsInFilter = true;
          break;
        }
      }
      if (!anyInspectionsInFilter) return;
    }
    writer.beginObject();
    writer.name(NAME).value(group.getAttributeValue(NAME));
    writer.name(InspectionsResultUtil.INSPECTIONS_NODE).beginArray();
    for (Element inspection : group.getChildren(INSPECTION)) {
      if (inspectionFilter != null && !inspectionFilter.test(inspection.getAttributeValue(SHORT_NAME))) continue;
      convertInspectionDescription(writer, inspection);
    }
    writer.endArray();
    writer.endObject();
  }
  /** Writes one inspection description: short/display name, default severity, enabled flag, text. */
  private static void convertInspectionDescription(@NotNull JsonWriter writer, @NotNull Element inspection) throws IOException {
    writer.beginObject()
      .name(SHORT_NAME).value(inspection.getAttributeValue(SHORT_NAME))
      .name(DISPLAY_NAME).value(inspection.getAttributeValue(DISPLAY_NAME))
      .name(DEFAULT_SEVERITY).value(inspection.getAttributeValue(DEFAULT_SEVERITY))
      .name(ENABLED).value(Boolean.parseBoolean(inspection.getAttributeValue(ENABLED)))
      .name(DESCRIPTION).value(inspection.getValue())
      .endObject();
  }
}
| platform/analysis-impl/src/com/intellij/codeInspection/ex/JsonInspectionsReportConverter.java | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.ex;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.stream.JsonWriter;
import com.intellij.codeInspection.DefaultInspectionToolResultExporter;
import com.intellij.codeInspection.InspectionsReportConverter;
import com.intellij.codeInspection.InspectionsResultUtil;
import com.intellij.openapi.util.io.FileUtil;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import static com.intellij.codeInspection.DefaultInspectionToolResultExporter.INSPECTION_RESULTS_LANGUAGE;
import static com.intellij.codeInspection.reference.SmartRefElementPointerImpl.*;
/**
 * Converts the XML result files of an offline inspection run into JSON files. For each input
 * file a JSON file with the same base name is written to the output directory. Recognized
 * inputs: the inspection descriptions file, the {@code ProjectFingerprint} file, the
 * {@code DuplicatedCode} aggregate, and (for everything else) a per-inspection problems file.
 */
public class JsonInspectionsReportConverter implements InspectionsReportConverter {
  @NonNls private static final String FORMAT_NAME = "json";
  @NonNls private static final String JSON_EXTENSION = ".json";
  // XML child-element / JSON field names shared by the input and output formats.
  @NonNls private static final String FILE = "file";
  @NonNls private static final String LINE = "line";
  @NonNls private static final String OFFSET = "offset";
  @NonNls private static final String LENGTH = "length";
  @NonNls private static final String MODULE = "module";
  @NonNls private static final String PACKAGE = "package";
  @NonNls protected static final String PROBLEM = "problem";
  @NonNls protected static final String PROBLEMS = "problems";
  @NonNls private static final String DESCRIPTION = "description";
  @NonNls private static final String SEVERITY_ATTR = "severity";
  @NonNls private static final String ATTRIBUTE_KEY_ATTR = "attribute_key";
  @NonNls private static final String HINT = "hint";
  @NonNls private static final String HINTS = "hints";
  @NonNls private static final String DISPLAY_NAME = "displayName";
  @NonNls private static final String DEFAULT_SEVERITY = "defaultSeverity";
  @NonNls private static final String SHORT_NAME = "shortName";
  @NonNls private static final String ENABLED = "enabled";
  @NonNls private static final String NAME = "name";
  @NonNls private static final String ID = "id";
  @NonNls private static final String VALUE = "value";
  @NonNls private static final String GROUP = "group";
  @NonNls private static final String GROUPS = "groups";
  @NonNls private static final String INSPECTION = "inspection";
  @NonNls private static final String HIGHLIGHTED_ELEMENT = "highlighted_element";
  // Field names of the ProjectFingerprint result file.
  @NonNls private static final String PROJECT_FINGERPRINT = "ProjectFingerprint";
  @NonNls private static final String FILE_FINGERPRINT = "file_fingerprint";
  @NonNls private static final String FILE_NAME = "file_name";
  @NonNls private static final String FILE_PATH = "file_path";
  @NonNls private static final String LANGUAGE = "language";
  @NonNls private static final String LINES_COUNT = "lines_count";
  @NonNls private static final String MODIFICATION_TIMESTAMP = "modification_timestamp";
  // Base file name of the aggregate produced by the DuplicatedCode inspection.
  @NonNls private static final String DUPLICATED_CODE_AGGREGATE = "DuplicatedCode" + InspectionsResultUtil.AGGREGATE;
  /** @return the identifier of this output format ({@code "json"}). */
  @Override
  public String getFormatName() {
    return FORMAT_NAME;
  }
  /** Requests that raw XML results be written to a temporary directory before conversion. */
  @Override
  public boolean useTmpDirForRawData() {
    return true;
  }
  /**
   * Converts every XML file in {@code inspectionsResults} to a same-named {@code .json} file
   * under {@code outputPath}, creating the output directory if needed.
   *
   * @throws ConversionException if the output path is missing/uncreatable or a file fails to convert
   */
  @Override
  public void convert(@NotNull String rawDataDirectoryPath,
                      @Nullable String outputPath,
                      @NotNull Map<String, Tools> tools,
                      @NotNull List<? extends File> inspectionsResults) throws ConversionException {
    if (outputPath == null) {
      throw new ConversionException("Output path isn't specified");
    }
    try {
      Files.createDirectories(new File(outputPath).toPath());
    }
    catch (IOException e) {
      throw new ConversionException("Cannot create dirs in output path: " + outputPath + " error: " + e.getMessage());
    }
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    SAXBuilder builder = new SAXBuilder();
    for (File inspectionDataFile : inspectionsResults) {
      String fileNameWithoutExt = FileUtil.getNameWithoutExtension(inspectionDataFile);
      File jsonFile = new File(outputPath, fileNameWithoutExt + JSON_EXTENSION);
      try (Writer writer = Files.newBufferedWriter(jsonFile.toPath(), StandardCharsets.UTF_8);
           JsonWriter jsonWriter = gson.newJsonWriter(writer)) {
        Document doc = builder.build(inspectionDataFile);
        // Dispatch on the well-known file names; anything else is a per-inspection problems file.
        if (InspectionsResultUtil.DESCRIPTIONS.equals(fileNameWithoutExt)) {
          convertDescriptions(jsonWriter, doc);
        }
        else if (PROJECT_FINGERPRINT.equals(fileNameWithoutExt)) {
          convertProjectFingerprint(jsonWriter, doc);
        }
        else if (DUPLICATED_CODE_AGGREGATE.equals(fileNameWithoutExt)) {
          convertDuplicatedCode(jsonWriter, doc);
        }
        else {
          convertProblems(jsonWriter, doc);
        }
      }
      catch (IOException | JDOMException e) {
        throw new ConversionException("Cannot convert file: " + inspectionDataFile.getPath() + " error: " + e.getMessage());
      }
    }
  }
  /**
   * Writes the DuplicatedCode aggregate as {@code {"problems": [[fragment...], ...]}} where each
   * inner array holds the fragments of one duplicate group.
   */
  private static void convertDuplicatedCode(@NotNull JsonWriter jsonWriter, @NotNull Document problems) throws IOException {
    jsonWriter.beginObject();
    jsonWriter.name(PROBLEMS);
    jsonWriter.beginArray();
    for (Element duplicates : problems.getRootElement().getChildren("duplicate")) {
      jsonWriter.beginArray();
      for (Element fragment : duplicates.getChildren("fragment")) {
        convertDuplicateFragment(jsonWriter, fragment);
      }
      jsonWriter.endArray();
    }
    jsonWriter.endArray();
    jsonWriter.endObject();
  }
  /** Writes one duplicate fragment: file plus numeric line/start/end taken from XML attributes. */
  private static void convertDuplicateFragment(@NotNull JsonWriter jsonWriter, Element fragment) throws IOException {
    jsonWriter.beginObject();
    jsonWriter.name(FILE).value(fragment.getAttributeValue(FILE));
    String line = fragment.getAttributeValue(LINE);
    String start = fragment.getAttributeValue("start");
    String end = fragment.getAttributeValue("end");
    // These attributes are expected to always be present in the aggregate XML.
    assert line != null;
    assert start != null;
    assert end != null;
    jsonWriter.name(LINE).value(Integer.parseInt(line));
    jsonWriter.name("start").value(Integer.parseInt(start));
    jsonWriter.name("end").value(Integer.parseInt(end));
    jsonWriter.endObject();
  }
  /** Writes the project fingerprint as {@code {"problems": [fileFingerprint, ...]}}. */
  private static void convertProjectFingerprint(@NotNull JsonWriter jsonWriter, @NotNull Document problems) throws IOException {
    jsonWriter.beginObject();
    jsonWriter.name(PROBLEMS);
    jsonWriter.beginArray();
    for (Element fileFingerprint : problems.getRootElement().getChildren(FILE_FINGERPRINT)) {
      convertFileFingerprint(jsonWriter, fileFingerprint);
    }
    jsonWriter.endArray();
    jsonWriter.endObject();
  }
  /**
   * Writes one file fingerprint: name, path, language, plus lines-count (int) and modification
   * timestamp (long); unparsable numeric fields are written as JSON nulls.
   */
  private static void convertFileFingerprint(@NotNull JsonWriter writer, @NotNull Element problem) throws IOException {
    writer.beginObject();
    writer.name(FILE_NAME).value(problem.getChildText(FILE_NAME));
    writer.name(FILE_PATH).value(problem.getChildText(FILE_PATH));
    writer.name(LANGUAGE).value(problem.getChildText(LANGUAGE));
    try {
      int linesCount = Integer.parseInt(problem.getChildText(LINES_COUNT));
      writer.name(LINES_COUNT).value(linesCount);
    }
    catch (NumberFormatException e) {
      writer.name(LINES_COUNT).nullValue();
    }
    try {
      long modificationStamp = Long.parseLong(problem.getChildText(MODIFICATION_TIMESTAMP));
      writer.name(MODIFICATION_TIMESTAMP).value(modificationStamp);
    }
    catch (NumberFormatException e) {
      writer.name(MODIFICATION_TIMESTAMP).nullValue();
    }
    writer.endObject();
  }
  /** Writes a problems file as {@code {"problems": [problem, ...]}}. */
  private static void convertProblems(@NotNull JsonWriter jsonWriter, @NotNull Document problems) throws IOException {
    jsonWriter.beginObject();
    jsonWriter.name(PROBLEMS);
    jsonWriter.beginArray();
    for (Element problem : problems.getRootElement().getChildren(PROBLEM)) {
      convertProblem(jsonWriter, problem);
    }
    jsonWriter.endArray();
    jsonWriter.endObject();
  }
  /**
   * Writes one problem element: location fields, optional problem-class / entry-point / hints
   * sub-objects, then the highlighted element, language, and description.
   */
  public static void convertProblem(@NotNull JsonWriter writer, @NotNull Element problem) throws IOException {
    writer.beginObject();
    writer.name(FILE).value(problem.getChildText(FILE));
    writeInt(writer, problem, LINE);
    writeInt(writer, problem, OFFSET);
    writeInt(writer, problem, LENGTH);
    writer.name(MODULE).value(problem.getChildText(MODULE));
    writer.name(PACKAGE).value(problem.getChildText(PACKAGE));
    Element problemClassElement = problem.getChild(DefaultInspectionToolResultExporter.INSPECTION_RESULTS_PROBLEM_CLASS_ELEMENT);
    if (problemClassElement != null) {
      convertProblemClass(writer, problemClassElement);
    }
    Element entryPoint = problem.getChild(ENTRY_POINT);
    if (entryPoint != null) {
      convertEntryPoint(writer, entryPoint);
    }
    Element hints = problem.getChild(HINTS);
    if (hints != null) {
      convertHints(writer, hints);
    }
    writer.name(HIGHLIGHTED_ELEMENT).value(problem.getChildText(HIGHLIGHTED_ELEMENT));
    writer.name(INSPECTION_RESULTS_LANGUAGE).value(problem.getChildText(INSPECTION_RESULTS_LANGUAGE));
    writer.name(DESCRIPTION).value(problem.getChildText(DESCRIPTION));
    writer.endObject();
  }
  /** Writes {@code elementName} as an int field, or a JSON null when the text is not a valid int. */
  private static void writeInt(@NotNull JsonWriter writer, @NotNull Element problem, @NotNull String elementName) throws IOException {
    try {
      int intValue = Integer.parseInt(problem.getChildText(elementName));
      writer.name(elementName).value(intValue);
    }
    catch (NumberFormatException e) {
      writer.name(elementName).nullValue();
    }
  }
  /** Writes the problem-class object: name, optional inspection id, severity and attribute key. */
  private static void convertProblemClass(@NotNull JsonWriter writer, @NotNull Element problemClass) throws IOException {
    writer.name(DefaultInspectionToolResultExporter.INSPECTION_RESULTS_PROBLEM_CLASS_ELEMENT);
    writer.beginObject()
      .name(NAME).value(problemClass.getText());
    String inspectionId = problemClass.getAttributeValue(DefaultInspectionToolResultExporter.INSPECTION_RESULTS_ID_ATTRIBUTE);
    if (inspectionId != null) {
      writer.name(ID).value(inspectionId);
    }
    writer
      .name(SEVERITY_ATTR).value(problemClass.getAttributeValue(SEVERITY_ATTR))
      .name(ATTRIBUTE_KEY_ATTR).value(problemClass.getAttributeValue(ATTRIBUTE_KEY_ATTR))
      .endObject();
  }
  /** Writes the entry-point object (type and fully qualified name attributes). */
  private static void convertEntryPoint(@NotNull JsonWriter writer, @NotNull Element entryPoint) throws IOException {
    writer.name(ENTRY_POINT);
    writer.beginObject()
      .name(TYPE_ATTR).value(entryPoint.getAttributeValue(TYPE_ATTR))
      .name(FQNAME_ATTR).value(entryPoint.getAttributeValue(FQNAME_ATTR))
      .endObject();
  }
  /** Writes the quick-fix hints as a flat JSON array of their {@code value} attributes. */
  private static void convertHints(@NotNull JsonWriter writer, @NotNull Element hints) throws IOException {
    writer.name(HINTS);
    writer.beginArray();
    for (Element hint : hints.getChildren(HINT)) {
      writer.value(hint.getAttributeValue(VALUE));
    }
    writer.endArray();
  }
  /** Writes the full descriptions document without any inspection filtering. */
  private static void convertDescriptions(@NotNull JsonWriter writer, @NotNull Document descriptions) throws IOException {
    writer.beginObject();
    convertDescriptionsContents(writer, descriptions, null);
    writer.endObject();
  }
  /**
   * Writes the profile name and the inspection groups. When {@code inspectionFilter} is non-null,
   * only inspections whose short name passes the filter (and only groups containing at least one
   * such inspection) are emitted.
   */
  protected static void convertDescriptionsContents(@NotNull JsonWriter writer,
                                                    @NotNull Document descriptions,
                                                    @Nullable Predicate<? super String> inspectionFilter) throws IOException {
    Element inspectionsElement = descriptions.getRootElement();
    writer.name(InspectionsResultUtil.PROFILE).value(inspectionsElement.getAttributeValue(InspectionsResultUtil.PROFILE));
    writer.name(GROUPS);
    writer.beginArray();
    for (Element group : inspectionsElement.getChildren(GROUP)) {
      convertGroup(writer, group, inspectionFilter);
    }
    writer.endArray();
  }
  /** Writes one group object; skipped entirely if no contained inspection passes the filter. */
  private static void convertGroup(@NotNull JsonWriter writer, @NotNull Element group, @Nullable Predicate<? super String> inspectionFilter) throws IOException {
    if (inspectionFilter != null) {
      boolean anyInspectionsInFilter = false;
      for (Element inspection : group.getChildren(INSPECTION)) {
        if (inspectionFilter.test(inspection.getAttributeValue(SHORT_NAME))) {
          anyInspectionsInFilter = true;
          break;
        }
      }
      if (!anyInspectionsInFilter) return;
    }
    writer.beginObject();
    writer.name(NAME).value(group.getAttributeValue(NAME));
    writer.name(InspectionsResultUtil.INSPECTIONS_NODE).beginArray();
    for (Element inspection : group.getChildren(INSPECTION)) {
      if (inspectionFilter != null && !inspectionFilter.test(inspection.getAttributeValue(SHORT_NAME))) continue;
      convertInspectionDescription(writer, inspection);
    }
    writer.endArray();
    writer.endObject();
  }
  /** Writes one inspection description: short/display name, default severity, enabled flag, text. */
  private static void convertInspectionDescription(@NotNull JsonWriter writer, @NotNull Element inspection) throws IOException {
    writer.beginObject()
      .name(SHORT_NAME).value(inspection.getAttributeValue(SHORT_NAME))
      .name(DISPLAY_NAME).value(inspection.getAttributeValue(DISPLAY_NAME))
      .name(DEFAULT_SEVERITY).value(inspection.getAttributeValue(DEFAULT_SEVERITY))
      .name(ENABLED).value(Boolean.parseBoolean(inspection.getAttributeValue(ENABLED)))
      .name(DESCRIPTION).value(inspection.getValue())
      .endObject();
  }
}
| [qodana] Drop project fingerprint inspection (replaced with ProjectFingerprintDescriber)
GitOrigin-RevId: 124180031b528de2d16926f92ea8c582e3860725 | platform/analysis-impl/src/com/intellij/codeInspection/ex/JsonInspectionsReportConverter.java | [qodana] Drop project fingerprint inspection (replaced with ProjectFingerprintDescriber) |
|
Java | apache-2.0 | a93cd9224782ac9847d71cdbe6818b9f6ce805dc | 0 | ShailShah/alluxio,ShailShah/alluxio,Reidddddd/mo-alluxio,uronce-cc/alluxio,maobaolong/alluxio,calvinjia/tachyon,Reidddddd/mo-alluxio,Reidddddd/alluxio,jswudi/alluxio,PasaLab/tachyon,maobaolong/alluxio,wwjiang007/alluxio,WilliamZapata/alluxio,aaudiber/alluxio,jsimsa/alluxio,aaudiber/alluxio,maboelhassan/alluxio,calvinjia/tachyon,jsimsa/alluxio,Alluxio/alluxio,maobaolong/alluxio,bf8086/alluxio,calvinjia/tachyon,aaudiber/alluxio,EvilMcJerkface/alluxio,Alluxio/alluxio,jsimsa/alluxio,madanadit/alluxio,bf8086/alluxio,apc999/alluxio,Alluxio/alluxio,wwjiang007/alluxio,Alluxio/alluxio,riversand963/alluxio,Reidddddd/alluxio,riversand963/alluxio,calvinjia/tachyon,ChangerYoung/alluxio,calvinjia/tachyon,WilliamZapata/alluxio,PasaLab/tachyon,uronce-cc/alluxio,madanadit/alluxio,yuluo-ding/alluxio,ShailShah/alluxio,aaudiber/alluxio,yuluo-ding/alluxio,Alluxio/alluxio,maboelhassan/alluxio,wwjiang007/alluxio,wwjiang007/alluxio,PasaLab/tachyon,madanadit/alluxio,Alluxio/alluxio,madanadit/alluxio,Alluxio/alluxio,Reidddddd/mo-alluxio,wwjiang007/alluxio,jsimsa/alluxio,maobaolong/alluxio,yuluo-ding/alluxio,EvilMcJerkface/alluxio,riversand963/alluxio,WilliamZapata/alluxio,jswudi/alluxio,bf8086/alluxio,PasaLab/tachyon,riversand963/alluxio,Reidddddd/mo-alluxio,Alluxio/alluxio,maboelhassan/alluxio,jsimsa/alluxio,PasaLab/tachyon,ShailShah/alluxio,maboelhassan/alluxio,ChangerYoung/alluxio,ChangerYoung/alluxio,apc999/alluxio,bf8086/alluxio,Reidddddd/alluxio,yuluo-ding/alluxio,Reidddddd/alluxio,calvinjia/tachyon,bf8086/alluxio,WilliamZapata/alluxio,wwjiang007/alluxio,Reidddddd/alluxio,aaudiber/alluxio,PasaLab/tachyon,apc999/alluxio,calvinjia/tachyon,uronce-cc/alluxio,riversand963/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,bf8086/alluxio,yuluo-ding/alluxio,EvilMcJerkface/alluxio,apc999/alluxio,wwjiang007/alluxio,maobaolong/alluxio,madanadit/alluxio,calvinjia/tachyon,uronce-cc/alluxio,maboelhassan/alluxio,uronce-cc/alluxio,
maobaolong/alluxio,PasaLab/tachyon,jsimsa/alluxio,uronce-cc/alluxio,bf8086/alluxio,madanadit/alluxio,EvilMcJerkface/alluxio,maobaolong/alluxio,aaudiber/alluxio,Reidddddd/alluxio,ShailShah/alluxio,apc999/alluxio,apc999/alluxio,aaudiber/alluxio,ChangerYoung/alluxio,Reidddddd/alluxio,wwjiang007/alluxio,maobaolong/alluxio,wwjiang007/alluxio,madanadit/alluxio,yuluo-ding/alluxio,Alluxio/alluxio,maboelhassan/alluxio,jswudi/alluxio,Reidddddd/mo-alluxio,wwjiang007/alluxio,ChangerYoung/alluxio,Alluxio/alluxio,jswudi/alluxio,ChangerYoung/alluxio,maobaolong/alluxio,riversand963/alluxio,Reidddddd/mo-alluxio,WilliamZapata/alluxio,EvilMcJerkface/alluxio,bf8086/alluxio,madanadit/alluxio,EvilMcJerkface/alluxio,apc999/alluxio,ShailShah/alluxio,jswudi/alluxio,WilliamZapata/alluxio,jswudi/alluxio,maobaolong/alluxio,maboelhassan/alluxio | /*
* Licensed to the University of California, Berkeley under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package tachyon.client.keyvalue.hadoop;
import java.nio.ByteBuffer;
import org.apache.hadoop.conf.Configuration;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import tachyon.Constants;
import tachyon.LocalTachyonClusterResource;
import tachyon.TachyonURI;
import tachyon.client.keyvalue.KeyValueStoreReader;
import tachyon.client.keyvalue.KeyValueStoreWriter;
import tachyon.client.keyvalue.KeyValueStores;
import tachyon.examples.keyvalue.SameKeyValueStoresTest;
import tachyon.examples.keyvalue.hadoop.CloneKeyValueStoreMapReduce;
import tachyon.hadoop.ConfUtils;
import tachyon.hadoop.TFS;
import tachyon.util.io.BufferUtils;
import tachyon.util.io.PathUtils;
/**
* Tests {@link KeyValueInputFormat}, {@link KeyValueOutputFormat}, and
* {@link KeyValueOutputCommitter} in MapReduce jobs.
*
* NOTE: The MapReduce jobs are run by {@link org.apache.hadoop.mapred.LocalJobRunner}.
*/
public final class KeyValueStoreMapReduceIntegrationTest {
  // Block size for the local Tachyon test cluster.
  private static final int BLOCK_SIZE = 512 * Constants.MB;
  // Upper bound on a single key-value partition; values below are sized relative to this.
  private static final int KEY_VALUE_PARTITION_SIZE = Constants.MB;
  // Hadoop configuration pointing at the local Tachyon master; built once in beforeClass().
  private static Configuration sHadoopConf;
  @ClassRule
  public static LocalTachyonClusterResource sLocalTachyonClusterResource =
      new LocalTachyonClusterResource(Constants.GB, BLOCK_SIZE,
      /* ensure key-value service is turned on */
      Constants.KEY_VALUE_ENABLED, "true",
      Constants.KEY_VALUE_PARTITION_SIZE_BYTES_MAX, Integer.toString(KEY_VALUE_PARTITION_SIZE));
  /**
   * Builds a Hadoop {@link Configuration} that routes the default filesystem (both the old
   * and new property names) to the local Tachyon master and registers {@link TFS} as the
   * filesystem implementation for the Tachyon scheme.
   */
  @BeforeClass
  public static void beforeClass() throws Exception {
    sHadoopConf = new Configuration();
    String tachyonMasterURI = sLocalTachyonClusterResource.get().getMasterUri();
    sHadoopConf.set("fs.defaultFS", tachyonMasterURI);
    sHadoopConf.set("fs.default.name", tachyonMasterURI);
    sHadoopConf.set(String.format("fs.%s.impl", Constants.SCHEME), TFS.class.getName());
    // Propagate the cluster's Tachyon configuration into the Hadoop configuration.
    ConfUtils.storeToHadoopConfiguration(sLocalTachyonClusterResource.get().getMasterTachyonConf(),
        sHadoopConf);
  }
  /**
   * Tests that a MapReduce job can clone a key-value store.
   */
  @Test(timeout = 25000)
  public void cloneKeyValueStoreTest() throws Exception {
    // Each value is half a partition, so 4 pairs span multiple partitions.
    final int numKeys = 4;
    final int valueBytes = KEY_VALUE_PARTITION_SIZE / 2;
    // Creates a key-value store.
    TachyonURI originalKVStoreURI = new TachyonURI(PathUtils.uniqPath());
    KeyValueStores kvStores = KeyValueStores.Factory.create();
    KeyValueStoreWriter writer = kvStores.create(originalKVStoreURI);
    for (int i = 1; i <= numKeys; i ++) {
      byte[] key = BufferUtils.getIncreasingByteArray(i);
      byte[] value = BufferUtils.getIncreasingByteArray(valueBytes);
      writer.put(key, value);
    }
    writer.close();
    // Verifies that the newly created store has correct key-value pairs.
    KeyValueStoreReader reader = kvStores.open(originalKVStoreURI);
    Assert.assertEquals(numKeys, reader.size());
    for (int i = 1; i <= numKeys; i ++) {
      byte[] key = BufferUtils.getIncreasingByteArray(i);
      byte[] expectedValue = BufferUtils.getIncreasingByteArray(valueBytes);
      byte[] gotValue = reader.get(key);
      // Wrap in ByteBuffer so the comparison is by content, not array identity.
      Assert.assertEquals(ByteBuffer.wrap(expectedValue), ByteBuffer.wrap(gotValue));
    }
    reader.close();
    // Clones the store.
    TachyonURI clonedKVStoreURI = new TachyonURI(PathUtils.uniqPath());
    CloneKeyValueStoreMapReduce.run(sHadoopConf, originalKVStoreURI.getPath(),
        clonedKVStoreURI.getPath());
    // Verifies that the cloned store is the same as the original one.
    Assert.assertTrue(new SameKeyValueStoresTest(new TachyonURI(originalKVStoreURI.getPath()),
        new TachyonURI(clonedKVStoreURI.getPath())).call());
  }
}
| tests/src/test/java/tachyon/client/keyvalue/hadoop/KeyValueStoreMapReduceIntegrationTest.java | /*
* Licensed to the University of California, Berkeley under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package tachyon.client.keyvalue.hadoop;
import java.nio.ByteBuffer;
import org.apache.hadoop.conf.Configuration;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import tachyon.Constants;
import tachyon.LocalTachyonClusterResource;
import tachyon.TachyonURI;
import tachyon.client.keyvalue.KeyValueStoreReader;
import tachyon.client.keyvalue.KeyValueStoreWriter;
import tachyon.client.keyvalue.KeyValueStores;
import tachyon.examples.keyvalue.SameKeyValueStoresTest;
import tachyon.examples.keyvalue.hadoop.CloneKeyValueStoreMapReduce;
import tachyon.hadoop.ConfUtils;
import tachyon.hadoop.TFS;
import tachyon.util.io.BufferUtils;
import tachyon.util.io.PathUtils;
/**
* Tests {@link KeyValueInputFormat}, {@link KeyValueOutputFormat}, and
* {@link KeyValueOutputCommitter} in MapReduce jobs.
*
* NOTE: The MapReduce jobs are run by {@link org.apache.hadoop.mapred.LocalJobRunner}.
*/
public final class KeyValueStoreMapReduceIntegrationTest {
private static final int BLOCK_SIZE = 512 * Constants.MB;
private static final int KEY_VALUE_PARTITION_SIZE = Constants.MB;
private static Configuration sHadoopConf;
@ClassRule
public static LocalTachyonClusterResource sLocalTachyonClusterResource =
new LocalTachyonClusterResource(Constants.GB, Constants.KB, BLOCK_SIZE,
/* ensure key-value service is turned on */
Constants.KEY_VALUE_ENABLED, "true",
Constants.KEY_VALUE_PARTITION_SIZE_BYTES_MAX, Integer.toString(KEY_VALUE_PARTITION_SIZE));
@BeforeClass
public static void beforeClass() throws Exception {
sHadoopConf = new Configuration();
String tachyonMasterURI = sLocalTachyonClusterResource.get().getMasterUri();
sHadoopConf.set("fs.defaultFS", tachyonMasterURI);
sHadoopConf.set("fs.default.name", tachyonMasterURI);
sHadoopConf.set(String.format("fs.%s.impl", Constants.SCHEME), TFS.class.getName());
ConfUtils.storeToHadoopConfiguration(sLocalTachyonClusterResource.get().getMasterTachyonConf(),
sHadoopConf);
}
/**
* Tests that a MapReduce job can clone a key-value store.
*/
@Test(timeout = 25000)
public void cloneKeyValueStoreTest() throws Exception {
final int numKeys = 4;
final int valueBytes = KEY_VALUE_PARTITION_SIZE / 2;
// Creates a key-value store.
TachyonURI originalKVStoreURI = new TachyonURI(PathUtils.uniqPath());
KeyValueStores kvStores = KeyValueStores.Factory.create();
KeyValueStoreWriter writer = kvStores.create(originalKVStoreURI);
for (int i = 1; i <= numKeys; i ++) {
byte[] key = BufferUtils.getIncreasingByteArray(i);
byte[] value = BufferUtils.getIncreasingByteArray(valueBytes);
writer.put(key, value);
}
writer.close();
// Verifies that the newly created store has correct key-value pairs.
KeyValueStoreReader reader = kvStores.open(originalKVStoreURI);
Assert.assertEquals(numKeys, reader.size());
for (int i = 1; i <= numKeys; i ++) {
byte[] key = BufferUtils.getIncreasingByteArray(i);
byte[] expectedValue = BufferUtils.getIncreasingByteArray(valueBytes);
byte[] gotValue = reader.get(key);
Assert.assertEquals(ByteBuffer.wrap(expectedValue), ByteBuffer.wrap(gotValue));
}
reader.close();
// Clones the store.
TachyonURI clonedKVStoreURI = new TachyonURI(PathUtils.uniqPath());
CloneKeyValueStoreMapReduce.run(sHadoopConf, originalKVStoreURI.getPath(),
clonedKVStoreURI.getPath());
// Verifies that the cloned store is the same as the original one.
Assert.assertTrue(new SameKeyValueStoresTest(new TachyonURI(originalKVStoreURI.getPath()),
new TachyonURI(clonedKVStoreURI.getPath())).call());
}
}
| Fix compilation after merging upstream
| tests/src/test/java/tachyon/client/keyvalue/hadoop/KeyValueStoreMapReduceIntegrationTest.java | Fix compilation after merging upstream |
|
Java | apache-2.0 | 9d31e59d5b54ef64475e6e8ea36c3e2fc2e7f379 | 0 | GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,wido/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,wido/cloudstack,resmo/cloudstack,wido/cloudstack,jcshen007/cloudstack,wido/cloudstack,resmo/cloudstack,resmo/cloudstack,jcshen007/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,jcshen007/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cloudstack.storage.motion;
import java.util.HashMap;
import java.util.Map;

import javax.inject.Inject;

import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;

import org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope;
import org.apache.cloudstack.engine.subsystem.api.storage.CopyCommandResult;
import org.apache.cloudstack.engine.subsystem.api.storage.DataMotionStrategy;
import org.apache.cloudstack.engine.subsystem.api.storage.DataObject;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager;
import org.apache.cloudstack.engine.subsystem.api.storage.EndPoint;
import org.apache.cloudstack.engine.subsystem.api.storage.EndPointSelector;
import org.apache.cloudstack.engine.subsystem.api.storage.HostScope;
import org.apache.cloudstack.engine.subsystem.api.storage.ObjectInDataStoreStateMachine.Event;
import org.apache.cloudstack.engine.subsystem.api.storage.Scope;
import org.apache.cloudstack.engine.subsystem.api.storage.SnapshotInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.StorageAction;
import org.apache.cloudstack.engine.subsystem.api.storage.StorageCacheManager;
import org.apache.cloudstack.engine.subsystem.api.storage.StrategyPriority;
import org.apache.cloudstack.engine.subsystem.api.storage.TemplateInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.VolumeInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.ZoneScope;
import org.apache.cloudstack.framework.async.AsyncCompletionCallback;
import org.apache.cloudstack.framework.config.dao.ConfigurationDao;
import org.apache.cloudstack.storage.RemoteHostEndPoint;
import org.apache.cloudstack.storage.command.CopyCommand;
import org.apache.cloudstack.storage.image.datastore.ImageStoreEntity;

import com.cloud.agent.api.Answer;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.storage.MigrateVolumeAnswer;
import com.cloud.agent.api.storage.MigrateVolumeCommand;
import com.cloud.agent.api.to.DataObjectType;
import com.cloud.agent.api.to.DataStoreTO;
import com.cloud.agent.api.to.DataTO;
import com.cloud.agent.api.to.NfsTO;
import com.cloud.agent.api.to.VirtualMachineTO;
import com.cloud.configuration.Config;
import com.cloud.host.Host;
import com.cloud.storage.DataStoreRole;
import com.cloud.storage.Storage.StoragePoolType;
import com.cloud.storage.StoragePool;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.db.DB;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.vm.VirtualMachineManager;
/**
 * Fallback {@link DataMotionStrategy} that moves data objects (volumes, snapshots,
 * templates) between data stores by sending {@link CopyCommand}s to an endpoint
 * (hypervisor host or SSVM), optionally staging the data through zone-wide cache
 * (staging) storage when neither side is directly reachable over NFS.
 *
 * Registered at {@link StrategyPriority#DEFAULT} so vendor/driver-specific
 * strategies can take precedence.
 */
@Component
public class AncientDataMotionStrategy implements DataMotionStrategy {
    private static final Logger s_logger = Logger.getLogger(AncientDataMotionStrategy.class);

    @Inject
    EndPointSelector selector;
    @Inject
    ConfigurationDao configDao;
    @Inject
    VolumeDao volDao;
    @Inject
    DataStoreManager dataStoreMgr;
    @Inject
    StorageCacheManager cacheMgr;

    @Override
    public StrategyPriority canHandle(DataObject srcData, DataObject destData) {
        // Catch-all strategy: accepts any single-object copy at the lowest priority.
        return StrategyPriority.DEFAULT;
    }

    @Override
    public StrategyPriority canHandle(Map<VolumeInfo, DataStore> volumeMap, Host srcHost, Host destHost) {
        // VM migration-with-storage is not implemented by this strategy.
        return StrategyPriority.CANT_HANDLE;
    }

    /**
     * Returns true when the copy must be staged through cache storage.
     * No staging is needed when either end is NFS or already an image cache,
     * because the endpoint can reach those stores directly.
     */
    protected boolean needCacheStorage(DataObject srcData, DataObject destData) {
        DataTO srcTO = srcData.getTO();
        DataStoreTO srcStoreTO = srcTO.getDataStore();
        if (srcStoreTO instanceof NfsTO || srcStoreTO.getRole() == DataStoreRole.ImageCache) {
            return false;
        }
        DataTO destTO = destData.getTO();
        DataStoreTO destStoreTO = destTO.getDataStore();
        if (destStoreTO instanceof NfsTO || destStoreTO.getRole() == DataStoreRole.ImageCache) {
            return false;
        }
        if (s_logger.isDebugEnabled()) {
            s_logger.debug("needCacheStorage true, dest at " + destTO.getPath() + " dest role " + destStoreTO.getRole().toString() + srcTO.getPath() + " src role " +
                srcStoreTO.getRole().toString());
        }
        return true;
    }

    /** Widens a cluster- or host-scoped destination to its containing zone scope. */
    private Scope getZoneScope(Scope destScope) {
        ZoneScope zoneScope = null;
        if (destScope instanceof ClusterScope) {
            ClusterScope clusterScope = (ClusterScope)destScope;
            zoneScope = new ZoneScope(clusterScope.getZoneId());
        } else if (destScope instanceof HostScope) {
            HostScope hostScope = (HostScope)destScope;
            zoneScope = new ZoneScope(hostScope.getZoneId());
        } else {
            zoneScope = (ZoneScope)destScope;
        }
        return zoneScope;
    }

    /**
     * Picks a zone-wide scope for the staging store: the source's zone when it is
     * zone-resolvable, else the destination's; returns null (with a warning) when
     * neither side resolves to a zone.
     */
    private Scope pickCacheScopeForCopy(DataObject srcData, DataObject destData) {
        Scope srcScope = srcData.getDataStore().getScope();
        Scope destScope = destData.getDataStore().getScope();
        Scope selectedScope = null;
        if (srcScope.getScopeId() != null) {
            selectedScope = getZoneScope(srcScope);
        } else if (destScope.getScopeId() != null) {
            selectedScope = getZoneScope(destScope);
        } else {
            s_logger.warn("Cannot find a zone-wide scope for movement that needs a cache storage");
        }
        return selectedScope;
    }

    /**
     * Sends {@code cmd} to {@code ep}, or synthesizes a failure {@link Answer}
     * when no endpoint is available (host or SSVM down). Centralizes the
     * null-endpoint handling every copy path needs.
     */
    private Answer sendCommand(EndPoint ep, Command cmd) {
        if (ep == null) {
            String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
            s_logger.error(errMsg);
            return new Answer(cmd, false, errMsg);
        }
        return ep.sendMessage(cmd);
    }

    /**
     * Copies srcData to destData, staging through cache storage when required.
     * When destHost is non-null the command is sent to that host's endpoint,
     * otherwise an endpoint is selected automatically.
     *
     * @throws CloudRuntimeException on any failure while preparing or sending the copy
     */
    protected Answer copyObject(DataObject srcData, DataObject destData, Host destHost) {
        String value = configDao.getValue(Config.PrimaryStorageDownloadWait.toString());
        int _primaryStorageDownloadWait = NumbersUtil.parseInt(value, Integer.parseInt(Config.PrimaryStorageDownloadWait.getDefaultValue()));
        Answer answer = null;
        DataObject cacheData = null;
        DataObject srcForCopy = srcData;
        try {
            if (needCacheStorage(srcData, destData)) {
                Scope destScope = pickCacheScopeForCopy(srcData, destData);
                srcForCopy = cacheData = cacheMgr.createCacheObject(srcData, destScope);
            }

            CopyCommand cmd = new CopyCommand(srcForCopy.getTO(), destData.getTO(), _primaryStorageDownloadWait, VirtualMachineManager.ExecuteInSequence.value());
            EndPoint ep = destHost != null ? RemoteHostEndPoint.getHypervisorHostEndPoint(destHost) : selector.select(srcForCopy, destData);
            answer = sendCommand(ep, cmd);

            if (cacheData != null) {
                final Long cacheId = cacheData.getId();
                final String cacheType = cacheData.getType().toString();
                final String cacheUuid = cacheData.getUuid().toString();
                if (srcData.getType() == DataObjectType.VOLUME &&
                    (destData.getType() == DataObjectType.VOLUME ||
                        destData.getType() == DataObjectType.TEMPLATE)) {
                    // Volume transfer from primary to secondary. Transfers between primary
                    // pools are handled by copyVolumeBetweenPools. Delete the cache entry so
                    // a later copy always transfers a fresh image.
                    s_logger.debug("Delete " + cacheType + " cache(id: " + cacheId +
                        ", uuid: " + cacheUuid + ")");
                    cacheMgr.deleteCacheObject(srcForCopy);
                } else {
                    // For templates, keep the cache entry around for performance.
                    if ((answer == null || !answer.getResult()) && srcForCopy.getRefCount() < 2) {
                        // Cache object was created by this copy and isn't shared; drop it.
                        // Guard ep: it can legitimately be null when no endpoint was found.
                        s_logger.warn("Copy may not be handled correctly by agent(id: " + (ep == null ? "unknown" : ep.getId()) + ")." +
                            " Delete " + cacheType + " cache(id: " + cacheId +
                            ", uuid: " + cacheUuid + ")");
                        cacheMgr.deleteCacheObject(srcForCopy);
                    } else {
                        s_logger.debug("Decrease reference count of " + cacheType +
                            " cache(id: " + cacheId + ", uuid: " + cacheUuid + ")");
                        cacheMgr.releaseCacheObject(srcForCopy);
                    }
                }
            }
            return answer;
        } catch (Exception e) {
            s_logger.debug("copy object failed: ", e);
            if (cacheData != null) {
                cacheMgr.deleteCacheObject(cacheData);
            }
            throw new CloudRuntimeException(e.toString());
        }
    }

    /** Convenience overload of {@link #copyObject(DataObject, DataObject, Host)} with automatic endpoint selection. */
    protected Answer copyObject(DataObject srcData, DataObject destData) {
        return copyObject(srcData, destData, null);
    }

    /**
     * Copies a snapshot and all of its parents into cache storage; returns the
     * cache object for the leaf (the snapshot itself).
     */
    protected DataObject cacheSnapshotChain(SnapshotInfo snapshot, Scope scope) {
        DataObject leafData = null;
        DataStore store = cacheMgr.getCacheStorage(snapshot, scope);
        while (snapshot != null) {
            DataObject cacheData = cacheMgr.createCacheObject(snapshot, store);
            if (leafData == null) {
                leafData = cacheData;
            }
            snapshot = snapshot.getParent();
        }
        return leafData;
    }

    /** Deletes the cached copies of a whole snapshot chain. */
    protected void deleteSnapshotCacheChain(SnapshotInfo snapshot) {
        while (snapshot != null) {
            cacheMgr.deleteCacheObject(snapshot);
            snapshot = snapshot.getParent();
        }
    }

    /** Decrements the cache reference count of a whole snapshot chain, keeping the data. */
    protected void releaseSnapshotCacheChain(SnapshotInfo snapshot) {
        while (snapshot != null) {
            cacheMgr.releaseCacheObject(snapshot);
            snapshot = snapshot.getParent();
        }
    }

    /**
     * Creates a volume from a snapshot, first staging the snapshot chain into the
     * target pool's zone-wide cache when the snapshot store is not NFS.
     *
     * @throws CloudRuntimeException when the copy fails
     */
    protected Answer copyVolumeFromSnapshot(DataObject snapObj, DataObject volObj) {
        SnapshotInfo snapshot = (SnapshotInfo)snapObj;
        StoragePool pool = (StoragePool)volObj.getDataStore();
        String basicErrMsg = "Failed to create volume from " + snapshot.getName() + " on pool " + pool;
        DataStore store = snapObj.getDataStore();
        DataStoreTO storTO = store.getTO();
        DataObject srcData = snapObj;
        try {
            if (!(storTO instanceof NfsTO)) {
                // Cache the snapshot chain on the zone-wide staging store for the volume to be created.
                srcData = cacheSnapshotChain(snapshot, new ZoneScope(pool.getDataCenterId()));
            }

            String value = configDao.getValue(Config.CreateVolumeFromSnapshotWait.toString());
            int _createVolumeFromSnapshotWait = NumbersUtil.parseInt(value, Integer.parseInt(Config.CreateVolumeFromSnapshotWait.getDefaultValue()));

            EndPoint ep = null;
            if (srcData.getDataStore().getRole() == DataStoreRole.Primary) {
                ep = selector.select(volObj);
            } else {
                ep = selector.select(srcData, volObj);
            }

            CopyCommand cmd = new CopyCommand(srcData.getTO(), volObj.getTO(), _createVolumeFromSnapshotWait, VirtualMachineManager.ExecuteInSequence.value());
            return sendCommand(ep, cmd);
        } catch (Exception e) {
            s_logger.error(basicErrMsg, e);
            throw new CloudRuntimeException(basicErrMsg);
        } finally {
            if (!(storTO instanceof NfsTO)) {
                // Keep the snapshot on cache (it may have been migrated from a previous
                // secondary storage); only drop our reference.
                releaseSnapshotCacheChain((SnapshotInfo)srcData);
            }
        }
    }

    /**
     * Creates a volume as a clone of a template that is already on primary storage.
     *
     * @throws CloudRuntimeException when the command cannot be sent
     */
    protected Answer cloneVolume(DataObject template, DataObject volume) {
        CopyCommand cmd = new CopyCommand(template.getTO(), volume.getTO(), 0, VirtualMachineManager.ExecuteInSequence.value());
        try {
            EndPoint ep = selector.select(volume.getDataStore());
            return sendCommand(ep, cmd);
        } catch (Exception e) {
            s_logger.debug("Failed to send to storage pool", e);
            throw new CloudRuntimeException("Failed to send to storage pool", e);
        }
    }

    /**
     * Copies a volume between two primary pools through an intermediate hop:
     * a staging (cache) store when one exists, otherwise an NFS/CIFS image store.
     * Returns null when no suitable intermediate store can be found.
     */
    protected Answer copyVolumeBetweenPools(DataObject srcData, DataObject destData) {
        String value = configDao.getValue(Config.CopyVolumeWait.key());
        int _copyvolumewait = NumbersUtil.parseInt(value, Integer.parseInt(Config.CopyVolumeWait.getDefaultValue()));

        Scope destScope = getZoneScope(destData.getDataStore().getScope());
        DataStore cacheStore = cacheMgr.getCacheStorage(destScope);
        if (cacheStore == null) {
            // Need an NFS or CIFS image store as the intermediate hop; volumes cannot
            // be copied directly to object stores such as S3.
            ImageStoreEntity imageStore = (ImageStoreEntity)dataStoreMgr.getImageStore(destScope.getScopeId());
            if (!imageStore.getProtocol().equalsIgnoreCase("nfs") && !imageStore.getProtocol().equalsIgnoreCase("cifs")) {
                s_logger.debug("can't find a nfs (or cifs) image store to satisfy the need for a staging store");
                return null;
            }

            // Hop 1: primary -> image store.
            DataObject objOnImageStore = imageStore.create(srcData);
            objOnImageStore.processEvent(Event.CreateOnlyRequested);
            Answer answer = copyObject(srcData, objOnImageStore);
            if (answer == null || !answer.getResult()) {
                if (answer != null) {
                    s_logger.debug("copy to image store failed: " + answer.getDetails());
                }
                objOnImageStore.processEvent(Event.OperationFailed);
                imageStore.delete(objOnImageStore);
                return answer;
            }
            objOnImageStore.processEvent(Event.OperationSuccessed, answer);

            // Hop 2: image store -> destination primary.
            objOnImageStore.processEvent(Event.CopyingRequested);
            CopyCommand cmd = new CopyCommand(objOnImageStore.getTO(), destData.getTO(), _copyvolumewait, VirtualMachineManager.ExecuteInSequence.value());
            EndPoint ep = selector.select(objOnImageStore, destData);
            answer = sendCommand(ep, cmd);
            if (answer == null || !answer.getResult()) {
                if (answer != null) {
                    s_logger.debug("copy to primary store failed: " + answer.getDetails());
                }
                objOnImageStore.processEvent(Event.OperationFailed);
                imageStore.delete(objOnImageStore);
                return answer;
            }
            objOnImageStore.processEvent(Event.OperationSuccessed);
            imageStore.delete(objOnImageStore);
            return answer;
        } else {
            // Stage through cache storage, then copy to the destination pool.
            DataObject cacheData = cacheMgr.createCacheObject(srcData, destScope);
            CopyCommand cmd = new CopyCommand(cacheData.getTO(), destData.getTO(), _copyvolumewait, VirtualMachineManager.ExecuteInSequence.value());
            EndPoint ep = selector.select(cacheData, destData);
            Answer answer = sendCommand(ep, cmd);
            // Delete the intermediate volume copy from the cache store.
            if (cacheData != null) {
                cacheMgr.deleteCacheObject(cacheData);
            }
            return answer;
        }
    }

    /**
     * Migrates a volume to another primary pool via {@link MigrateVolumeCommand}
     * and, on success, updates the volume's path/pod/pool records in the database.
     *
     * @throws CloudRuntimeException when the migration fails
     */
    protected Answer migrateVolumeToPool(DataObject srcData, DataObject destData) {
        String value = configDao.getValue(Config.MigrateWait.key());
        int waitInterval = NumbersUtil.parseInt(value, Integer.parseInt(Config.MigrateWait.getDefaultValue()));

        VolumeInfo volume = (VolumeInfo)srcData;
        StoragePool destPool = (StoragePool)dataStoreMgr.getDataStore(destData.getDataStore().getId(), DataStoreRole.Primary);
        MigrateVolumeCommand command = new MigrateVolumeCommand(volume.getId(), volume.getPath(), destPool, volume.getAttachedVmName(), waitInterval);
        EndPoint ep = selector.select(srcData, StorageAction.MIGRATEVOLUME);
        Answer answer = sendCommand(ep, command);
        if (answer == null || !answer.getResult()) {
            throw new CloudRuntimeException("Failed to migrate volume " + volume + " to storage pool " + destPool);
        } else {
            // Update the volume details after migration.
            VolumeVO volumeVo = volDao.findById(volume.getId());
            Long oldPoolId = volume.getPoolId();
            volumeVo.setPath(((MigrateVolumeAnswer)answer).getVolumePath());
            volumeVo.setPodId(destPool.getPodId());
            volumeVo.setPoolId(destPool.getId());
            volumeVo.setLastPoolId(oldPoolId);
            // For SMB, pool credentials are also stored in the uri query string. We trim the query string
            // part here to make sure the credentials do not get stored in the db unencrypted.
            String folder = destPool.getPath();
            if (destPool.getPoolType() == StoragePoolType.SMB && folder != null && folder.contains("?")) {
                folder = folder.substring(0, folder.indexOf("?"));
            }
            volumeVo.setFolder(folder);
            volDao.update(volume.getId(), volumeVo);
        }
        return answer;
    }

    /**
     * Dispatches a single-object copy to the appropriate specialized path based on
     * the source/destination object types, and completes the callback with the
     * resulting answer (or error message).
     *
     * Note: destHost is currently only used if the copyObject method is invoked.
     */
    @Override
    public Void copyAsync(DataObject srcData, DataObject destData, Host destHost, AsyncCompletionCallback<CopyCommandResult> callback) {
        Answer answer = null;
        String errMsg = null;
        try {
            s_logger.debug("copyAsync inspecting src type " + srcData.getType().toString() + " copyAsync inspecting dest type " + destData.getType().toString());
            if (srcData.getType() == DataObjectType.SNAPSHOT && destData.getType() == DataObjectType.VOLUME) {
                answer = copyVolumeFromSnapshot(srcData, destData);
            } else if (srcData.getType() == DataObjectType.SNAPSHOT && destData.getType() == DataObjectType.TEMPLATE) {
                answer = createTemplateFromSnapshot(srcData, destData);
            } else if (srcData.getType() == DataObjectType.TEMPLATE && destData.getType() == DataObjectType.VOLUME) {
                answer = cloneVolume(srcData, destData);
            } else if (destData.getType() == DataObjectType.VOLUME && srcData.getType() == DataObjectType.VOLUME &&
                srcData.getDataStore().getRole() == DataStoreRole.Primary && destData.getDataStore().getRole() == DataStoreRole.Primary) {
                if (srcData.getId() == destData.getId()) {
                    // The volume has to be migrated across storage pools.
                    answer = migrateVolumeToPool(srcData, destData);
                } else {
                    answer = copyVolumeBetweenPools(srcData, destData);
                }
            } else if (srcData.getType() == DataObjectType.SNAPSHOT && destData.getType() == DataObjectType.SNAPSHOT) {
                answer = copySnapshot(srcData, destData);
            } else {
                answer = copyObject(srcData, destData, destHost);
            }

            if (answer != null && !answer.getResult()) {
                errMsg = answer.getDetails();
            }
        } catch (Exception e) {
            s_logger.debug("copy failed", e);
            errMsg = e.toString();
        }
        CopyCommandResult result = new CopyCommandResult(null, answer);
        result.setResult(errMsg);
        callback.complete(result);
        return null;
    }

    @Override
    public Void copyAsync(DataObject srcData, DataObject destData, AsyncCompletionCallback<CopyCommandResult> callback) {
        return copyAsync(srcData, destData, null, callback);
    }

    /**
     * Creates a template from a snapshot, staging the snapshot chain first when
     * cache storage is required, then releasing the staging reference.
     */
    @DB
    protected Answer createTemplateFromSnapshot(DataObject srcData, DataObject destData) {
        String value = configDao.getValue(Config.CreatePrivateTemplateFromSnapshotWait.toString());
        int _createprivatetemplatefromsnapshotwait = NumbersUtil.parseInt(value, Integer.parseInt(Config.CreatePrivateTemplateFromSnapshotWait.getDefaultValue()));

        boolean needCache = false;
        if (needCacheStorage(srcData, destData)) {
            needCache = true;
            SnapshotInfo snapshot = (SnapshotInfo) srcData;
            srcData = cacheSnapshotChain(snapshot, snapshot.getDataStore().getScope());
        }

        EndPoint ep = null;
        if (srcData.getDataStore().getRole() == DataStoreRole.Primary) {
            ep = selector.select(destData);
        } else {
            ep = selector.select(srcData, destData);
        }

        CopyCommand cmd = new CopyCommand(srcData.getTO(), destData.getTO(), _createprivatetemplatefromsnapshotwait, VirtualMachineManager.ExecuteInSequence.value());
        Answer answer = sendCommand(ep, cmd);

        // Clean up the snapshot copied to staging: reduce the ref count but keep the
        // data on cache (it may have been converted from previous secondary storage).
        if (needCache && srcData != null) {
            cacheMgr.releaseCacheObject(srcData);
        }
        return answer;
    }

    /**
     * Backs up a snapshot to the destination store, optionally through a cache
     * store. The snapshot's payload (a Boolean, default true) marks whether this
     * is a full snapshot, passed down as the "fullSnapshot" option.
     *
     * @throws CloudRuntimeException on any failure while preparing or sending the copy
     */
    protected Answer copySnapshot(DataObject srcData, DataObject destData) {
        String value = configDao.getValue(Config.BackupSnapshotWait.toString());
        int _backupsnapshotwait = NumbersUtil.parseInt(value, Integer.parseInt(Config.BackupSnapshotWait.getDefaultValue()));

        DataObject cacheData = null;
        SnapshotInfo snapshotInfo = (SnapshotInfo)srcData;
        Object payload = snapshotInfo.getPayload();
        Boolean fullSnapshot = true;
        if (payload != null) {
            fullSnapshot = (Boolean)payload;
        }
        Map<String, String> options = new HashMap<String, String>();
        options.put("fullSnapshot", fullSnapshot.toString());
        Answer answer = null;
        try {
            if (needCacheStorage(srcData, destData)) {
                Scope selectedScope = pickCacheScopeForCopy(srcData, destData);
                cacheData = cacheMgr.getCacheObject(srcData, selectedScope);

                CopyCommand cmd = new CopyCommand(srcData.getTO(), destData.getTO(), _backupsnapshotwait, VirtualMachineManager.ExecuteInSequence.value());
                cmd.setCacheTO(cacheData.getTO());
                cmd.setOptions(options);
                EndPoint ep = selector.select(srcData, destData);
                answer = sendCommand(ep, cmd);
            } else {
                CopyCommand cmd = new CopyCommand(srcData.getTO(), destData.getTO(), _backupsnapshotwait, VirtualMachineManager.ExecuteInSequence.value());
                cmd.setOptions(options);
                EndPoint ep = selector.select(srcData, destData, StorageAction.BACKUPSNAPSHOT);
                answer = sendCommand(ep, cmd);
            }
            // Clean up the cache entry; it was only needed for this backup.
            if (cacheData != null) {
                cacheMgr.deleteCacheObject(cacheData);
            }
            return answer;
        } catch (Exception e) {
            s_logger.debug("copy snapshot failed: " + e.toString());
            if (cacheData != null) {
                cacheMgr.deleteCacheObject(cacheData);
            }
            throw new CloudRuntimeException(e.toString());
        }
    }

    @Override
    public Void copyAsync(Map<VolumeInfo, DataStore> volumeMap, VirtualMachineTO vmTo, Host srcHost, Host destHost, AsyncCompletionCallback<CopyCommandResult> callback) {
        // Multi-volume (VM-wide) copy is not supported by this strategy.
        CopyCommandResult result = new CopyCommandResult(null, null);
        result.setResult("Unsupported operation requested for copying data.");
        callback.complete(result);
        return null;
    }
}
| engine/storage/datamotion/src/org/apache/cloudstack/storage/motion/AncientDataMotionStrategy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cloudstack.storage.motion;
import java.util.HashMap;
import java.util.Map;
import javax.inject.Inject;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;
import org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope;
import org.apache.cloudstack.engine.subsystem.api.storage.CopyCommandResult;
import org.apache.cloudstack.engine.subsystem.api.storage.DataMotionStrategy;
import org.apache.cloudstack.engine.subsystem.api.storage.DataObject;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager;
import org.apache.cloudstack.engine.subsystem.api.storage.EndPoint;
import org.apache.cloudstack.engine.subsystem.api.storage.EndPointSelector;
import org.apache.cloudstack.engine.subsystem.api.storage.HostScope;
import org.apache.cloudstack.engine.subsystem.api.storage.ObjectInDataStoreStateMachine.Event;
import org.apache.cloudstack.engine.subsystem.api.storage.Scope;
import org.apache.cloudstack.engine.subsystem.api.storage.SnapshotInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.StorageAction;
import org.apache.cloudstack.engine.subsystem.api.storage.StorageCacheManager;
import org.apache.cloudstack.engine.subsystem.api.storage.StrategyPriority;
import org.apache.cloudstack.engine.subsystem.api.storage.TemplateInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.VolumeInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.ZoneScope;
import org.apache.cloudstack.framework.async.AsyncCompletionCallback;
import org.apache.cloudstack.framework.config.dao.ConfigurationDao;
import org.apache.cloudstack.storage.RemoteHostEndPoint;
import org.apache.cloudstack.storage.command.CopyCommand;
import org.apache.cloudstack.storage.image.datastore.ImageStoreEntity;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.storage.MigrateVolumeAnswer;
import com.cloud.agent.api.storage.MigrateVolumeCommand;
import com.cloud.agent.api.to.DataObjectType;
import com.cloud.agent.api.to.DataStoreTO;
import com.cloud.agent.api.to.DataTO;
import com.cloud.agent.api.to.NfsTO;
import com.cloud.agent.api.to.VirtualMachineTO;
import com.cloud.configuration.Config;
import com.cloud.host.Host;
import com.cloud.storage.DataStoreRole;
import com.cloud.storage.StoragePool;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.Storage.StoragePoolType;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.db.DB;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.vm.VirtualMachineManager;
@Component
public class AncientDataMotionStrategy implements DataMotionStrategy {
private static final Logger s_logger = Logger.getLogger(AncientDataMotionStrategy.class);
@Inject
EndPointSelector selector;
@Inject
ConfigurationDao configDao;
@Inject
VolumeDao volDao;
@Inject
DataStoreManager dataStoreMgr;
@Inject
StorageCacheManager cacheMgr;
// Catch-all strategy: accepts any single-object copy at the lowest (default)
// priority so that vendor/driver-specific strategies can take precedence.
@Override
public StrategyPriority canHandle(DataObject srcData, DataObject destData) {
return StrategyPriority.DEFAULT;
}
// VM migration-with-storage (multi-volume map form) is not implemented by
// this strategy, so it declines to handle it.
@Override
public StrategyPriority canHandle(Map<VolumeInfo, DataStore> volumeMap, Host srcHost, Host destHost) {
return StrategyPriority.CANT_HANDLE;
}
/**
 * Returns true when the copy must be staged through cache (staging) storage.
 * No staging is needed when either end is NFS or already an image cache,
 * because the endpoint can reach those stores directly.
 *
 * Fix: removed a dead no-op branch that cast the source to TemplateInfo into
 * an unused local variable.
 */
protected boolean needCacheStorage(DataObject srcData, DataObject destData) {
    DataTO srcTO = srcData.getTO();
    DataStoreTO srcStoreTO = srcTO.getDataStore();
    if (srcStoreTO instanceof NfsTO || srcStoreTO.getRole() == DataStoreRole.ImageCache) {
        return false;
    }
    DataTO destTO = destData.getTO();
    DataStoreTO destStoreTO = destTO.getDataStore();
    if (destStoreTO instanceof NfsTO || destStoreTO.getRole() == DataStoreRole.ImageCache) {
        return false;
    }
    if (s_logger.isDebugEnabled()) {
        s_logger.debug("needCacheStorage true, dest at " + destTO.getPath() + " dest role " + destStoreTO.getRole().toString() + srcTO.getPath() + " src role " +
            srcStoreTO.getRole().toString());
    }
    return true;
}
private Scope getZoneScope(Scope destScope) {
ZoneScope zoneScope = null;
if (destScope instanceof ClusterScope) {
ClusterScope clusterScope = (ClusterScope)destScope;
zoneScope = new ZoneScope(clusterScope.getZoneId());
} else if (destScope instanceof HostScope) {
HostScope hostScope = (HostScope)destScope;
zoneScope = new ZoneScope(hostScope.getZoneId());
} else {
zoneScope = (ZoneScope)destScope;
}
return zoneScope;
}
private Scope pickCacheScopeForCopy(DataObject srcData, DataObject destData) {
Scope srcScope = srcData.getDataStore().getScope();
Scope destScope = destData.getDataStore().getScope();
Scope selectedScope = null;
if (srcScope.getScopeId() != null) {
selectedScope = getZoneScope(srcScope);
} else if (destScope.getScopeId() != null) {
selectedScope = getZoneScope(destScope);
} else {
s_logger.warn("Cannot find a zone-wide scope for movement that needs a cache storage");
}
return selectedScope;
}
protected Answer copyObject(DataObject srcData, DataObject destData, Host destHost) {
String value = configDao.getValue(Config.PrimaryStorageDownloadWait.toString());
int _primaryStorageDownloadWait = NumbersUtil.parseInt(value, Integer.parseInt(Config.PrimaryStorageDownloadWait.getDefaultValue()));
Answer answer = null;
DataObject cacheData = null;
DataObject srcForCopy = srcData;
try {
if (needCacheStorage(srcData, destData)) {
Scope destScope = pickCacheScopeForCopy(srcData, destData);
srcForCopy = cacheData = cacheMgr.createCacheObject(srcData, destScope);
}
CopyCommand cmd = new CopyCommand(srcForCopy.getTO(), destData.getTO(), _primaryStorageDownloadWait, VirtualMachineManager.ExecuteInSequence.value());
EndPoint ep = destHost != null ? RemoteHostEndPoint.getHypervisorHostEndPoint(destHost) : selector.select(srcForCopy, destData);
if (ep == null) {
String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
s_logger.error(errMsg);
answer = new Answer(cmd, false, errMsg);
} else {
answer = ep.sendMessage(cmd);
}
if (cacheData != null) {
if (srcData.getType() == DataObjectType.VOLUME && destData.getType() == DataObjectType.VOLUME) {
// volume transfer from primary to secondary or vice versa. Volume transfer between primary pools are already handled by copyVolumeBetweenPools
cacheMgr.deleteCacheObject(srcForCopy);
} else {
// for template, we want to leave it on cache for performance reason
if ((answer == null || !answer.getResult()) && srcForCopy.getRefCount() < 2) {
// cache object created by this copy, not already there
cacheMgr.deleteCacheObject(srcForCopy);
} else {
cacheMgr.releaseCacheObject(srcForCopy);
}
}
}
return answer;
} catch (Exception e) {
s_logger.debug("copy object failed: ", e);
if (cacheData != null) {
cacheMgr.deleteCacheObject(cacheData);
}
throw new CloudRuntimeException(e.toString());
}
}
protected Answer copyObject(DataObject srcData, DataObject destData) {
return copyObject(srcData, destData, null);
}
protected DataObject cacheSnapshotChain(SnapshotInfo snapshot, Scope scope) {
DataObject leafData = null;
DataStore store = cacheMgr.getCacheStorage(snapshot, scope);
while (snapshot != null) {
DataObject cacheData = cacheMgr.createCacheObject(snapshot, store);
if (leafData == null) {
leafData = cacheData;
}
snapshot = snapshot.getParent();
}
return leafData;
}
protected void deleteSnapshotCacheChain(SnapshotInfo snapshot) {
while (snapshot != null) {
cacheMgr.deleteCacheObject(snapshot);
snapshot = snapshot.getParent();
}
}
protected void releaseSnapshotCacheChain(SnapshotInfo snapshot) {
while (snapshot != null) {
cacheMgr.releaseCacheObject(snapshot);
snapshot = snapshot.getParent();
}
}
protected Answer copyVolumeFromSnapshot(DataObject snapObj, DataObject volObj) {
SnapshotInfo snapshot = (SnapshotInfo)snapObj;
StoragePool pool = (StoragePool)volObj.getDataStore();
String basicErrMsg = "Failed to create volume from " + snapshot.getName() + " on pool " + pool;
DataStore store = snapObj.getDataStore();
DataStoreTO storTO = store.getTO();
DataObject srcData = snapObj;
try {
if (!(storTO instanceof NfsTO)) {
// cache snapshot to zone-wide staging store for the volume to be created
srcData = cacheSnapshotChain(snapshot, new ZoneScope(pool.getDataCenterId()));
}
String value = configDao.getValue(Config.CreateVolumeFromSnapshotWait.toString());
int _createVolumeFromSnapshotWait = NumbersUtil.parseInt(value, Integer.parseInt(Config.CreateVolumeFromSnapshotWait.getDefaultValue()));
EndPoint ep = null;
if (srcData.getDataStore().getRole() == DataStoreRole.Primary) {
ep = selector.select(volObj);
} else {
ep = selector.select(srcData, volObj);
}
CopyCommand cmd = new CopyCommand(srcData.getTO(), volObj.getTO(), _createVolumeFromSnapshotWait, VirtualMachineManager.ExecuteInSequence.value());
Answer answer = null;
if (ep == null) {
String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
s_logger.error(errMsg);
answer = new Answer(cmd, false, errMsg);
} else {
answer = ep.sendMessage(cmd);
}
return answer;
} catch (Exception e) {
s_logger.error(basicErrMsg, e);
throw new CloudRuntimeException(basicErrMsg);
} finally {
if (!(storTO instanceof NfsTO)) {
// still keep snapshot on cache which may be migrated from previous secondary storage
releaseSnapshotCacheChain((SnapshotInfo)srcData);
}
}
}
protected Answer cloneVolume(DataObject template, DataObject volume) {
CopyCommand cmd = new CopyCommand(template.getTO(), volume.getTO(), 0, VirtualMachineManager.ExecuteInSequence.value());
try {
EndPoint ep = selector.select(volume.getDataStore());
Answer answer = null;
if (ep == null) {
String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
s_logger.error(errMsg);
answer = new Answer(cmd, false, errMsg);
} else {
answer = ep.sendMessage(cmd);
}
return answer;
} catch (Exception e) {
s_logger.debug("Failed to send to storage pool", e);
throw new CloudRuntimeException("Failed to send to storage pool", e);
}
}
protected Answer copyVolumeBetweenPools(DataObject srcData, DataObject destData) {
String value = configDao.getValue(Config.CopyVolumeWait.key());
int _copyvolumewait = NumbersUtil.parseInt(value, Integer.parseInt(Config.CopyVolumeWait.getDefaultValue()));
Scope destScope = getZoneScope(destData.getDataStore().getScope());
DataStore cacheStore = cacheMgr.getCacheStorage(destScope);
if (cacheStore == null) {
// need to find a nfs or cifs image store, assuming that can't copy volume
// directly to s3
ImageStoreEntity imageStore = (ImageStoreEntity)dataStoreMgr.getImageStore(destScope.getScopeId());
if (!imageStore.getProtocol().equalsIgnoreCase("nfs") && !imageStore.getProtocol().equalsIgnoreCase("cifs")) {
s_logger.debug("can't find a nfs (or cifs) image store to satisfy the need for a staging store");
return null;
}
DataObject objOnImageStore = imageStore.create(srcData);
objOnImageStore.processEvent(Event.CreateOnlyRequested);
Answer answer = copyObject(srcData, objOnImageStore);
if (answer == null || !answer.getResult()) {
if (answer != null) {
s_logger.debug("copy to image store failed: " + answer.getDetails());
}
objOnImageStore.processEvent(Event.OperationFailed);
imageStore.delete(objOnImageStore);
return answer;
}
objOnImageStore.processEvent(Event.OperationSuccessed, answer);
objOnImageStore.processEvent(Event.CopyingRequested);
CopyCommand cmd = new CopyCommand(objOnImageStore.getTO(), destData.getTO(), _copyvolumewait, VirtualMachineManager.ExecuteInSequence.value());
EndPoint ep = selector.select(objOnImageStore, destData);
if (ep == null) {
String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
s_logger.error(errMsg);
answer = new Answer(cmd, false, errMsg);
} else {
answer = ep.sendMessage(cmd);
}
if (answer == null || !answer.getResult()) {
if (answer != null) {
s_logger.debug("copy to primary store failed: " + answer.getDetails());
}
objOnImageStore.processEvent(Event.OperationFailed);
imageStore.delete(objOnImageStore);
return answer;
}
objOnImageStore.processEvent(Event.OperationSuccessed);
imageStore.delete(objOnImageStore);
return answer;
} else {
DataObject cacheData = cacheMgr.createCacheObject(srcData, destScope);
CopyCommand cmd = new CopyCommand(cacheData.getTO(), destData.getTO(), _copyvolumewait, VirtualMachineManager.ExecuteInSequence.value());
EndPoint ep = selector.select(cacheData, destData);
Answer answer = null;
if (ep == null) {
String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
s_logger.error(errMsg);
answer = new Answer(cmd, false, errMsg);
} else {
answer = ep.sendMessage(cmd);
}
// delete volume on cache store
if (cacheData != null) {
cacheMgr.deleteCacheObject(cacheData);
}
return answer;
}
}
protected Answer migrateVolumeToPool(DataObject srcData, DataObject destData) {
String value = configDao.getValue(Config.MigrateWait.key());
int waitInterval = NumbersUtil.parseInt(value, Integer.parseInt(Config.MigrateWait.getDefaultValue()));
VolumeInfo volume = (VolumeInfo)srcData;
StoragePool destPool = (StoragePool)dataStoreMgr.getDataStore(destData.getDataStore().getId(), DataStoreRole.Primary);
MigrateVolumeCommand command = new MigrateVolumeCommand(volume.getId(), volume.getPath(), destPool, volume.getAttachedVmName(), waitInterval);
EndPoint ep = selector.select(srcData, StorageAction.MIGRATEVOLUME);
Answer answer = null;
if (ep == null) {
String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
s_logger.error(errMsg);
answer = new Answer(command, false, errMsg);
} else {
answer = ep.sendMessage(command);
}
if (answer == null || !answer.getResult()) {
throw new CloudRuntimeException("Failed to migrate volume " + volume + " to storage pool " + destPool);
} else {
// Update the volume details after migration.
VolumeVO volumeVo = volDao.findById(volume.getId());
Long oldPoolId = volume.getPoolId();
volumeVo.setPath(((MigrateVolumeAnswer)answer).getVolumePath());
volumeVo.setPodId(destPool.getPodId());
volumeVo.setPoolId(destPool.getId());
volumeVo.setLastPoolId(oldPoolId);
// For SMB, pool credentials are also stored in the uri query string. We trim the query string
// part here to make sure the credentials do not get stored in the db unencrypted.
String folder = destPool.getPath();
if (destPool.getPoolType() == StoragePoolType.SMB && folder != null && folder.contains("?")) {
folder = folder.substring(0, folder.indexOf("?"));
}
volumeVo.setFolder(folder);
volDao.update(volume.getId(), volumeVo);
}
return answer;
}
// Note: destHost is currently only used if the copyObject method is invoked
@Override
public Void copyAsync(DataObject srcData, DataObject destData, Host destHost, AsyncCompletionCallback<CopyCommandResult> callback) {
Answer answer = null;
String errMsg = null;
try {
s_logger.debug("copyAsync inspecting src type " + srcData.getType().toString() + " copyAsync inspecting dest type " + destData.getType().toString());
if (srcData.getType() == DataObjectType.SNAPSHOT && destData.getType() == DataObjectType.VOLUME) {
answer = copyVolumeFromSnapshot(srcData, destData);
} else if (srcData.getType() == DataObjectType.SNAPSHOT && destData.getType() == DataObjectType.TEMPLATE) {
answer = createTemplateFromSnapshot(srcData, destData);
} else if (srcData.getType() == DataObjectType.TEMPLATE && destData.getType() == DataObjectType.VOLUME) {
answer = cloneVolume(srcData, destData);
} else if (destData.getType() == DataObjectType.VOLUME && srcData.getType() == DataObjectType.VOLUME &&
srcData.getDataStore().getRole() == DataStoreRole.Primary && destData.getDataStore().getRole() == DataStoreRole.Primary) {
if (srcData.getId() == destData.getId()) {
// The volume has to be migrated across storage pools.
answer = migrateVolumeToPool(srcData, destData);
} else {
answer = copyVolumeBetweenPools(srcData, destData);
}
} else if (srcData.getType() == DataObjectType.SNAPSHOT && destData.getType() == DataObjectType.SNAPSHOT) {
answer = copySnapshot(srcData, destData);
} else {
answer = copyObject(srcData, destData, destHost);
}
if (answer != null && !answer.getResult()) {
errMsg = answer.getDetails();
}
} catch (Exception e) {
s_logger.debug("copy failed", e);
errMsg = e.toString();
}
CopyCommandResult result = new CopyCommandResult(null, answer);
result.setResult(errMsg);
callback.complete(result);
return null;
}
@Override
public Void copyAsync(DataObject srcData, DataObject destData, AsyncCompletionCallback<CopyCommandResult> callback) {
return copyAsync(srcData, destData, null, callback);
}
@DB
protected Answer createTemplateFromSnapshot(DataObject srcData, DataObject destData) {
String value = configDao.getValue(Config.CreatePrivateTemplateFromSnapshotWait.toString());
int _createprivatetemplatefromsnapshotwait = NumbersUtil.parseInt(value, Integer.parseInt(Config.CreatePrivateTemplateFromSnapshotWait.getDefaultValue()));
boolean needCache = false;
if (needCacheStorage(srcData, destData)) {
needCache = true;
SnapshotInfo snapshot = (SnapshotInfo) srcData;
srcData = cacheSnapshotChain(snapshot, snapshot.getDataStore().getScope());
}
EndPoint ep = null;
if (srcData.getDataStore().getRole() == DataStoreRole.Primary) {
ep = selector.select(destData);
} else {
ep = selector.select(srcData, destData);
}
CopyCommand cmd = new CopyCommand(srcData.getTO(), destData.getTO(), _createprivatetemplatefromsnapshotwait, VirtualMachineManager.ExecuteInSequence.value());
Answer answer = null;
if (ep == null) {
String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
s_logger.error(errMsg);
answer = new Answer(cmd, false, errMsg);
} else {
answer = ep.sendMessage(cmd);
}
// clean up snapshot copied to staging
if (needCache && srcData != null) {
cacheMgr.releaseCacheObject(srcData); // reduce ref count, but keep it there on cache which is converted from previous secondary storage
}
return answer;
}
protected Answer copySnapshot(DataObject srcData, DataObject destData) {
String value = configDao.getValue(Config.BackupSnapshotWait.toString());
int _backupsnapshotwait = NumbersUtil.parseInt(value, Integer.parseInt(Config.BackupSnapshotWait.getDefaultValue()));
DataObject cacheData = null;
SnapshotInfo snapshotInfo = (SnapshotInfo)srcData;
Object payload = snapshotInfo.getPayload();
Boolean fullSnapshot = true;
if (payload != null) {
fullSnapshot = (Boolean)payload;
}
Map<String, String> options = new HashMap<String, String>();
options.put("fullSnapshot", fullSnapshot.toString());
Answer answer = null;
try {
if (needCacheStorage(srcData, destData)) {
Scope selectedScope = pickCacheScopeForCopy(srcData, destData);
cacheData = cacheMgr.getCacheObject(srcData, selectedScope);
CopyCommand cmd = new CopyCommand(srcData.getTO(), destData.getTO(), _backupsnapshotwait, VirtualMachineManager.ExecuteInSequence.value());
cmd.setCacheTO(cacheData.getTO());
cmd.setOptions(options);
EndPoint ep = selector.select(srcData, destData);
if (ep == null) {
String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
s_logger.error(errMsg);
answer = new Answer(cmd, false, errMsg);
} else {
answer = ep.sendMessage(cmd);
}
} else {
CopyCommand cmd = new CopyCommand(srcData.getTO(), destData.getTO(), _backupsnapshotwait, VirtualMachineManager.ExecuteInSequence.value());
cmd.setOptions(options);
EndPoint ep = selector.select(srcData, destData, StorageAction.BACKUPSNAPSHOT);
if (ep == null) {
String errMsg = "No remote endpoint to send command, check if host or ssvm is down?";
s_logger.error(errMsg);
answer = new Answer(cmd, false, errMsg);
} else {
answer = ep.sendMessage(cmd);
}
}
// clean up cache entry
if (cacheData != null) {
cacheMgr.deleteCacheObject(cacheData);
}
return answer;
} catch (Exception e) {
s_logger.debug("copy snasphot failed: " + e.toString());
if (cacheData != null) {
cacheMgr.deleteCacheObject(cacheData);
}
throw new CloudRuntimeException(e.toString());
}
}
@Override
public Void copyAsync(Map<VolumeInfo, DataStore> volumeMap, VirtualMachineTO vmTo, Host srcHost, Host destHost, AsyncCompletionCallback<CopyCommandResult> callback) {
CopyCommandResult result = new CopyCommandResult(null, null);
result.setResult("Unsupported operation requested for copying data.");
callback.complete(result);
return null;
}
}
| CLOUDSTACK-7412: Can't create proper template from VM on S3 secondary storage environment
Signed-off-by: Rajani Karuturi <[email protected]>
| engine/storage/datamotion/src/org/apache/cloudstack/storage/motion/AncientDataMotionStrategy.java | CLOUDSTACK-7412: Can't create proper template from VM on S3 secondary storage environment |
|
Java | apache-2.0 | 7d56b851797cb4c3191d63dcdb8f888d000c8c5f | 0 | spinnaker/halyard,spinnaker/halyard,spinnaker/halyard | package com.netflix.spinnaker.halyard.config.validate.v1;
import com.netflix.spinnaker.halyard.config.model.v1.node.Stats;
import com.netflix.spinnaker.halyard.config.model.v1.node.Validator;
import com.netflix.spinnaker.halyard.config.problem.v1.ConfigProblemSetBuilder;
import com.netflix.spinnaker.halyard.core.problem.v1.Problem.Severity;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Component
@Slf4j
public class StatsValidator extends Validator<Stats> {

  /**
   * Emits an informational problem describing whether usage-statistics
   * collection is enabled according to the given {@link Stats} settings,
   * including the hal command that toggles the setting.
   */
  @Override
  public void validate(ConfigProblemSetBuilder p, Stats t) {
    final String statusText;
    if (t.getEnabled()) {
      statusText =
          "ENABLED. Usage statistics are being collected. Thank you! "
              + "These stats inform improvements to the product, and that helps the community. "
              + "To disable, run `hal config stats disable`. ";
    } else {
      statusText =
          "DISABLED. Usage statistics are not being collected. "
              + "Please consider enabling statistic collection. "
              + "These stats inform improvements to the product, and that helps the community. "
              + "To enable, run `hal config stats enable`. ";
    }
    p.addProblem(
        Severity.INFO,
        "Stats are currently "
            + statusText
            + "To learn more about what and how stats data is used, please see "
            + "https://www.spinnaker.io/community/stats.");
  }
}
| halyard-config/src/main/java/com/netflix/spinnaker/halyard/config/validate/v1/StatsValidator.java | package com.netflix.spinnaker.halyard.config.validate.v1;
import com.netflix.spinnaker.halyard.config.model.v1.node.Stats;
import com.netflix.spinnaker.halyard.config.model.v1.node.Validator;
import com.netflix.spinnaker.halyard.config.problem.v1.ConfigProblemSetBuilder;
import com.netflix.spinnaker.halyard.core.problem.v1.Problem.Severity;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Component
@Slf4j
public class StatsValidator extends Validator<Stats> {

  /**
   * Emits an informational problem describing whether usage-statistics
   * collection is enabled according to the given {@link Stats} settings,
   * including the hal command that toggles the setting.
   */
  @Override
  public void validate(ConfigProblemSetBuilder p, Stats t) {
    StringBuilder msg = new StringBuilder();
    msg.append("Stats are currently ");
    if (t.getEnabled()) {
      // Keep this message ASCII-only: the em dash previously used here
      // ("collected—Thank you!") is rendered as "???" on consoles whose
      // default charset cannot encode it.
      msg.append("ENABLED. Usage statistics are being collected. Thank you! ");
      msg.append("These stats inform improvements to the product, and that helps the community. ");
      msg.append("To disable, run `hal config stats disable`. ");
    } else {
      msg.append("DISABLED. Usage statistics are not being collected. ");
      msg.append("Please consider enabling statistic collection. ");
      msg.append("These stats inform improvements to the product, and that helps the community. ");
      msg.append("To enable, run `hal config stats enable`. ");
    }
    msg.append("To learn more about what and how stats data is used, please see ");
    msg.append("https://www.spinnaker.io/community/stats.");
    p.addProblem(Severity.INFO, msg.toString());
  }
}
| fix(logs): do not print ??? when stats enabled (#1727)
| halyard-config/src/main/java/com/netflix/spinnaker/halyard/config/validate/v1/StatsValidator.java | fix(logs): do not print ??? when stats enabled (#1727) |
|
Java | apache-2.0 | 62109eb537f9aaeb497075a61b79b04e7badf67f | 0 | gentics/mesh,gentics/mesh,gentics/mesh,gentics/mesh | package com.gentics.mesh.changelog.changes;
import com.gentics.mesh.changelog.AbstractChange;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Vertex;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
 * Changelog entry that converts number field values (and number-list item
 * values) that were historically persisted as strings into actual
 * {@link Number} vertex properties.
 */
public class ChangeNumberStringsToNumber extends AbstractChange {

    private static final String NUMBER_TYPE = "number";
    private static final String LIST_TYPE = "list";
    private static final String SCHEMA_FIELDS = "fields";
    private static final String FIELD_TYPE_KEY = "type";
    private static final String FIELD_LIST_TYPE_KEY = "listType";
    private static final String FIELD_NAME_KEY = "name";
    private static final String UUID = "uuid";
    private static final String FIELD_KEY = "fieldkey";
    private static final String JSON_FIELD = "json";
    private static final String SCHEMA_CONTAINER_VERSION_CLASS = "SchemaContainerVersionImpl";
    private static final String HAS_SCHEMA_CONTAINER_VERSION = "HAS_SCHEMA_CONTAINER_VERSION";
    private static final String MICROSCHEMA_CONTAINER_VERSION_CLASS = "MicroschemaContainerVersionImpl";
    private static final String HAS_MICROSCHEMA_CONTAINER = "HAS_MICROSCHEMA_CONTAINER";
    private static final String HAS_LIST = "HAS_LIST";
    private static final String ITEM_PREFIX = "item-";

    private static final Logger log = LoggerFactory.getLogger(ChangeNumberStringsToNumber.class);

    // NOTE(review): NumberFormat instances are not thread-safe; this is fine
    // while the changelog runs single-threaded — confirm before parallelizing.
    private static final NumberFormat format = NumberFormat.getInstance(Locale.ENGLISH);

    // Cache of parsed schema descriptions, keyed by schema-version vertex uuid.
    private final Map<String, Schema> schemaMap = new HashMap<>();

    @Override
    public String getName() {
        return "Change Number String to Number";
    }

    @Override
    public String getDescription() {
        return "Changes the values of number fields (and number list fields) from strings to actual numbers.";
    }

    /**
     * Builds (and caches) the {@link Schema} description for the given schema
     * version vertex. The resulting field map contains only number fields and
     * number-list fields, since only those need conversion. When the vertex
     * carries no schema json the returned schema simply has an empty field map.
     */
    private Schema buildSchemaFromVertex(Vertex schemaVertex, String className) {
        return schemaMap.computeIfAbsent(schemaVertex.getProperty(UUID), uuid -> {
            Schema schema = new Schema();
            schema.type = className;
            schema.uuid = uuid;
            schema.version = schemaVertex.getProperty("version");
            String json = schemaVertex.getProperty(JSON_FIELD);
            if (json == null) {
                // No schema json stored — nothing to convert for this schema.
                return schema;
            }
            JsonObject jsonSchema = new JsonObject(json);
            if (!jsonSchema.containsKey(SCHEMA_FIELDS)) {
                return schema;
            }
            schema.name = jsonSchema.getString("name");
            JsonArray fields = jsonSchema.getJsonArray(SCHEMA_FIELDS);
            schema.fieldMap = IntStream.range(0, fields.size())
                .mapToObj(fields::getJsonObject)
                .filter(f -> {
                    String type = f.getString(FIELD_TYPE_KEY);
                    return NUMBER_TYPE.equals(type) || (LIST_TYPE.equals(type) && NUMBER_TYPE.equals(f.getString(FIELD_LIST_TYPE_KEY)));
                })
                .collect(Collectors.toMap(o -> o.getString(FIELD_NAME_KEY), Function.identity()));
            return schema;
        });
    }

    /**
     * Replaces the named string property of the vertex with its parsed numeric
     * value. Non-string values are left untouched; unparsable strings are
     * replaced with 0 (logged as a warning).
     */
    private void updateProperty(String propertyName, Vertex node) {
        Object obj = node.getProperty(propertyName);
        if (obj == null) {
            return;
        }
        if (!(obj instanceof String)) {
            if (log.isDebugEnabled()) {
                log.debug("Property '{}' for node '{}' in database is no string so we don't convert it. {}: '{}'", propertyName, node.getProperty(UUID), obj.getClass(), obj);
            }
            return;
        }
        String strVal = (String) obj;
        Number numVal;
        try {
            numVal = format.parse(strVal);
        } catch (ParseException e) {
            log.warn("Could not parse the number '{}', for field '{}' in node {}", strVal, propertyName, node.getId());
            numVal = 0;
        }
        node.removeProperty(propertyName);
        node.setProperty(propertyName, numVal);
    }

    /**
     * Converts all "item-*" properties of list vertices attached to the
     * container, for lists whose field is a number list per the schema.
     */
    private void updateLists(Vertex container, Map<String, JsonObject> fieldMap) {
        for (Vertex listElement: container.getVertices(Direction.OUT, HAS_LIST)) {
            String fieldName = listElement.getProperty(FIELD_KEY);
            if (fieldMap.containsKey(fieldName) && NUMBER_TYPE.equals(fieldMap.get(fieldName).getString(FIELD_LIST_TYPE_KEY))) {
                listElement.getPropertyKeys().stream()
                    .filter(k -> k.startsWith(ITEM_PREFIX))
                    .forEach(k -> updateProperty(k, listElement));
            }
        }
    }

    /** Converts the "<name>-number" properties of the container for every number field. */
    private void updateFields(Vertex container, Map<String, JsonObject> fieldMap) {
        fieldMap.entrySet().stream()
            .map(Map.Entry::getValue)
            .filter(f -> NUMBER_TYPE.equals(f.getString(FIELD_TYPE_KEY)))
            .forEach(f -> updateProperty(f.getString(FIELD_NAME_KEY) + "-" + NUMBER_TYPE, container));
    }

    /**
     * Converts all field containers referencing the given schema version
     * vertex via the given edge label, logging progress every 200 vertices.
     */
    public void updateVerticesForSchema(Vertex schemaVertex, Map<String, JsonObject> fieldMap, String label) {
        long count = 0;
        for (Vertex vertex : schemaVertex.getVertices(Direction.IN, label)) {
            count++;
            updateFields(vertex, fieldMap);
            updateLists(vertex, fieldMap);
            if (count % 200 == 0) {
                log.info("Updated vertices {}", count);
            }
        }
    }

    /**
     * Runs the conversion for every schema version vertex of the given class,
     * committing the graph after each schema that had number fields.
     */
    public void convertViaSchema(String schemaVersionClassName, String label) {
        for (Vertex schemaVertex : getGraph().getVertices("@class", schemaVersionClassName)) {
            Schema schema = buildSchemaFromVertex(schemaVertex, schemaVersionClassName);
            if (!schema.fieldMap.isEmpty()) {
                log.info("Update vertices for {}", schema);
                updateVerticesForSchema(schemaVertex, schema.fieldMap, label);
                log.info("Commit changes to database...");
                getGraph().commit();
            }
        }
    }

    @Override
    public void apply() {
        log.info("Start converting numbers in nodes.");
        convertViaSchema(SCHEMA_CONTAINER_VERSION_CLASS, HAS_SCHEMA_CONTAINER_VERSION);
        log.info("Start converting numbers in micro-nodes.");
        convertViaSchema(MICROSCHEMA_CONTAINER_VERSION_CLASS, HAS_MICROSCHEMA_CONTAINER);
    }

    @Override
    public String getUuid() {
        return "3F367427D10641FAB67427D10621FA90";
    }

    /**
     * Lightweight description of one schema version. Declared static because
     * it needs no reference to the enclosing change instance.
     */
    private static class Schema {
        String type;
        String name;
        String uuid;
        String version;
        // Initialized to an empty map so convertViaSchema() can safely call
        // isEmpty() even when the schema vertex carried no json/fields —
        // previously this field stayed null in those cases and caused a
        // NullPointerException.
        Map<String, JsonObject> fieldMap = new HashMap<>();

        @Override
        public String toString() {
            return type + "{" +
                "name='" + name + '\'' +
                ", uuid='" + uuid + '\'' +
                ", version='" + version + '\'' +
                '}';
        }
    }
}
| changelog-system/src/main/java/com/gentics/mesh/changelog/changes/ChangeNumberStringsToNumber.java | package com.gentics.mesh.changelog.changes;
import com.gentics.mesh.changelog.AbstractChange;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Vertex;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
 * Changelog entry that converts number field values (and number-list item
 * values) that were historically persisted as strings into actual
 * {@link Number} vertex properties.
 */
public class ChangeNumberStringsToNumber extends AbstractChange {
    private static final String NUMBER_TYPE = "number";
    private static final String LIST_TYPE = "list";
    private static final String SCHEMA_FIELDS = "fields";
    private static final String FIELD_TYPE_KEY = "type";
    private static final String FIELD_LIST_TYPE_KEY = "listType";
    private static final String FIELD_NAME_KEY = "name";
    private static final String UUID = "uuid";
    private static final String FIELD_KEY = "fieldkey";
    private static final String JSON_FIELD = "json";
    private static final String SCHEMA_CONTAINER_VERSION_CLASS = "SchemaContainerVersionImpl";
    private static final String HAS_SCHEMA_CONTAINER_VERSION = "HAS_SCHEMA_CONTAINER_VERSION";
    private static final String MICROSCHEMA_CONTAINER_VERSION_CLASS = "MicroschemaContainerVersionImpl";
    private static final String HAS_MICROSCHEMA_CONTAINER = "HAS_MICROSCHEMA_CONTAINER";
    private static final String HAS_LIST = "HAS_LIST";
    private static final String ITEM_PREFIX = "item-";
    private static final Logger log = LoggerFactory.getLogger(ChangeNumberStringsToNumber.class);
    // NOTE(review): NumberFormat is not thread-safe; presumably the changelog
    // runs single-threaded — confirm before parallelizing.
    private static final NumberFormat format = NumberFormat.getInstance(Locale.ENGLISH);
    // Cache of number/number-list field descriptions per schema-version uuid.
    private final Map<String, Map<String, JsonObject>> schemaMap = new HashMap<>();
    @Override
    public String getName() {
        return "Change Number String to Number";
    }
    @Override
    public String getDescription() {
        return "Changes the values of number fields (and number list fields) from strings to actual numbers.";
    }
    /**
     * Parses (and caches) the schema json stored on the given schema version
     * vertex, returning only its number fields and number-list fields keyed by
     * field name. Returns an empty map when the vertex has no json or no
     * "fields" entry.
     */
    private Map<String, JsonObject> buildSchemaFromVertex(Vertex schemaVertex) {
        return schemaMap.computeIfAbsent(schemaVertex.getProperty(UUID), uuid -> {
            String json = schemaVertex.getProperty(JSON_FIELD);
            if (json == null) {
                return new HashMap<>();
            }
            JsonObject schema = new JsonObject(json);
            if (!schema.containsKey(SCHEMA_FIELDS)) {
                return new HashMap<>();
            }
            JsonArray fields = schema.getJsonArray(SCHEMA_FIELDS);
            // Keep only number fields and lists whose listType is number.
            return IntStream.range(0, fields.size())
                .mapToObj(fields::getJsonObject)
                .filter(f -> {
                    String type = f.getString(FIELD_TYPE_KEY);
                    return NUMBER_TYPE.equals(type) || (LIST_TYPE.equals(type) && NUMBER_TYPE.equals(f.getString(FIELD_LIST_TYPE_KEY)));
                })
                .collect(Collectors.toMap(o -> o.getString(FIELD_NAME_KEY), Function.identity()));
        });
    }
    /**
     * Replaces the named string property of the vertex with its parsed numeric
     * value. Non-string values are left untouched; unparsable strings are
     * replaced with 0 (logged as a warning).
     */
    private void updateProperty(String propertyName, Vertex node) {
        Object obj = node.getProperty(propertyName);
        if (obj == null) {
            return;
        }
        if (!(obj instanceof String)) {
            if (log.isDebugEnabled()) {
                log.debug("Property '{}' for node '{}' in database is no string so we don't convert it. {}: '{}'", propertyName, node.getProperty(UUID), obj.getClass(), obj);
            }
            return;
        }
        String strVal = (String) obj;
        Number numVal;
        try {
            numVal = format.parse(strVal);
        } catch (ParseException e) {
            log.warn("Could not parse the number '{}', for field '{}' in node {}", strVal, propertyName, node.getId());
            numVal = 0;
        }
        // Remove first, then re-set: the property changes its value type.
        node.removeProperty(propertyName);
        node.setProperty(propertyName, numVal);
    }
    /**
     * Converts all "item-*" properties of list vertices attached to the
     * container, for lists whose field is a number list per the schema.
     */
    private void updateLists(Vertex container, Map<String, JsonObject> fieldMap) {
        for (Vertex listElement: container.getVertices(Direction.OUT, HAS_LIST)) {
            String fieldName = listElement.getProperty(FIELD_KEY);
            if (fieldMap.containsKey(fieldName) && NUMBER_TYPE.equals(fieldMap.get(fieldName).getString(FIELD_LIST_TYPE_KEY))) {
                listElement.getPropertyKeys().stream()
                    .filter(k -> k.startsWith(ITEM_PREFIX))
                    .forEach(k -> updateProperty(k, listElement));
            }
        }
    }
    /** Converts the "<name>-number" properties of the container for every number field. */
    private void updateFields(Vertex container, Map<String, JsonObject> fieldMap) {
        fieldMap.entrySet().stream()
            .map(Map.Entry::getValue)
            .filter(f -> NUMBER_TYPE.equals(f.getString(FIELD_TYPE_KEY)))
            .forEach(f -> updateProperty(f.getString(FIELD_NAME_KEY) + "-" + NUMBER_TYPE, container));
    }
    /**
     * Converts every field container that references the given schema version
     * vertex through the given incoming edge label.
     */
    public void updateVerticesForSchema(Vertex schemaVertex, Map<String, JsonObject> fieldMap, String label) {
        for (Vertex vertex : schemaVertex.getVertices(Direction.IN, label)) {
            updateFields(vertex, fieldMap);
            updateLists(vertex, fieldMap);
        }
    }
    /**
     * Runs the conversion for every schema version vertex of the given class
     * (looked up via the "ferma_type" vertex property).
     */
    public void convertViaSchema(String schemaVersionClassName, String label) {
        for (Vertex schemaVertex : getGraph().getVertices("ferma_type", schemaVersionClassName)) {
            Map<String, JsonObject> fieldMap = buildSchemaFromVertex(schemaVertex);
            if (!fieldMap.isEmpty()) {
                updateVerticesForSchema(schemaVertex, fieldMap, label);
            }
        }
    }
    @Override
    public void apply() {
        log.info("Start converting numbers in nodes.");
        convertViaSchema(SCHEMA_CONTAINER_VERSION_CLASS, HAS_SCHEMA_CONTAINER_VERSION);
        log.info("Start converting numbers in micronodes.");
        convertViaSchema(MICROSCHEMA_CONTAINER_VERSION_CLASS, HAS_MICROSCHEMA_CONTAINER);
    }
    @Override
    public String getUuid() {
        return "3F367427D10641FAB67427D10621FA90";
    }
}
| Improve performance of changelog execution
| changelog-system/src/main/java/com/gentics/mesh/changelog/changes/ChangeNumberStringsToNumber.java | Improve performance of changelog execution |
|
Java | apache-2.0 | 020b49fc1d85f35117476198870e45ffe040a5a3 | 0 | rrenomeron/cas,dodok1/cas,rrenomeron/cas,frett/cas,GIP-RECIA/cas,rrenomeron/cas,robertoschwald/cas,tduehr/cas,rrenomeron/cas,tduehr/cas,doodelicious/cas,doodelicious/cas,William-Hill-Online/cas,doodelicious/cas,Unicon/cas,Unicon/cas,GIP-RECIA/cas,William-Hill-Online/cas,frett/cas,frett/cas,Unicon/cas,robertoschwald/cas,William-Hill-Online/cas,Unicon/cas,tduehr/cas,prigaux/cas,Unicon/cas,prigaux/cas,GIP-RECIA/cas,GIP-RECIA/cas,dodok1/cas,robertoschwald/cas,tduehr/cas,prigaux/cas,prigaux/cas,rrenomeron/cas,doodelicious/cas,GIP-RECIA/cas,dodok1/cas,robertoschwald/cas,frett/cas,William-Hill-Online/cas,dodok1/cas | package org.apereo.cas.web;
import org.jsqrl.model.SqrlAuthResponse;
import org.jsqrl.model.SqrlClientRequest;
import org.jsqrl.server.JSqrlServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
/**
 * This is {@link SqrlAuthenticationController}.
 *
 * <p>Exposes the HTTP endpoints used during SQRL authentication:
 * {@code /sqrl/authn} receives the SQRL client's signed response and
 * {@code /sqrl/authcheck} is polled to find out whether the attempt
 * identified by a given nut has been authenticated yet.
 *
 * @author Misagh Moayyed
 * @since 5.2.0
 */
@RestController("sqrlAuthenticationController")
public class SqrlAuthenticationController {
    private static final Logger LOGGER = LoggerFactory.getLogger(SqrlAuthenticationController.class);

    /** Server-side SQRL protocol handler; validates client requests and tracks nut state. */
    private final JSqrlServer server;

    public SqrlAuthenticationController(final JSqrlServer server) {
        this.server = server;
    }

    /**
     * Handles the SQRL client's authentication response.
     *
     * @param request     the SQRL client request bound from the posted form
     * @param nut         the nut token identifying this authentication attempt
     * @param httpRequest the http request
     * @return the encoded SQRL response with status {@code 200 OK},
     *         or status {@code 500} when the request cannot be processed
     */
    @PostMapping(path = "/sqrl/authn")
    public ResponseEntity<String> sqrl(@ModelAttribute final SqrlClientRequest request,
                                       @RequestParam("nut") final String nut,
                                       final HttpServletRequest httpRequest) {
        final String remoteAddr = httpRequest.getRemoteAddr();
        LOGGER.info("SQRL authentication response command [{}] w/ client: [{}] and Parameters [{}]. "
                + "Decoded client data [{}] w/ server [{}]'s decoded data [{}]. "
                + "Request version [{}] with ids [{}] and urs [{}]. Remote address is [{}]",
            request.getCommand(), request.getClient(), request.getClientParameters(),
            request.getDecodedClientData(), request.getServer(), request.getDecodedServerData(),
            request.getRequestVersion(), request.getIds(), request.getUrs(), remoteAddr);
        try {
            LOGGER.info("Handling SQRL authentication client request for nut [{}]", nut);
            final SqrlAuthResponse sqrlAuthResponse = server.handleClientRequest(request, nut, remoteAddr);
            LOGGER.info("SQRL authentication response created for nut [{}]. Preparing response...", nut);
            final String encoded = sqrlAuthResponse.toEncodedString();
            LOGGER.info("Returning encoded response [{}] with status [{}]", encoded, HttpStatus.OK);
            // Typed construction instead of the previous raw ResponseEntity so the
            // declared ResponseEntity<String> return type is actually honored.
            return new ResponseEntity<>(encoded, HttpStatus.OK);
        } catch (final Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
        // Fixed typo: the log message previously said "SQL" instead of "SQRL".
        LOGGER.info("Could not process SQRL authentication request. Returning status [{}]", HttpStatus.INTERNAL_SERVER_ERROR);
        return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
    }

    /**
     * Reports whether the SQRL authentication attempt identified by the nut
     * has completed yet.
     *
     * @param nut         the nut token identifying the authentication attempt
     * @param httpRequest the http request
     * @return status {@code 205 RESET_CONTENT} once authenticated, otherwise {@code 200 OK}
     */
    @GetMapping(path = "/sqrl/authcheck")
    public ResponseEntity<Void> checkAuthentication(@RequestParam("nut") final String nut,
                                                    final HttpServletRequest httpRequest) {
        final String remoteAddr = httpRequest.getRemoteAddr();
        LOGGER.info("Checking for SQRL authentication success against nut [{}] for client [{}]", nut, remoteAddr);
        if (server.checkAuthenticationStatus(nut, remoteAddr)) {
            LOGGER.info("SQRL authentication request [{}] is authenticated. Returning status [{}]", remoteAddr, HttpStatus.RESET_CONTENT);
            return new ResponseEntity<>(HttpStatus.RESET_CONTENT);
        }
        LOGGER.info("SQRL request is not authenticated yet");
        return new ResponseEntity<>(HttpStatus.OK);
    }
}
| support/cas-server-support-sqrl/src/main/java/org/apereo/cas/web/SqrlAuthenticationController.java | package org.apereo.cas.web;
import org.jsqrl.model.SqrlAuthResponse;
import org.jsqrl.model.SqrlClientRequest;
import org.jsqrl.server.JSqrlServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
/**
 * This is {@link SqrlAuthenticationController}.
 *
 * <p>REST endpoints backing the SQRL login flow: one endpoint accepts the
 * SQRL client's signed response, the other is polled for authentication
 * status of a given nut.
 *
 * @author Misagh Moayyed
 * @since 5.2.0
 */
@RestController("sqrlAuthenticationController")
public class SqrlAuthenticationController {
    private static final Logger LOGGER = LoggerFactory.getLogger(SqrlAuthenticationController.class);

    /** JSqrl protocol server used to validate requests and query nut status. */
    private final JSqrlServer server;

    public SqrlAuthenticationController(final JSqrlServer server) {
        this.server = server;
    }

    /**
     * Processes a SQRL client's authentication response.
     *
     * @param request the request
     * @param nut the nut
     * @param httpRequest the http request
     * @return the response entity
     */
    @PostMapping(path = "/sqrl/authn")
    public ResponseEntity<String> sqrl(@ModelAttribute final SqrlClientRequest request,
                                       @RequestParam("nut") final String nut,
                                       final HttpServletRequest httpRequest) {
        final String clientAddress = httpRequest.getRemoteAddr();
        LOGGER.info("SQRL authentication response command [{}] w/ client: [{}] and Parameters [{}]. "
                + "Decoded client data [{}] w/ server [{}]'s decoded data [{}]. "
                + "Request version [{}] with ids [{}] and urs [{}]. Remote address is [{}]",
            request.getCommand(), request.getClient(), request.getClientParameters(),
            request.getDecodedClientData(), request.getServer(), request.getDecodedServerData(),
            request.getRequestVersion(), request.getIds(), request.getUrs(), clientAddress);
        try {
            LOGGER.info("Handling SQRL authentication client request for nut [{}]", nut);
            final SqrlAuthResponse authResponse = server.handleClientRequest(request, nut, clientAddress);
            LOGGER.info("SQRL authentication response [{}] for nut [{}]", authResponse, nut);
            final String encodedResponse = authResponse.toEncodedString();
            LOGGER.info("Returning encoded response [{}] with status [{}]", encodedResponse, HttpStatus.OK);
            return new ResponseEntity(encodedResponse, HttpStatus.OK);
        } catch (final Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
        return new ResponseEntity(HttpStatus.INTERNAL_SERVER_ERROR);
    }

    /**
     * Polls the authentication status for a given nut.
     *
     * @param nut the nut
     * @param httpRequest the http request
     * @return the response entity
     */
    @GetMapping(path = "/sqrl/authcheck")
    public ResponseEntity checkAuthentication(@RequestParam("nut") final String nut,
                                              final HttpServletRequest httpRequest) {
        final String clientAddress = httpRequest.getRemoteAddr();
        LOGGER.debug("Checking for SQRL authentication success against nut [{}] for client [{}]", nut, clientAddress);
        if (server.checkAuthenticationStatus(nut, clientAddress)) {
            LOGGER.info("SQRL authentication request [{}] is authenticated. Returning status [{}]", clientAddress, HttpStatus.RESET_CONTENT);
            return new ResponseEntity(HttpStatus.RESET_CONTENT);
        }
        LOGGER.debug("SQRL request is not authenticated yet");
        return new ResponseEntity(HttpStatus.OK);
    }
}
| [skip tests]
| support/cas-server-support-sqrl/src/main/java/org/apereo/cas/web/SqrlAuthenticationController.java | [skip tests] |
|
Java | apache-2.0 | 6efb8c9c6018ec688eeba8c61c220009ec350a44 | 0 | bitmybytes/hadoop,littlezhou/hadoop,steveloughran/hadoop,nandakumar131/hadoop,mapr/hadoop-common,littlezhou/hadoop,ucare-uchicago/hadoop,wenxinhe/hadoop,szegedim/hadoop,apurtell/hadoop,nandakumar131/hadoop,xiao-chen/hadoop,bitmybytes/hadoop,Ethanlm/hadoop,GeLiXin/hadoop,plusplusjiajia/hadoop,littlezhou/hadoop,plusplusjiajia/hadoop,apache/hadoop,lukmajercak/hadoop,ronny-macmaster/hadoop,Ethanlm/hadoop,soumabrata-chakraborty/hadoop,plusplusjiajia/hadoop,szegedim/hadoop,bitmybytes/hadoop,wwjiang007/hadoop,979969786/hadoop,979969786/hadoop,dierobotsdie/hadoop,legend-hua/hadoop,JingchengDu/hadoop,979969786/hadoop,steveloughran/hadoop,ronny-macmaster/hadoop,ChetnaChaudhari/hadoop,plusplusjiajia/hadoop,steveloughran/hadoop,ronny-macmaster/hadoop,ucare-uchicago/hadoop,ChetnaChaudhari/hadoop,JingchengDu/hadoop,Ethanlm/hadoop,szegedim/hadoop,WIgor/hadoop,ctrezzo/hadoop,plusplusjiajia/hadoop,huafengw/hadoop,apache/hadoop,lukmajercak/hadoop,xiao-chen/hadoop,WIgor/hadoop,nandakumar131/hadoop,JingchengDu/hadoop,wenxinhe/hadoop,mapr/hadoop-common,ronny-macmaster/hadoop,nandakumar131/hadoop,szegedim/hadoop,ronny-macmaster/hadoop,ucare-uchicago/hadoop,JingchengDu/hadoop,littlezhou/hadoop,ucare-uchicago/hadoop,apache/hadoop,ChetnaChaudhari/hadoop,apache/hadoop,legend-hua/hadoop,WIgor/hadoop,littlezhou/hadoop,apurtell/hadoop,lukmajercak/hadoop,xiao-chen/hadoop,dennishuo/hadoop,JingchengDu/hadoop,apurtell/hadoop,legend-hua/hadoop,Ethanlm/hadoop,wwjiang007/hadoop,ronny-macmaster/hadoop,szegedim/hadoop,lukmajercak/hadoop,dennishuo/hadoop,littlezhou/hadoop,WIgor/hadoop,dennishuo/hadoop,dierobotsdie/hadoop,ronny-macmaster/hadoop,xiao-chen/hadoop,wenxinhe/hadoop,mapr/hadoop-common,huafengw/hadoop,lukmajercak/hadoop,Ethanlm/hadoop,steveloughran/hadoop,979969786/hadoop,soumabrata-chakraborty/hadoop,nandakumar131/hadoop,wwjiang007/hadoop,GeLiXin/hadoop,GeLiXin/hadoop,wwjiang007/hadoop,JingchengDu/hadoop,lukmajercak/hadoop,mapr/
hadoop-common,huafengw/hadoop,dennishuo/hadoop,steveloughran/hadoop,nandakumar131/hadoop,dierobotsdie/hadoop,apache/hadoop,apurtell/hadoop,979969786/hadoop,legend-hua/hadoop,szegedim/hadoop,lukmajercak/hadoop,steveloughran/hadoop,ctrezzo/hadoop,ChetnaChaudhari/hadoop,soumabrata-chakraborty/hadoop,GeLiXin/hadoop,979969786/hadoop,ChetnaChaudhari/hadoop,legend-hua/hadoop,wwjiang007/hadoop,dierobotsdie/hadoop,soumabrata-chakraborty/hadoop,GeLiXin/hadoop,ucare-uchicago/hadoop,GeLiXin/hadoop,szegedim/hadoop,JingchengDu/hadoop,legend-hua/hadoop,huafengw/hadoop,apurtell/hadoop,apurtell/hadoop,plusplusjiajia/hadoop,steveloughran/hadoop,wenxinhe/hadoop,apache/hadoop,ChetnaChaudhari/hadoop,Ethanlm/hadoop,apache/hadoop,wenxinhe/hadoop,xiao-chen/hadoop,GeLiXin/hadoop,wwjiang007/hadoop,dennishuo/hadoop,WIgor/hadoop,ctrezzo/hadoop,ctrezzo/hadoop,mapr/hadoop-common,apurtell/hadoop,wenxinhe/hadoop,huafengw/hadoop,wwjiang007/hadoop,ctrezzo/hadoop,nandakumar131/hadoop,soumabrata-chakraborty/hadoop,ChetnaChaudhari/hadoop,Ethanlm/hadoop,979969786/hadoop,WIgor/hadoop,mapr/hadoop-common,xiao-chen/hadoop,ucare-uchicago/hadoop,soumabrata-chakraborty/hadoop,dierobotsdie/hadoop,xiao-chen/hadoop,dennishuo/hadoop,plusplusjiajia/hadoop,dennishuo/hadoop,bitmybytes/hadoop,bitmybytes/hadoop,soumabrata-chakraborty/hadoop,dierobotsdie/hadoop,ucare-uchicago/hadoop,WIgor/hadoop,ctrezzo/hadoop,legend-hua/hadoop,bitmybytes/hadoop,mapr/hadoop-common,dierobotsdie/hadoop,huafengw/hadoop,ctrezzo/hadoop,bitmybytes/hadoop,wenxinhe/hadoop,littlezhou/hadoop,huafengw/hadoop | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.balancer;
import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY;
import static org.apache.hadoop.fs.StorageType.DEFAULT;
import static org.apache.hadoop.fs.StorageType.RAM_DISK;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_DATA_TRANSFER_PROTECTION_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BALANCER_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BALANCER_KERBEROS_PRINCIPAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BALANCER_KEYTAB_ENABLED_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BALANCER_KEYTAB_FILE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_BLOCK_PINNING_ENABLED;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_LAZY_WRITER_INTERVAL_SEC;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HTTP_POLICY_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import org.junit.AfterClass;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URI;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.TimeoutException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.NameNodeProxies;
import org.apache.hadoop.hdfs.StripedFileTestUtil;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.balancer.Balancer.Cli;
import org.apache.hadoop.hdfs.server.balancer.Balancer.Result;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicyWithUpgradeDomain;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementStatus;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.LazyPersistTestCase;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Tool;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Test;
/**
* This class tests if a balancer schedules tasks correctly.
*/
public class TestBalancer {
private static final Log LOG = LogFactory.getLog(TestBalancer.class);
static {
    // Capture all balancer log output while the tests run.
    GenericTestUtils.setLogLevel(Balancer.LOG, Level.ALL);
}

// Simulated per-datanode capacity (bytes) used by most tests.
final static long CAPACITY = 5000L;
// Rack names used when laying out test datanodes.
final static String RACK0 = "/rack0";
final static String RACK1 = "/rack1";
final static String RACK2 = "/rack2";
// Test file written into the mini cluster.
final private static String fileName = "/tmp.txt";
final static Path filePath = new Path(fileName);
// Kerberos identity used by the secure-balancer test (see initSecureConf).
final static private String username = "balancer";
private static String principal;
private static File baseDir;
private static String keystoresDir;
private static String sslConfDir;
private static MiniKdc kdc;
private static File keytabFile;
// Cluster under test; torn down in shutdown() after each test.
private MiniDFSCluster cluster;
/**
 * Tears down the mini cluster after each test, if one was started.
 */
@After
public void shutdown() throws Exception {
    if (cluster == null) {
        return;
    }
    cluster.shutdown();
    cluster = null;
}
// RPC proxy to the active namenode, (re)created by individual tests.
ClientProtocol client;

static final long TIMEOUT = 40000L; //msec
static final double CAPACITY_ALLOWED_VARIANCE = 0.005; // 0.5%
static final double BALANCE_ALLOWED_VARIANCE = 0.11; // 10%+delta
static final int DEFAULT_BLOCK_SIZE = 100;
static final int DEFAULT_RAM_DISK_BLOCK_SIZE = 5 * 1024 * 1024;
private static final Random r = new Random();

static {
    // Runs once when the class is loaded, before any test.
    initTestSetup();
}
/**
 * One-time test setup shared by this class (and reusable by others).
 */
public static void initTestSetup() {
    // do not create id file since it occupies the disk space
    NameNodeConnector.setWrite2IdFile(false);
}
/**
 * Applies the common test configuration: tiny blocks, fast heartbeat and
 * replication intervals, and a simulated dataset factory so balancer
 * behavior can be exercised quickly without real disk usage.
 *
 * @param conf configuration mutated in place
 */
static void initConf(Configuration conf) {
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DEFAULT_BLOCK_SIZE);
    conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, DEFAULT_BLOCK_SIZE);
    // Fast heartbeat/recheck so cluster state changes propagate quickly.
    conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 500);
    conf.setLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1L);
    // Use the simulated datanode storage implementation.
    SimulatedFSDataset.setFactory(conf);
    conf.setLong(DFSConfigKeys.DFS_BALANCER_MOVEDWINWIDTH_KEY, 2000L);
    conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
}
/**
 * Configures the cluster for RAM-disk (lazy-persist) tests: larger blocks,
 * a locked-memory budget equal to the RAM disk capacity, and short
 * scrubber/lazy-writer intervals.
 *
 * @param conf configuration mutated in place
 * @param ramDiskCapacity RAM disk capacity in bytes, also used as the
 *        datanode's max locked memory
 */
static void initConfWithRamDisk(Configuration conf,
    long ramDiskCapacity) {
    conf.setLong(DFS_BLOCK_SIZE_KEY, DEFAULT_RAM_DISK_BLOCK_SIZE);
    conf.setLong(DFS_DATANODE_MAX_LOCKED_MEMORY_KEY, ramDiskCapacity);
    conf.setInt(DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC, 3);
    conf.setLong(DFS_HEARTBEAT_INTERVAL_KEY, 1);
    conf.setInt(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 500);
    conf.setInt(DFS_DATANODE_LAZY_WRITER_INTERVAL_SEC, 1);
    // Installs a no-op memory manipulator suitable for tests.
    LazyPersistTestCase.initCacheManipulator();
    conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
}
// Erasure-coding layout used by the striped-file tests.
int dataBlocks = StripedFileTestUtil.NUM_DATA_BLOCKS;
int parityBlocks = StripedFileTestUtil.NUM_PARITY_BLOCKS;
int groupSize = dataBlocks + parityBlocks;
private final static int cellSize = StripedFileTestUtil.BLOCK_STRIPED_CELL_SIZE;
private final static int stripesPerBlock = 4;
// Block size for striped tests: a whole number of stripes per block.
static int DEFAULT_STRIPE_BLOCK_SIZE = cellSize * stripesPerBlock;
/**
 * Configures the cluster for striped (erasure-coded) file tests.
 *
 * @param conf configuration mutated in place
 */
static void initConfWithStripe(Configuration conf) {
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DEFAULT_STRIPE_BLOCK_SIZE);
    // Disable load-based placement so block distribution is deterministic.
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_REPLICATION_CONSIDERLOAD_KEY, false);
    conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
    SimulatedFSDataset.setFactory(conf);
    conf.setLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1L);
    conf.setLong(DFSConfigKeys.DFS_BALANCER_MOVEDWINWIDTH_KEY, 2000L);
    conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
}
/**
 * Builds a fully secured (Kerberos + SSL) configuration for balancer tests:
 * starts a MiniKdc, creates a keytab shared by the namenode, datanode and
 * balancer principals, enables HTTPS-only policy and SASL data transfer,
 * and finally layers the common test settings on top via initConf.
 *
 * @param conf configuration mutated in place
 * @throws Exception if KDC startup, principal creation or SSL setup fails
 */
static void initSecureConf(Configuration conf) throws Exception {
    baseDir = GenericTestUtils.getTestDir(TestBalancer.class.getSimpleName());
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());

    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, baseDir);
    kdc.start();

    SecurityUtil.setAuthenticationMethod(
        UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
    UserGroupInformation.setConfiguration(conf);
    // Realm changed by the MiniKdc; reset the cached default.
    KerberosName.resetDefaultRealm();
    assertTrue("Expected configuration to enable security",
        UserGroupInformation.isSecurityEnabled());

    keytabFile = new File(baseDir, username + ".keytab");
    String keytab = keytabFile.getAbsolutePath();
    // Windows will not reverse name lookup "127.0.0.1" to "localhost".
    String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
    principal = username + "/" + krbInstance + "@" + kdc.getRealm();
    String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();
    // One keytab holds the service principal and the HTTP/SPNEGO principal.
    kdc.createPrincipal(keytabFile, username, username + "/" + krbInstance,
        "HTTP/" + krbInstance);

    // The same principal/keytab is reused for namenode, datanode and balancer.
    conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, principal);
    conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
    conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, principal);
    conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
    conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
    conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);

    conf.setBoolean(DFS_BALANCER_KEYTAB_ENABLED_KEY, true);
    conf.set(DFS_BALANCER_ADDRESS_KEY, "localhost:0");
    conf.set(DFS_BALANCER_KEYTAB_FILE_KEY, keytab);
    conf.set(DFS_BALANCER_KERBEROS_PRINCIPAL_KEY, principal);

    // Generate SSL keystores/truststores and point client/server at them.
    keystoresDir = baseDir.getAbsolutePath();
    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestBalancer.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
    conf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
        KeyStoreTestUtil.getClientSSLConfigFileName());
    conf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
        KeyStoreTestUtil.getServerSSLConfigFileName());

    initConf(conf);
}
/**
 * Class-level teardown: stops the MiniKdc and removes the keytab/SSL
 * artifacts, but only if the secure setup actually ran.
 */
@AfterClass
public static void destroy() throws Exception {
    if (kdc == null) {
        return;
    }
    kdc.stop();
    FileUtil.fullyDelete(baseDir);
    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
}
/**
 * Writes a file of the given length into the cluster and blocks until the
 * requested replication factor has been reached.
 *
 * @param cluster mini cluster to write into
 * @param filePath destination path
 * @param fileLen file length in bytes
 * @param replicationFactor desired replication
 * @param nnIndex index of the namenode whose filesystem is used
 */
public static void createFile(MiniDFSCluster cluster, Path filePath, long
    fileLen,
    short replicationFactor, int nnIndex)
    throws IOException, InterruptedException, TimeoutException {
    final FileSystem fileSystem = cluster.getFileSystem(nnIndex);
    DFSTestUtil.createFile(fileSystem, filePath, fileLen,
        replicationFactor, r.nextLong());
    DFSTestUtil.waitReplication(fileSystem, filePath, replicationFactor);
}
/**
 * Fills a fresh cluster of {@code numNodes} datanodes with {@code size}
 * bytes of used space (via a single replicated file) and returns copies of
 * the resulting block descriptors. The temporary cluster is always shut
 * down before returning.
 */
private ExtendedBlock[] generateBlocks(Configuration conf, long size,
    short numNodes) throws IOException, InterruptedException, TimeoutException {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numNodes).build();
    try {
        cluster.waitActive();
        client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0).getUri(),
            ClientProtocol.class).getProxy();

        // Replicate to all but one node so total used space equals 'size'.
        short replicationFactor = (short)(numNodes-1);
        long fileLen = size/replicationFactor;
        createFile(cluster , filePath, fileLen, replicationFactor, 0);

        List<LocatedBlock> located = client.
            getBlockLocations(fileName, 0, fileLen).getLocatedBlocks();

        // Copy each block so the returned array is independent of the cluster.
        List<ExtendedBlock> copies = new ArrayList<ExtendedBlock>(located.size());
        for (LocatedBlock lb : located) {
            ExtendedBlock b = lb.getBlock();
            copies.add(new ExtendedBlock(b.getBlockPoolId(), b.getBlockId(), b
                .getNumBytes(), b.getGenerationStamp()));
        }
        return copies.toArray(new ExtendedBlock[copies.size()]);
    } finally {
        cluster.shutdown();
    }
}
/**
 * Distributes every replica of every block across datanodes according to
 * the requested per-node usage distribution, and returns one block report
 * per datanode. Replica targets are chosen at random among nodes whose
 * remaining budget is still positive.
 */
static Block[][] distributeBlocks(ExtendedBlock[] blocks,
    short replicationFactor, final long[] distribution) {
    // Work on a copy so the caller's distribution array is untouched.
    long[] remaining = new long[distribution.length];
    System.arraycopy(distribution, 0, remaining, 0, distribution.length);

    List<List<Block>> reports =
        new ArrayList<List<Block>>(remaining.length);
    Block[][] results = new Block[remaining.length][];
    for (int i = 0; i < remaining.length; i++) {
        reports.add(new ArrayList<Block>());
    }

    for (ExtendedBlock block : blocks) {
        for (int rep = 0; rep < replicationFactor; rep++) {
            // Retry random picks until a node with remaining budget is found.
            int target;
            boolean chosen = false;
            do {
                target = r.nextInt(remaining.length);
                if (remaining[target] > 0) {
                    chosen = true;
                    reports.get(target).add(block.getLocalBlock());
                    remaining[target] -= block.getNumBytes();
                }
            } while (!chosen);
        }
    }

    for (int i = 0; i < remaining.length; i++) {
        List<Block> nodeBlocks = reports.get(i);
        results[i] = nodeBlocks.toArray(new Block[nodeBlocks.size()]);
    }
    return results;
}
/** Returns the total of all values in {@code x}. */
static long sum(long[] x) {
    long total = 0L;
    for (int i = 0; i < x.length; i++) {
        total += x[i];
    }
    return total;
}
/**
 * Starts a cluster, fills it to the requested total size, redistributes
 * the blocks according to {@code distribution}, then restarts the cluster
 * (without formatting) with the redistributed blocks injected and runs the
 * balancer against it.
 *
 * @param conf cluster configuration
 * @param distribution desired per-datanode used space
 * @param capacities per-datanode capacities (same length as distribution)
 * @param racks per-datanode rack names (same length as distribution)
 */
private void testUnevenDistribution(Configuration conf,
    long distribution[], long capacities[], String[] racks) throws Exception {
    int numDatanodes = distribution.length;
    if (capacities.length != numDatanodes || racks.length != numDatanodes) {
        throw new IllegalArgumentException("Array length is not the same");
    }

    // calculate total space that need to be filled
    final long totalUsedSpace = sum(distribution);

    // fill the cluster (generateBlocks starts and shuts down its own cluster)
    ExtendedBlock[] blocks = generateBlocks(conf, totalUsedSpace,
        (short) numDatanodes);

    // redistribute blocks
    Block[][] blocksDN = distributeBlocks(
        blocks, (short)(numDatanodes-1), distribution);

    // restart the cluster: do NOT format the cluster
    // (safe mode threshold set to 0 so the namenode leaves safe mode at once)
    conf.set(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, "0.0f");
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDatanodes)
        .format(false)
        .racks(racks)
        .simulatedCapacities(capacities)
        .build();
    cluster.waitActive();
    client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0).getUri(),
        ClientProtocol.class).getProxy();

    // Inject the redistributed block reports into each datanode.
    for(int i = 0; i < blocksDN.length; i++)
        cluster.injectBlocks(i, Arrays.asList(blocksDN[i]), null);

    final long totalCapacity = sum(capacities);
    runBalancer(conf, totalUsedSpace, totalCapacity);
    cluster.shutdown();
}
/**
 * Wait until heartbeat gives expected results, within CAPACITY_ALLOWED_VARIANCE,
 * summed over all nodes. Times out after TIMEOUT msec.
 *
 * NOTE(review): an expectedUsedSpace or expectedTotalSpace of 0 would divide
 * by zero below — callers presumably always pass positive values; confirm.
 *
 * @param expectedUsedSpace expected cluster-wide used space (bytes)
 * @param expectedTotalSpace expected cluster-wide capacity (bytes)
 * @throws IOException - if getStats() fails
 * @throws TimeoutException if values are not reached within TIMEOUT msec
 */
static void waitForHeartBeat(long expectedUsedSpace,
    long expectedTotalSpace, ClientProtocol client, MiniDFSCluster cluster)
    throws IOException, TimeoutException {
    long timeout = TIMEOUT;
    long failtime = (timeout <= 0L) ? Long.MAX_VALUE
        : Time.monotonicNow() + timeout;

    while (true) {
        // status[0] = total capacity, status[1] = used space (per getStats()).
        long[] status = client.getStats();
        double totalSpaceVariance = Math.abs((double)status[0] - expectedTotalSpace)
            / expectedTotalSpace;
        double usedSpaceVariance = Math.abs((double)status[1] - expectedUsedSpace)
            / expectedUsedSpace;
        if (totalSpaceVariance < CAPACITY_ALLOWED_VARIANCE
            && usedSpaceVariance < CAPACITY_ALLOWED_VARIANCE)
            break; //done

        if (Time.monotonicNow() > failtime) {
            throw new TimeoutException("Cluster failed to reached expected values of "
                + "totalSpace (current: " + status[0]
                + ", expected: " + expectedTotalSpace
                + "), or usedSpace (current: " + status[1]
                + ", expected: " + expectedUsedSpace
                + "), in more than " + timeout + " msec.");
        }
        try {
            // Poll every 100ms until the heartbeat reflects the expected state.
            Thread.sleep(100L);
        } catch(InterruptedException ignored) {
        }
    }
}
/**
 * Wait until balanced: each datanode gives utilization within
 * BALANCE_ALLOWED_VARIANCE of average.
 *
 * <p>Convenience overload that delegates with an expected-excluded-node
 * count of 0.
 *
 * @throws IOException
 * @throws TimeoutException
 */
static void waitForBalancer(long totalUsedSpace, long totalCapacity,
    ClientProtocol client, MiniDFSCluster cluster, BalancerParameters p)
    throws IOException, TimeoutException {
    waitForBalancer(totalUsedSpace, totalCapacity, client, cluster, p, 0);
}
/**
 * Make sure that balancer can't move pinned blocks.
 * If specified favoredNodes when create file, blocks will be pinned use
 * sticky bit.
 *
 * <p>The test fills two datanodes to 80%, writing with favored nodes so the
 * blocks are pinned, adds an empty third node, and expects the balancer to
 * exit with NO_MOVE_PROGRESS because nothing may be moved.
 *
 * @throws Exception
 */
@Test(timeout=100000)
public void testBalancerWithPinnedBlocks() throws Exception {
    // This test assumes stick-bit based block pin mechanism available only
    // in Linux/Unix. It can be unblocked on Windows when HDFS-7759 is ready to
    // provide a different mechanism for Windows.
    assumeNotWindows();

    final Configuration conf = new HdfsConfiguration();
    initConf(conf);
    conf.setBoolean(DFS_DATANODE_BLOCK_PINNING_ENABLED, true);

    long[] capacities = new long[] { CAPACITY, CAPACITY };
    String[] hosts = {"host0", "host1"};
    String[] racks = { RACK0, RACK1 };
    int numOfDatanodes = capacities.length;

    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(capacities.length)
        .hosts(hosts).racks(racks).simulatedCapacities(capacities).build();

    cluster.waitActive();
    client = NameNodeProxies.createProxy(conf,
        cluster.getFileSystem(0).getUri(), ClientProtocol.class).getProxy();

    // fill up the cluster to be 80% full
    long totalCapacity = sum(capacities);
    long totalUsedSpace = totalCapacity * 8 / 10;
    InetSocketAddress[] favoredNodes = new InetSocketAddress[numOfDatanodes];
    for (int i = 0; i < favoredNodes.length; i++) {
        // DFSClient will attempt reverse lookup. In case it resolves
        // "127.0.0.1" to "localhost", we manually specify the hostname.
        int port = cluster.getDataNodes().get(i).getXferAddress().getPort();
        favoredNodes[i] = new InetSocketAddress(hosts[i], port);
    }

    // Writing with favoredNodes pins the created blocks.
    DFSTestUtil.createFile(cluster.getFileSystem(0), filePath, false, 1024,
        totalUsedSpace / numOfDatanodes, DEFAULT_BLOCK_SIZE,
        (short) numOfDatanodes, 0, false, favoredNodes);

    // start up an empty node with the same capacity
    cluster.startDataNodes(conf, 1, true, null, new String[] { RACK2 },
        new long[] { CAPACITY });

    totalCapacity += CAPACITY;

    // run balancer and validate results
    waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);

    // start rebalancing; pinned blocks must prevent any progress
    Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
    int r = Balancer.run(namenodes, BalancerParameters.DEFAULT, conf);
    assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
}
/**
 * Verify balancer won't violate the default block placement policy.
 *
 * <p>Two datanodes on two racks; a new empty node is added on RACK1 and the
 * shared helper runs the balancer and verifies placement afterwards.
 *
 * @throws Exception
 */
@Test(timeout=100000)
public void testRackPolicyAfterBalance() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    initConf(conf);
    long[] capacities = new long[] { CAPACITY, CAPACITY };
    String[] hosts = {"host0", "host1"};
    String[] racks = { RACK0, RACK1 };
    runBalancerAndVerifyBlockPlacmentPolicy(conf, capacities, hosts, racks,
        null, CAPACITY, "host2", RACK1, null);
}
/**
 * Verify balancer won't violate upgrade domain block placement policy.
 *
 * <p>Three datanodes with distinct upgrade domains; a new empty node joins
 * with an already-used upgrade domain ("ud2") and the shared helper runs the
 * balancer and verifies placement afterwards.
 *
 * @throws Exception
 */
@Test(timeout=100000)
public void testUpgradeDomainPolicyAfterBalance() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    initConf(conf);
    // Switch the namenode to the upgrade-domain-aware placement policy.
    conf.setClass(DFSConfigKeys.DFS_BLOCK_REPLICATOR_CLASSNAME_KEY,
        BlockPlacementPolicyWithUpgradeDomain.class,
        BlockPlacementPolicy.class);
    long[] capacities = new long[] { CAPACITY, CAPACITY, CAPACITY };
    String[] hosts = {"host0", "host1", "host2"};
    String[] racks = { RACK0, RACK1, RACK1 };
    String[] UDs = { "ud0", "ud1", "ud2" };
    runBalancerAndVerifyBlockPlacmentPolicy(conf, capacities, hosts, racks,
        UDs, CAPACITY, "host3", RACK2, "ud2");
}
/**
 * Starts a mini cluster with the given datanode layout, fills it to 80%
 * full, adds one empty datanode, runs the balancer, and then verifies
 * that every block of the test file still satisfies the cluster's
 * configured block placement policy.
 * NOTE(review): "Placment" in the method name is a typo; renaming would
 * touch all callers, so it is only documented here.
 *
 * @param conf cluster configuration (placement policy may be preset)
 * @param capacities simulated capacity of each original datanode
 * @param hosts hostnames of the original datanodes
 * @param racks rack assignment of the original datanodes
 * @param UDs upgrade domain per original datanode, or null for none
 * @param newCapacity capacity of the additional empty datanode
 * @param newHost hostname of the additional datanode
 * @param newRack rack of the additional datanode
 * @param newUD upgrade domain of the additional datanode, or null
 */
private void runBalancerAndVerifyBlockPlacmentPolicy(Configuration conf,
    long[] capacities, String[] hosts, String[] racks, String[] UDs,
    long newCapacity, String newHost, String newRack, String newUD)
    throws Exception {
  int numOfDatanodes = capacities.length;
  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(capacities.length)
      .hosts(hosts).racks(racks).simulatedCapacities(capacities).build();
  DatanodeManager dm = cluster.getNamesystem().getBlockManager().
      getDatanodeManager();
  if (UDs != null) {
    // Upgrade domains are assigned directly on the DatanodeManager's view
    // of each registered node.
    for(int i = 0; i < UDs.length; i++) {
      DatanodeID datanodeId = cluster.getDataNodes().get(i).getDatanodeId();
      dm.getDatanode(datanodeId).setUpgradeDomain(UDs[i]);
    }
  }
  try {
    cluster.waitActive();
    client = NameNodeProxies.createProxy(conf,
        cluster.getFileSystem(0).getUri(), ClientProtocol.class).getProxy();
    // fill up the cluster to be 80% full
    long totalCapacity = sum(capacities);
    long totalUsedSpace = totalCapacity * 8 / 10;
    final long fileSize = totalUsedSpace / numOfDatanodes;
    DFSTestUtil.createFile(cluster.getFileSystem(0), filePath, false, 1024,
        fileSize, DEFAULT_BLOCK_SIZE, (short) numOfDatanodes, 0, false);
    // start up an empty node with the same capacity on the same rack as the
    // pinned host.
    cluster.startDataNodes(conf, 1, true, null, new String[] { newRack },
        new String[] { newHost }, new long[] { newCapacity });
    if (newUD != null) {
      // The newly started node is appended at index numOfDatanodes.
      DatanodeID newId = cluster.getDataNodes().get(
          numOfDatanodes).getDatanodeId();
      dm.getDatanode(newId).setUpgradeDomain(newUD);
    }
    totalCapacity += newCapacity;
    // run balancer and validate results
    waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
    // start rebalancing
    Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
    Balancer.run(namenodes, BalancerParameters.DEFAULT, conf);
    // After balancing, every block of the file must still satisfy the
    // active placement policy.
    BlockPlacementPolicy placementPolicy =
        cluster.getNamesystem().getBlockManager().getBlockPlacementPolicy();
    List<LocatedBlock> locatedBlocks = client.
        getBlockLocations(fileName, 0, fileSize).getLocatedBlocks();
    for (LocatedBlock locatedBlock : locatedBlocks) {
      BlockPlacementStatus status = placementPolicy.verifyBlockPlacement(
          locatedBlock.getLocations(), numOfDatanodes);
      assertTrue(status.isPlacementPolicySatisfied());
    }
  } finally {
    cluster.shutdown();
  }
}
/**
 * Wait until balanced: each datanode gives utilization within
 * BALANCE_ALLOWED_VARIANCE of average. Datanodes listed in the excluded
 * set (or absent from a non-empty included set) must remain completely
 * empty and are counted against {@code expectedExcludedNodes}.
 *
 * @param totalUsedSpace total bytes expected to be in use
 * @param totalCapacity total capacity of the participating datanodes
 * @param client namenode RPC proxy used to fetch datanode reports
 * @param cluster the mini cluster under test
 * @param p balancer parameters carrying the include/exclude sets
 * @param expectedExcludedNodes number of nodes expected to sit out
 * @throws IOException on RPC failure
 * @throws TimeoutException if balance is not reached within TIMEOUT msec
 */
static void waitForBalancer(long totalUsedSpace, long totalCapacity,
    ClientProtocol client, MiniDFSCluster cluster, BalancerParameters p,
    int expectedExcludedNodes) throws IOException, TimeoutException {
  long timeout = TIMEOUT;
  long failtime = (timeout <= 0L) ? Long.MAX_VALUE
      : Time.monotonicNow() + timeout;
  // Capacity only counts nodes that actually participate in balancing.
  if (!p.getIncludedNodes().isEmpty()) {
    totalCapacity = p.getIncludedNodes().size() * CAPACITY;
  }
  if (!p.getExcludedNodes().isEmpty()) {
    totalCapacity -= p.getExcludedNodes().size() * CAPACITY;
  }
  final double avgUtilization = ((double)totalUsedSpace) / totalCapacity;
  boolean balanced;
  do {
    DatanodeInfo[] datanodeReport =
        client.getDatanodeReport(DatanodeReportType.ALL);
    assertEquals(datanodeReport.length, cluster.getDataNodes().size());
    balanced = true;
    int actualExcludedNodeCount = 0;
    for (DatanodeInfo datanode : datanodeReport) {
      double nodeUtilization = ((double)datanode.getDfsUsed())
          / datanode.getCapacity();
      // Excluded nodes, and nodes outside a non-empty include list, must
      // not have received any data.
      if (Dispatcher.Util.isExcluded(p.getExcludedNodes(), datanode)) {
        assertTrue(nodeUtilization == 0);
        actualExcludedNodeCount++;
        continue;
      }
      if (!Dispatcher.Util.isIncluded(p.getIncludedNodes(), datanode)) {
        assertTrue(nodeUtilization == 0);
        actualExcludedNodeCount++;
        continue;
      }
      if (Math.abs(avgUtilization - nodeUtilization) > BALANCE_ALLOWED_VARIANCE) {
        balanced = false;
        if (Time.monotonicNow() > failtime) {
          throw new TimeoutException(
              "Rebalancing expected avg utilization to become "
              + avgUtilization + ", but on datanode " + datanode
              + " it remains at " + nodeUtilization
              + " after more than " + TIMEOUT + " msec.");
        }
        // Back off briefly before re-polling the datanode report.
        try {
          Thread.sleep(100);
        } catch (InterruptedException ignored) {
        }
        break;
      }
    }
    // NOTE(review): the break above exits the for-loop before all nodes
    // are inspected, so this assert implicitly assumes excluded nodes
    // appear before any still-unbalanced node in the report — confirm.
    assertEquals(expectedExcludedNodes,actualExcludedNodeCount);
  } while (!balanced);
}
/**
 * Renders a long array as e.g. {@code [1, 2, 3]}, or {@code <empty>}
 * for a zero-length array.
 */
String long2String(long[] array) {
  if (array.length == 0) {
    return "<empty>";
  }
  StringBuilder sb = new StringBuilder();
  sb.append('[');
  for (int idx = 0; idx < array.length; idx++) {
    if (idx > 0) {
      sb.append(", ");
    }
    sb.append(array[idx]);
  }
  return sb.append(']').toString();
}
/**
 * Describes the extra datanodes a balancing test adds to the cluster,
 * including which hosts should be handed to the balancer as included
 * or excluded nodes.
 */
static abstract class NewNodeInfo {
  // Hosts the balancer must skip / restrict itself to, respectively.
  Set<String> nodesToBeExcluded = new HashSet<String>();
  Set<String> nodesToBeIncluded = new HashSet<String>();

  public Set<String> getNodesToBeIncluded() {
    return nodesToBeIncluded;
  }
  public Set<String> getNodesToBeExcluded() {
    return nodesToBeExcluded;
  }

  /** Host names for the new nodes, or null when ports distinguish them. */
  abstract String[] getNames();
  /** Number of new datanodes to start. */
  abstract int getNumberofNewNodes();
  /** Number of nodes to pass via the include list. */
  abstract int getNumberofIncludeNodes();
  /** Number of nodes to pass via the exclude list. */
  abstract int getNumberofExcludeNodes();
}
/**
 * New-node specification where each added datanode gets an explicit
 * host name; the include/exclude sets are supplied by the caller.
 */
static class HostNameBasedNodes extends NewNodeInfo {
  String[] hostnames;

  public HostNameBasedNodes(String[] hostnames,
      Set<String> nodesToBeExcluded, Set<String> nodesToBeIncluded) {
    this.hostnames = hostnames;
    this.nodesToBeExcluded = nodesToBeExcluded;
    this.nodesToBeIncluded = nodesToBeIncluded;
  }

  @Override
  String[] getNames() {
    return hostnames;
  }

  @Override
  int getNumberofNewNodes() {
    // One new datanode per supplied host name.
    return hostnames.length;
  }

  @Override
  int getNumberofIncludeNodes() {
    return nodesToBeIncluded.size();
  }

  @Override
  int getNumberofExcludeNodes() {
    return nodesToBeExcluded.size();
  }
}
/**
 * New-node specification where only counts are given. The started
 * datanodes share a host name and are told apart by port number; the
 * include/exclude sets are filled in later from the live cluster.
 */
static class PortNumberBasedNodes extends NewNodeInfo {
  int newNodes;
  int excludeNodes;
  int includeNodes;

  public PortNumberBasedNodes(int newNodes, int excludeNodes, int includeNodes) {
    this.newNodes = newNodes;
    this.excludeNodes = excludeNodes;
    this.includeNodes = includeNodes;
  }

  @Override
  String[] getNames() {
    // No explicit host names: nodes are distinguished by port.
    return null;
  }

  @Override
  int getNumberofNewNodes() {
    return newNodes;
  }

  @Override
  int getNumberofIncludeNodes() {
    return includeNodes;
  }

  @Override
  int getNumberofExcludeNodes() {
    return excludeNodes;
  }
}
/**
 * Convenience overload of {@code doTest} for scenarios with no extra
 * new-node specification and no host files.
 */
private void doTest(Configuration conf, long[] capacities, String[] racks,
    long newCapacity, String newRack, boolean useTool) throws Exception {
  final NewNodeInfo noNewNodes = null;
  doTest(conf, capacities, racks, newCapacity, newRack, noNewNodes,
      useTool, false);
}
/** This test start a cluster with specified number of nodes,
 * and fills it to be 30% full (with a single file replicated identically
 * to all datanodes);
 * It then adds one new empty node and starts balancing.
 *
 * @param conf - configuration
 * @param capacities - array of capacities of original nodes in cluster
 * @param racks - array of racks for original nodes in cluster
 * @param newCapacity - new node's capacity
 * @param newRack - new node's rack
 * @param nodes - information about new nodes to be started, or null to
 *                start a single empty node with newCapacity/newRack
 * @param useTool - if true run test via Cli with command-line argument
 *                  parsing, etc. Otherwise invoke balancer API directly.
 * @param useFile - if true, the hosts to included or excluded will be stored in a
 *                  file and then later read from the file.
 * @throws Exception
 */
private void doTest(Configuration conf, long[] capacities,
    String[] racks, long newCapacity, String newRack, NewNodeInfo nodes,
    boolean useTool, boolean useFile) throws Exception {
  LOG.info("capacities = " + long2String(capacities));
  LOG.info("racks      = " + Arrays.asList(racks));
  LOG.info("newCapacity= " + newCapacity);
  LOG.info("newRack    = " + newRack);
  LOG.info("useTool    = " + useTool);
  assertEquals(capacities.length, racks.length);
  int numOfDatanodes = capacities.length;
  cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(capacities.length)
      .racks(racks)
      .simulatedCapacities(capacities)
      .build();
  try {
    cluster.waitActive();
    client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0).getUri(),
        ClientProtocol.class).getProxy();

    long totalCapacity = sum(capacities);

    // fill up the cluster to be 30% full
    long totalUsedSpace = totalCapacity*3/10;
    createFile(cluster, filePath, totalUsedSpace / numOfDatanodes,
        (short) numOfDatanodes, 0);

    if (nodes == null) { // there is no specification of new nodes.
      // start up an empty node with the same capacity and on the same rack
      cluster.startDataNodes(conf, 1, true, null,
          new String[]{newRack}, null,new long[]{newCapacity});
      totalCapacity += newCapacity;
    } else {
      //if running a test with "include list", include original nodes as well
      if (nodes.getNumberofIncludeNodes()>0) {
        for (DataNode dn: cluster.getDataNodes())
          nodes.getNodesToBeIncluded().add(dn.getDatanodeId().getHostName());
      }
      // Every new node shares the same rack and capacity.
      String[] newRacks = new String[nodes.getNumberofNewNodes()];
      long[] newCapacities = new long[nodes.getNumberofNewNodes()];
      for (int i=0; i < nodes.getNumberofNewNodes(); i++) {
        newRacks[i] = newRack;
        newCapacities[i] = newCapacity;
      }
      // if host names are specified for the new nodes to be created.
      if (nodes.getNames() != null) {
        cluster.startDataNodes(conf, nodes.getNumberofNewNodes(), true, null,
            newRacks, nodes.getNames(), newCapacities);
        totalCapacity += newCapacity*nodes.getNumberofNewNodes();
      } else {  // host names are not specified
        cluster.startDataNodes(conf, nodes.getNumberofNewNodes(), true, null,
            newRacks, null, newCapacities);
        totalCapacity += newCapacity*nodes.getNumberofNewNodes();
        // Port-based nodes: pick the most recently started datanodes (they
        // sit at the end of the datanode list) by transfer address.
        //populate the include nodes
        if (nodes.getNumberofIncludeNodes() > 0) {
          int totalNodes = cluster.getDataNodes().size();
          for (int i=0; i < nodes.getNumberofIncludeNodes(); i++) {
            nodes.getNodesToBeIncluded().add (cluster.getDataNodes().get(
                totalNodes-1-i).getDatanodeId().getXferAddr());
          }
        }
        //polulate the exclude nodes
        if (nodes.getNumberofExcludeNodes() > 0) {
          int totalNodes = cluster.getDataNodes().size();
          for (int i=0; i < nodes.getNumberofExcludeNodes(); i++) {
            nodes.getNodesToBeExcluded().add (cluster.getDataNodes().get(
                totalNodes-1-i).getDatanodeId().getXferAddr());
          }
        }
      }
    }
    // run balancer and validate results
    BalancerParameters.Builder pBuilder =
        new BalancerParameters.Builder();
    if (nodes != null) {
      pBuilder.setExcludedNodes(nodes.getNodesToBeExcluded());
      pBuilder.setIncludedNodes(nodes.getNodesToBeIncluded());
      pBuilder.setRunDuringUpgrade(false);
    }
    BalancerParameters p = pBuilder.build();

    // Nodes that sit out: either the explicit exclude list, or (with an
    // include list) every node not on it.
    int expectedExcludedNodes = 0;
    if (nodes != null) {
      if (!nodes.getNodesToBeExcluded().isEmpty()) {
        expectedExcludedNodes = nodes.getNodesToBeExcluded().size();
      } else if (!nodes.getNodesToBeIncluded().isEmpty()) {
        expectedExcludedNodes =
            cluster.getDataNodes().size() - nodes.getNodesToBeIncluded().size();
      }
    }

    // run balancer and validate results
    if (useTool) {
      runBalancerCli(conf, totalUsedSpace, totalCapacity, p, useFile, expectedExcludedNodes);
    } else {
      runBalancer(conf, totalUsedSpace, totalCapacity, p, expectedExcludedNodes);
    }
  } finally {
    cluster.shutdown();
  }
}
/** Runs the balancer with default parameters and no excluded nodes. */
private void runBalancer(Configuration conf, long totalUsedSpace,
    long totalCapacity) throws Exception {
  runBalancer(conf, totalUsedSpace, totalCapacity,
      BalancerParameters.DEFAULT, 0);
}
/**
 * Waits for heartbeats, runs the balancer, and validates its exit code:
 * with zero allowed concurrent moves the balancer must report
 * NO_MOVE_PROGRESS; otherwise it must report SUCCESS, after which the
 * cluster must converge to a balanced state.
 */
private void runBalancer(Configuration conf, long totalUsedSpace,
    long totalCapacity, BalancerParameters p, int excludedNodes)
    throws Exception {
  waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);

  // start rebalancing
  Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
  final int r = runBalancer(namenodes, p, conf);
  if (conf.getInt(DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY,
      DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_DEFAULT) ==0) {
    // No mover threads configured: the balancer cannot make progress by
    // design, so do not wait for balance below.
    assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
    return;
  } else {
    assertEquals(ExitStatus.SUCCESS.getExitCode(), r);
  }
  waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
  LOG.info(" .");
  waitForBalancer(totalUsedSpace, totalCapacity, client, cluster, p, excludedNodes);
}
/**
 * Drives the balancer iteration loop directly (instead of via
 * Balancer.run) so the test can assert per-iteration progress: after the
 * first iteration every successful iteration must have moved bytes.
 *
 * @return the exit code of the first failing iteration, or SUCCESS
 */
private static int runBalancer(Collection<URI> namenodes,
    final BalancerParameters p,
    Configuration conf) throws IOException, InterruptedException {
  // Sleep long enough for two heartbeats plus one replication interval
  // so namenode state settles between iterations.
  final long sleeptime =
      conf.getLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY,
          DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT) * 2000 +
      conf.getLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY,
          DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_DEFAULT) * 1000;
  LOG.info("namenodes  = " + namenodes);
  LOG.info("parameters = " + p);
  LOG.info("Print stack trace", new Throwable());

  System.out.println("Time Stamp               Iteration#  Bytes Already Moved  Bytes Left To Move  Bytes Being Moved");

  List<NameNodeConnector> connectors = Collections.emptyList();
  try {
    connectors = NameNodeConnector.newNameNodeConnectors(namenodes,
        Balancer.class.getSimpleName(), Balancer.BALANCER_ID_PATH, conf,
        BalancerParameters.DEFAULT.getMaxIdleIteration());

    boolean done = false;
    for(int iteration = 0; !done; iteration++) {
      done = true;
      // Randomize namenode order each pass, mirroring Balancer.run.
      Collections.shuffle(connectors);
      for(NameNodeConnector nnc : connectors) {
        final Balancer b = new Balancer(nnc, p, conf);
        final Result r = b.runOneIteration();
        r.print(iteration, System.out);

        // clean all lists
        b.resetData(conf);
        if (r.exitStatus == ExitStatus.IN_PROGRESS) {
          done = false;
        } else if (r.exitStatus != ExitStatus.SUCCESS) {
          //must be an error statue, return.
          return r.exitStatus.getExitCode();
        } else {
          // A successful non-first iteration must have actually moved data.
          if (iteration > 0) {
            assertTrue(r.bytesAlreadyMoved > 0);
          }
        }
      }

      if (!done) {
        Thread.sleep(sleeptime);
      }
    }
  } finally {
    // Release balancer-id leases even on assertion failure.
    for(NameNodeConnector nnc : connectors) {
      IOUtils.cleanup(LOG, nnc);
    }
  }
  return ExitStatus.SUCCESS.getExitCode();
}
/**
 * Runs the balancer through its command-line interface and verifies the
 * cluster converges. Exclude/include hosts are passed either inline or
 * via temporary files (cleaned up on success).
 *
 * @param p carries the include/exclude sets to translate into CLI args
 * @param useFile write host lists to files and pass them with -f
 * @param expectedExcludedNodes nodes expected to remain empty
 */
private void runBalancerCli(Configuration conf, long totalUsedSpace,
    long totalCapacity, BalancerParameters p, boolean useFile,
    int expectedExcludedNodes) throws Exception {
  waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
  List <String> args = new ArrayList<String>();
  args.add("-policy");
  args.add("datanode");

  File excludeHostsFile = null;
  if (!p.getExcludedNodes().isEmpty()) {
    args.add("-exclude");
    if (useFile) {
      excludeHostsFile = writeHostsFile("exclude-hosts-file",
          p.getExcludedNodes());
      args.add("-f");
      args.add("exclude-hosts-file");
    } else {
      args.add(StringUtils.join(p.getExcludedNodes(), ','));
    }
  }

  File includeHostsFile = null;
  if (!p.getIncludedNodes().isEmpty()) {
    args.add("-include");
    if (useFile) {
      includeHostsFile = writeHostsFile("include-hosts-file",
          p.getIncludedNodes());
      args.add("-f");
      args.add("include-hosts-file");
    } else {
      args.add(StringUtils.join(p.getIncludedNodes(), ','));
    }
  }

  final Tool tool = new Cli();
  tool.setConf(conf);
  final int r = tool.run(args.toArray(new String[0])); // start rebalancing

  assertEquals("Tools should exit 0 on success", 0, r);
  waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
  LOG.info("Rebalancing with default ctor.");
  waitForBalancer(totalUsedSpace, totalCapacity, client, cluster, p, expectedExcludedNodes);

  if (excludeHostsFile != null && excludeHostsFile.exists()) {
    excludeHostsFile.delete();
  }
  if (includeHostsFile != null && includeHostsFile.exists()) {
    includeHostsFile.delete();
  }
}

/**
 * Writes one host per line to the named file and returns it.
 * Uses try-with-resources so the writer is closed even if a write
 * fails (the original code leaked the PrintWriter on exceptions).
 */
private static File writeHostsFile(String name, Set<String> hosts)
    throws IOException {
  File hostsFile = new File(name);
  try (PrintWriter pw = new PrintWriter(hostsFile)) {
    for (String host : hosts) {
      pw.write( host + "\n");
    }
  }
  return hostsFile;
}
/** One-node cluster test: add an empty node with half CAPACITY on the
 * same rack and balance onto it. */
private void oneNodeTest(Configuration conf, boolean useTool) throws Exception {
  final long halfCapacity = CAPACITY / 2;
  doTest(conf, new long[]{CAPACITY}, new String[]{RACK0}, halfCapacity,
      RACK0, useTool);
}
/** Two-node cluster test: add an empty same-capacity node on a third
 * rack and balance via the API. */
private void twoNodeTest(Configuration conf) throws Exception {
  final long[] capacities = {CAPACITY, CAPACITY};
  final String[] racks = {RACK0, RACK1};
  doTest(conf, capacities, racks, CAPACITY, RACK2, false);
}
/** Runs the one-node scenario with a caller-supplied configuration. */
public void integrationTest(Configuration conf) throws Exception {
  initConf(conf);
  // API invocation only; the CLI path is covered elsewhere.
  oneNodeTest(conf, false);
}
/** Entry point for the unknown-datanode scenario with default config. */
@Test(timeout = 100000)
public void testUnknownDatanodeSimple() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  testUnknownDatanode(conf);
}
/* We first start a cluster and fill the cluster up to a certain size.
 * Then we redistribute blocks according to the required distribution.
 * Then we start an empty datanode.
 * Afterwards a balancer is run to balance the cluster.
 * A partially filled datanode is excluded during balancing.
 * This triggers a situation where one of the block's locations is unknown.
 */
private void testUnknownDatanode(Configuration conf)
    throws IOException, InterruptedException, TimeoutException {
  long distribution[] = new long[] {50*CAPACITY/100, 70*CAPACITY/100, 0*CAPACITY/100};
  long capacities[] = new long[]{CAPACITY, CAPACITY, CAPACITY};
  String racks[] = new String[] {RACK0, RACK1, RACK1};

  int numDatanodes = distribution.length;
  if (capacities.length != numDatanodes || racks.length != numDatanodes) {
    throw new IllegalArgumentException("Array length is not the same");
  }

  // calculate total space that need to be filled
  final long totalUsedSpace = sum(distribution);

  // fill the cluster
  ExtendedBlock[] blocks = generateBlocks(conf, totalUsedSpace,
      (short) numDatanodes);

  // redistribute blocks
  Block[][] blocksDN = distributeBlocks(
      blocks, (short)(numDatanodes-1), distribution);

  // restart the cluster: do NOT format the cluster
  conf.set(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, "0.0f");
  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDatanodes)
      .format(false)
      .racks(racks)
      .simulatedCapacities(capacities)
      .build();
  cluster.waitActive();
  client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0).getUri(),
      ClientProtocol.class).getProxy();

  // Inject the precomputed distribution into each original datanode.
  // Fix: iterate over numDatanodes instead of a hard-coded 3, so the loop
  // stays in sync with the arrays validated above.
  for(int i = 0; i < numDatanodes; i++) {
    cluster.injectBlocks(i, Arrays.asList(blocksDN[i]), null);
  }

  // Start one empty datanode on RACK0.
  cluster.startDataNodes(conf, 1, true, null,
      new String[]{RACK0}, null,new long[]{CAPACITY});
  cluster.triggerHeartbeats();

  // Exclude the first (partially filled) datanode while balancing.
  Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
  Set<String> datanodes = new HashSet<String>();
  datanodes.add(cluster.getDataNodes().get(0).getDatanodeId().getHostName());
  BalancerParameters.Builder pBuilder =
      new BalancerParameters.Builder();
  pBuilder.setExcludedNodes(datanodes);
  pBuilder.setRunDuringUpgrade(false);
  final int r = Balancer.run(namenodes, pBuilder.build(), conf);
  assertEquals(ExitStatus.SUCCESS.getExitCode(), r);
}
/**
 * Test parse method in Balancer#Cli class with threshold value out of
 * boundaries. Both 0 and 101 lie outside the valid (0, 100] range.
 */
@Test(timeout=100000)
public void testBalancerCliParseWithThresholdOutOfBoundaries() {
  assertParseThresholdRejected("0", "Number out of range: threshold = 0.0");
  assertParseThresholdRejected("101",
      "Number out of range: threshold = 101.0");
}

/**
 * Asserts that parsing {@code -threshold <value>} throws an
 * IllegalArgumentException with exactly the expected message.
 */
private static void assertParseThresholdRejected(String value,
    String expectedMessage) {
  String[] parameters = new String[] { "-threshold", value };
  try {
    Balancer.Cli.parse(parameters);
    fail("IllegalArgumentException is expected when threshold value"
        + " is out of boundary.");
  } catch (IllegalArgumentException e) {
    assertEquals(expectedMessage, e.getMessage());
  }
}
/** Test a cluster with even distribution, then a new empty node is
 * added to the cluster. */
@Test(timeout=100000)
public void testBalancer0() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  testBalancer0Internal(conf);
}
/** Runs the one-node and two-node even-distribution scenarios. */
void testBalancer0Internal(Configuration conf) throws Exception {
  initConf(conf);
  // API path for the single-node case, then the two-node case.
  oneNodeTest(conf, false);
  twoNodeTest(conf);
}
/** Test unevenly distributed cluster. */
@Test(timeout=100000)
public void testBalancer1() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  testBalancer1Internal(conf);
}
/** Balances a two-node cluster where usage is 50% vs 10% of CAPACITY. */
void testBalancer1Internal(Configuration conf) throws Exception {
  initConf(conf);
  final long[] usedSpace = {50 * CAPACITY / 100, 10 * CAPACITY / 100};
  final long[] capacities = {CAPACITY, CAPACITY};
  final String[] racks = {RACK0, RACK1};
  testUnevenDistribution(conf, usedSpace, capacities, racks);
}
/** Zero mover threads is an invalid configuration and must be rejected. */
@Test(expected=HadoopIllegalArgumentException.class)
public void testBalancerWithZeroThreadsForMove() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  conf.setInt(
      DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY, 0);
  testBalancer1Internal(conf);
}
/** Balancing must succeed with an explicit non-default mover thread count. */
@Test(timeout=100000)
public void testBalancerWithNonZeroThreadsForMove() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  conf.setInt(
      DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY, 8);
  testBalancer1Internal(conf);
}
/** Entry point for the default-constructor balancing scenario. */
@Test(timeout=100000)
public void testBalancer2() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  testBalancer2Internal(conf);
}
/** Two even nodes plus one empty node on a new rack, default-ctor path. */
void testBalancer2Internal(Configuration conf) throws Exception {
  initConf(conf);
  final long[] capacities = {CAPACITY, CAPACITY};
  final String[] racks = {RACK0, RACK1};
  testBalancerDefaultConstructor(conf, capacities, racks, CAPACITY, RACK2);
}
/**
 * Fills a cluster to 30% full, adds one empty node of newCapacity on
 * newRack, and balances via the 3-argument runBalancer (i.e. default
 * BalancerParameters, no excluded nodes).
 */
private void testBalancerDefaultConstructor(Configuration conf,
    long[] capacities, String[] racks, long newCapacity, String newRack)
    throws Exception {
  int numOfDatanodes = capacities.length;
  assertEquals(numOfDatanodes, racks.length);
  cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(capacities.length)
      .racks(racks)
      .simulatedCapacities(capacities)
      .build();
  try {
    cluster.waitActive();
    client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0).getUri(),
        ClientProtocol.class).getProxy();

    long totalCapacity = sum(capacities);

    // fill up the cluster to be 30% full
    long totalUsedSpace = totalCapacity * 3 / 10;
    createFile(cluster, filePath, totalUsedSpace / numOfDatanodes,
        (short) numOfDatanodes, 0);
    // start up an empty node with the same capacity and on the same rack
    cluster.startDataNodes(conf, 1, true, null, new String[] { newRack },
        new long[] { newCapacity });

    totalCapacity += newCapacity;

    // run balancer and validate results
    runBalancer(conf, totalUsedSpace, totalCapacity);
  } finally {
    cluster.shutdown();
  }
}
/**
 * Test parse method in Balancer#Cli class with wrong number of params.
 * Each argument vector below must be rejected with an
 * IllegalArgumentException.
 */
@Test(timeout=100000)
public void testBalancerCliParseWithWrongParams() {
  String reason =
      "IllegalArgumentException is expected when value is not specified";
  // Flags missing their mandatory value.
  assertCliParseFails(reason, "-threshold");
  assertCliParseFails(reason, "-policy");
  assertCliParseFails(reason, "-threshold", "1", "-policy");
  assertCliParseFails(reason, "-threshold", "1", "-include");
  assertCliParseFails(reason, "-threshold", "1", "-exclude");
  assertCliParseFails(reason, "-include", "-f");
  assertCliParseFails(reason, "-exclude", "-f");
  // Mutually exclusive flags.
  assertCliParseFails(
      "IllegalArgumentException is expected when both -exclude and -include are specified",
      "-include", "testnode1", "-exclude", "testnode2");
  assertCliParseFails(
      "IllegalArgumentException is expected when a value "
          + "is not specified for the blockpool flag",
      "-blockpools");
  assertCliParseFails(reason + " for -source parameter", "-source");
}

/**
 * Asserts that Balancer.Cli.parse rejects the given argument vector,
 * failing the test with {@code reason} if it does not.
 */
private static void assertCliParseFails(String reason, String... parameters) {
  try {
    Balancer.Cli.parse(parameters);
    fail(reason);
  } catch (IllegalArgumentException expected) {
    // expected: parse must reject these arguments
  }
}
/** Verifies -blockpools parsing yields the expected pool counts. */
@Test
public void testBalancerCliParseBlockpools() {
  // Each input paired with the pool count parse is expected to report.
  final String[] inputs = { "bp-1,bp-2,bp-3", "bp-1", "bp-1,,bp-2", "bp-1," };
  final int[] expectedCounts = { 3, 1, 3, 1 };
  for (int i = 0; i < inputs.length; i++) {
    BalancerParameters p =
        Balancer.Cli.parse(new String[] { "-blockpools", inputs[i] });
    assertEquals(expectedCounts[i], p.getBlockPools().size());
  }
}
/**
 * Verify balancer exits 0 on success.
 */
@Test(timeout=100000)
public void testExitZeroOnSuccess() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // Run through the CLI so the tool's exit code is what gets checked.
  oneNodeTest(conf, true);
}
/**
 * Even cluster; three named nodes are added and the balancer is invoked
 * via the API with two of them on the exclude list.
 */
@Test(timeout=100000)
public void testBalancerWithExcludeList() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> excludeHosts =
      new HashSet<String>(Arrays.asList("datanodeY", "datanodeZ"));
  NewNodeInfo nodes = new HostNameBasedNodes(
      new String[] {"datanodeX", "datanodeY", "datanodeZ"},
      excludeHosts, BalancerParameters.DEFAULT.getIncludedNodes());
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, false, false);
}
/**
 * Even cluster; three port-distinguished nodes are added and the
 * balancer is invoked via the API with two of them excluded.
 */
@Test(timeout=100000)
public void testBalancerWithExcludeListWithPorts() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // 3 new nodes, 2 excluded, 0 included.
  NewNodeInfo nodes = new PortNumberBasedNodes(3, 2, 0);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, false, false);
}
/**
 * Even cluster; three named nodes are added and the balancer is invoked
 * via the CLI with two of them on the exclude list (inline arguments).
 */
@Test(timeout=100000)
public void testBalancerCliWithExcludeList() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> excludeHosts =
      new HashSet<String>(Arrays.asList("datanodeY", "datanodeZ"));
  NewNodeInfo nodes = new HostNameBasedNodes(
      new String[] { "datanodeX", "datanodeY", "datanodeZ" },
      excludeHosts, BalancerParameters.DEFAULT.getIncludedNodes());
  doTest(conf, new long[] { CAPACITY, CAPACITY },
      new String[] { RACK0, RACK1 }, CAPACITY, RACK2, nodes, true, false);
}
/**
 * Even cluster; three port-distinguished nodes are added and the
 * balancer is invoked via the CLI with two of them excluded.
 */
@Test(timeout=100000)
public void testBalancerCliWithExcludeListWithPorts() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // 3 new nodes, 2 excluded, 0 included; CLI invocation, inline args.
  NewNodeInfo nodes = new PortNumberBasedNodes(3, 2, 0);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, true, false);
}
/**
 * Even cluster; three named nodes are added and the balancer is invoked
 * via the CLI, with the two excluded hosts read from a file (-f).
 */
@Test(timeout=100000)
public void testBalancerCliWithExcludeListInAFile() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> excludeHosts =
      new HashSet<String>(Arrays.asList("datanodeY", "datanodeZ"));
  NewNodeInfo nodes = new HostNameBasedNodes(
      new String[] {"datanodeX", "datanodeY", "datanodeZ"},
      excludeHosts, BalancerParameters.DEFAULT.getIncludedNodes());
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, true, true);
}
/**
 * Even cluster; three port-distinguished nodes are added and the
 * balancer is invoked via the CLI with the two excluded hosts read
 * from a file (-f).
 */
@Test(timeout=100000)
public void testBalancerCliWithExcludeListWithPortsInAFile() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // 3 new nodes, 2 excluded, 0 included; CLI invocation, hosts in a file.
  NewNodeInfo nodes = new PortNumberBasedNodes(3, 2, 0);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, true, true);
}
/**
 * Even cluster; three named nodes are added and the balancer is invoked
 * via the API with only one of them on the include list.
 */
@Test(timeout=100000)
public void testBalancerWithIncludeList() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> includeHosts =
      new HashSet<String>(Arrays.asList("datanodeY"));
  NewNodeInfo nodes = new HostNameBasedNodes(
      new String[] {"datanodeX", "datanodeY", "datanodeZ"},
      BalancerParameters.DEFAULT.getExcludedNodes(), includeHosts);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, false, false);
}
/**
 * Even cluster; three port-distinguished nodes are added and the
 * balancer is invoked via the API with one of them included.
 */
@Test(timeout=100000)
public void testBalancerWithIncludeListWithPorts() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // 3 new nodes, 0 excluded, 1 included.
  NewNodeInfo nodes = new PortNumberBasedNodes(3, 0, 1);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, false, false);
}
/**
 * Even cluster; three named nodes are added and the balancer is invoked
 * via the CLI with one of them on the include list (inline arguments).
 */
@Test(timeout=100000)
public void testBalancerCliWithIncludeList() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> includeHosts =
      new HashSet<String>(Arrays.asList("datanodeY"));
  NewNodeInfo nodes = new HostNameBasedNodes(
      new String[] {"datanodeX", "datanodeY", "datanodeZ"},
      BalancerParameters.DEFAULT.getExcludedNodes(), includeHosts);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, true, false);
}
/**
 * Even cluster; three port-distinguished nodes are added and the
 * balancer is invoked via the CLI with one of them included.
 */
@Test(timeout=100000)
public void testBalancerCliWithIncludeListWithPorts() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // 3 new nodes, 0 excluded, 1 included; CLI invocation, inline args.
  NewNodeInfo nodes = new PortNumberBasedNodes(3, 0, 1);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, true, false);
}
/**
 * Even cluster; three named nodes are added and the balancer is invoked
 * via the CLI with the included host read from a file (-f).
 */
@Test(timeout=100000)
public void testBalancerCliWithIncludeListInAFile() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> includeHosts =
      new HashSet<String>(Arrays.asList("datanodeY"));
  NewNodeInfo nodes = new HostNameBasedNodes(
      new String[] {"datanodeX", "datanodeY", "datanodeZ"},
      BalancerParameters.DEFAULT.getExcludedNodes(), includeHosts);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, true, true);
}
/**
 * Even cluster; three port-distinguished nodes are added and the
 * balancer is invoked via the CLI with the included host read from a
 * file (-f).
 */
@Test(timeout=100000)
public void testBalancerCliWithIncludeListWithPortsInAFile() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // 3 new nodes, 0 excluded, 1 included; CLI invocation, hosts in a file.
  NewNodeInfo nodes = new PortNumberBasedNodes(3, 0, 1);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, nodes, true, true);
}
/*
 * Test Balancer with Ram_Disk configured
 * One DN has two files on RAM_DISK, other DN has no files on RAM_DISK.
 * Then verify that the balancer does not migrate files on RAM_DISK across DN.
 */
@Test(timeout=300000)
public void testBalancerWithRamDisk() throws Exception {
  final int SEED = 0xFADED;
  final short REPL_FACT = 1;
  Configuration conf = new Configuration();

  // Size limits chosen so the RAM_DISK tier holds defaultRamDiskCapacity
  // full blocks plus a partial block.
  final int defaultRamDiskCapacity = 10;
  final long ramDiskStorageLimit =
      ((long) defaultRamDiskCapacity * DEFAULT_RAM_DISK_BLOCK_SIZE) +
      (DEFAULT_RAM_DISK_BLOCK_SIZE - 1);
  final long diskStorageLimit =
      ((long) defaultRamDiskCapacity * DEFAULT_RAM_DISK_BLOCK_SIZE) +
      (DEFAULT_RAM_DISK_BLOCK_SIZE - 1);

  initConfWithRamDisk(conf, ramDiskStorageLimit);

  cluster = new MiniDFSCluster
      .Builder(conf)
      .numDataNodes(1)
      .storageCapacities(new long[] { ramDiskStorageLimit, diskStorageLimit })
      .storageTypes(new StorageType[] { RAM_DISK, DEFAULT })
      .build();

  cluster.waitActive();
  // Create few files on RAM_DISK
  final String METHOD_NAME = GenericTestUtils.getMethodName();
  final Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
  final Path path2 = new Path("/" + METHOD_NAME + ".02.dat");

  DistributedFileSystem fs = cluster.getFileSystem();
  // NOTE(review): this local shadows the test class's 'client' field;
  // intentional here since only this method uses the DFSClient view.
  DFSClient client = fs.getClient();
  DFSTestUtil.createFile(fs, path1, true,
      DEFAULT_RAM_DISK_BLOCK_SIZE, 4 * DEFAULT_RAM_DISK_BLOCK_SIZE,
      DEFAULT_RAM_DISK_BLOCK_SIZE, REPL_FACT, SEED, true);
  DFSTestUtil.createFile(fs, path2, true,
      DEFAULT_RAM_DISK_BLOCK_SIZE, 1 * DEFAULT_RAM_DISK_BLOCK_SIZE,
      DEFAULT_RAM_DISK_BLOCK_SIZE, REPL_FACT, SEED, true);

  // Sleep for a short time to allow the lazy writer thread to do its job
  Thread.sleep(6 * 1000);

  // Add another fresh DN with the same type/capacity without files on RAM_DISK
  StorageType[][] storageTypes = new StorageType[][] {{RAM_DISK, DEFAULT}};
  long[][] storageCapacities = new long[][]{{ramDiskStorageLimit,
      diskStorageLimit}};
  cluster.startDataNodes(conf, REPL_FACT, storageTypes, true, null,
      null, null, storageCapacities, null, false, false, false, null);

  cluster.triggerHeartbeats();
  Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);

  // Run Balancer
  final BalancerParameters p = BalancerParameters.DEFAULT;
  final int r = Balancer.run(namenodes, p, conf);

  // Validate no RAM_DISK block should be moved
  assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);

  // Verify files are still on RAM_DISK
  DFSTestUtil.verifyFileReplicasOnStorageType(fs, client, path1, RAM_DISK);
  DFSTestUtil.verifyFileReplicasOnStorageType(fs, client, path2, RAM_DISK);
}
/**
* Check that the balancer exits when there is an unfinalized upgrade,
* unless it is explicitly told to run during an upgrade
* (the -runDuringUpgrade flag).
*/
@Test(timeout=300000)
public void testBalancerDuringUpgrade() throws Exception {
final int SEED = 0xFADED;
Configuration conf = new HdfsConfiguration();
conf.setLong(DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 500);
conf.setLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
final int BLOCK_SIZE = 1024*1024;
cluster = new MiniDFSCluster
.Builder(conf)
.numDataNodes(1)
.storageCapacities(new long[] { BLOCK_SIZE * 10 })
.storageTypes(new StorageType[] { DEFAULT })
.storagesPerDatanode(1)
.build();
cluster.waitActive();
// Create a file on the single DN
final String METHOD_NAME = GenericTestUtils.getMethodName();
final Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
DistributedFileSystem fs = cluster.getFileSystem();
DFSTestUtil.createFile(fs, path1, BLOCK_SIZE, BLOCK_SIZE * 2, BLOCK_SIZE,
(short) 1, SEED);
// Add another DN with the same capacity, cluster is now unbalanced
cluster.startDataNodes(conf, 1, true, null, null);
cluster.triggerHeartbeats();
Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
// Run balancer
final BalancerParameters p = BalancerParameters.DEFAULT;
// Enter safe mode only to prepare the rolling upgrade, then leave it.
fs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_ENTER);
fs.rollingUpgrade(HdfsConstants.RollingUpgradeAction.PREPARE);
fs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_LEAVE);
// Rolling upgrade should abort the balancer
assertEquals(ExitStatus.UNFINALIZED_UPGRADE.getExitCode(),
Balancer.run(namenodes, p, conf));
// Should work with the -runDuringUpgrade flag.
BalancerParameters.Builder b =
new BalancerParameters.Builder();
b.setRunDuringUpgrade(true);
final BalancerParameters runDuringUpgrade = b.build();
assertEquals(ExitStatus.SUCCESS.getExitCode(),
Balancer.run(namenodes, runDuringUpgrade, conf));
// Finalize the rolling upgrade
fs.rollingUpgrade(HdfsConstants.RollingUpgradeAction.FINALIZE);
// Should also work after finalization.
assertEquals(ExitStatus.SUCCESS.getExitCode(),
Balancer.run(namenodes, p, conf));
}
/**
* Test special case. Two replicas belong to same block should not in same node.
* We have 2 nodes.
* We have a block in (DN0,SSD) and (DN1,DISK).
* Replica in (DN0,SSD) should not be moved to (DN1,SSD).
* Otherwise DN1 has 2 replicas.
*/
@Test(timeout=100000)
public void testTwoReplicaShouldNotInSameDN() throws Exception {
final Configuration conf = new HdfsConfiguration();
int blockSize = 5 * 1024 * 1024 ;
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
conf.setLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1L);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
int numOfDatanodes =2;
// Mirrored capacities: DN0 has a big SSD / small DISK, DN1 the opposite,
// so the balancer is tempted to move SSD replicas from DN0 to DN1.
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(2)
.racks(new String[]{"/default/rack0", "/default/rack0"})
.storagesPerDatanode(2)
.storageTypes(new StorageType[][]{
{StorageType.SSD, StorageType.DISK},
{StorageType.SSD, StorageType.DISK}})
.storageCapacities(new long[][]{
{100 * blockSize, 20 * blockSize},
{20 * blockSize, 100 * blockSize}})
.build();
cluster.waitActive();
//set "/bar" directory with ONE_SSD storage policy.
DistributedFileSystem fs = cluster.getFileSystem();
Path barDir = new Path("/bar");
fs.mkdir(barDir,new FsPermission((short)777));
fs.setStoragePolicy(barDir, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
// Insert 30 blocks. So (DN0,SSD) and (DN1,DISK) are about half full,
// and (DN0,DISK) and (DN1,SSD) are about 15% full.
long fileLen = 30 * blockSize;
// fooFile has ONE_SSD policy. So
// (DN0,SSD) and (DN1,DISK) have 2 replicas belong to same block.
// (DN0,DISK) and (DN1,SSD) have 2 replicas belong to same block.
Path fooFile = new Path(barDir, "foo");
createFile(cluster, fooFile, fileLen, (short) numOfDatanodes, 0);
// update space info
cluster.triggerHeartbeats();
BalancerParameters p = BalancerParameters.DEFAULT;
Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
final int r = Balancer.run(namenodes, p, conf);
// Replica in (DN0,SSD) was not moved to (DN1,SSD), because (DN1,DISK)
// already has one. Otherwise DN1 will have 2 replicas.
// For same reason, no replicas were moved.
assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
}
/**
* Test running many balancer simultaneously.
*
* Case-1: First balancer is running. Now, running second one should get
* "Another balancer is running. Exiting.." IOException and fail immediately
*
* Case-2: When running second balancer 'balancer.id' file exists but the
* lease doesn't exists. Now, the second balancer should run successfully.
*/
@Test(timeout = 100000)
public void testManyBalancerSimultaneously() throws Exception {
final Configuration conf = new HdfsConfiguration();
initConf(conf);
// add an empty node with half of the capacities(4 * CAPACITY) & the same
// rack
long[] capacities = new long[] { 4 * CAPACITY };
String[] racks = new String[] { RACK0 };
long newCapacity = 2 * CAPACITY;
String newRack = RACK0;
LOG.info("capacities = " + long2String(capacities));
LOG.info("racks = " + Arrays.asList(racks));
LOG.info("newCapacity= " + newCapacity);
LOG.info("newRack = " + newRack);
LOG.info("useTool = " + false);
assertEquals(capacities.length, racks.length);
int numOfDatanodes = capacities.length;
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(capacities.length)
.racks(racks).simulatedCapacities(capacities).build();
cluster.waitActive();
client = NameNodeProxies.createProxy(conf,
cluster.getFileSystem(0).getUri(), ClientProtocol.class).getProxy();
long totalCapacity = sum(capacities);
// fill up the cluster to be 30% full
final long totalUsedSpace = totalCapacity * 3 / 10;
createFile(cluster, filePath, totalUsedSpace / numOfDatanodes,
(short) numOfDatanodes, 0);
// start up an empty node with the same capacity and on the same rack
cluster.startDataNodes(conf, 1, true, null, new String[] { newRack },
new long[] { newCapacity });
// Case1: Simulate first balancer by creating 'balancer.id' file. It
// will keep this file until the balancing operation is completed.
FileSystem fs = cluster.getFileSystem(0);
final FSDataOutputStream out = fs
.create(Balancer.BALANCER_ID_PATH, false);
out.writeBytes(InetAddress.getLocalHost().getHostName());
// hflush keeps the stream (and its lease) open, mimicking a live balancer.
out.hflush();
assertTrue("'balancer.id' file doesn't exist!",
fs.exists(Balancer.BALANCER_ID_PATH));
// start second balancer
final String[] args = { "-policy", "datanode" };
final Tool tool = new Cli();
tool.setConf(conf);
int exitCode = tool.run(args); // start balancing
assertEquals("Exit status code mismatches",
ExitStatus.IO_EXCEPTION.getExitCode(), exitCode);
// Case2: Release lease so that another balancer would be able to
// perform balancing.
out.close();
assertTrue("'balancer.id' file doesn't exist!",
fs.exists(Balancer.BALANCER_ID_PATH));
exitCode = tool.run(args); // start balancing
assertEquals("Exit status code mismatches",
ExitStatus.SUCCESS.getExitCode(), exitCode);
}
/**
* Balancer should not move blocks with size < minBlockSize, and the
* {@code -source} option should restrict which datanodes blocks are
* moved from.
*/
@Test(timeout=60000)
public void testMinBlockSizeAndSourceNodes() throws Exception {
final Configuration conf = new HdfsConfiguration();
initConf(conf);
final short replication = 3;
// Four tiny files of 10 bytes each; every block is below a
// min-block-size of 50 but above a min-block-size of 1.
final long[] lengths = {10, 10, 10, 10};
final long[] capacities = new long[replication];
final long totalUsed = capacities.length * sum(lengths);
Arrays.fill(capacities, 1000);
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(capacities.length)
.simulatedCapacities(capacities)
.build();
final DistributedFileSystem dfs = cluster.getFileSystem();
cluster.waitActive();
client = NameNodeProxies.createProxy(conf, dfs.getUri(),
ClientProtocol.class).getProxy();
// create a few small files, fully replicated across the initial nodes
for(int i = 0; i < lengths.length; i++) {
final long size = lengths[i];
final Path p = new Path("/file" + i + "_size" + size);
try(final OutputStream out = dfs.create(p)) {
for(int j = 0; j < size; j++) {
out.write(j);
}
}
}
// start up an empty node with the same capacity
cluster.startDataNodes(conf, capacities.length, true, null, null, capacities);
LOG.info("capacities = " + Arrays.toString(capacities));
LOG.info("totalUsedSpace= " + totalUsed);
LOG.info("lengths = " + Arrays.toString(lengths) + ", #=" + lengths.length);
// total capacity doubled because the same number of nodes was added again
waitForHeartBeat(totalUsed, 2*capacities[0]*capacities.length, client, cluster);
final Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
{ // run Balancer with min-block-size=50
final BalancerParameters p = Balancer.Cli.parse(new String[] {
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "1"
});
assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
assertEquals(p.getThreshold(), 1.0, 0.001);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 50);
// all blocks are smaller than 50 bytes, so nothing may move
final int r = Balancer.run(namenodes, p, conf);
assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
}
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1);
{ // run Balancer with empty nodes as source nodes
final Set<String> sourceNodes = new HashSet<>();
final List<DataNode> datanodes = cluster.getDataNodes();
// the nodes added after index capacities.length are the empty ones
for(int i = capacities.length; i < datanodes.size(); i++) {
sourceNodes.add(datanodes.get(i).getDisplayName());
}
final BalancerParameters p = Balancer.Cli.parse(new String[] {
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "1",
"-source", StringUtils.join(sourceNodes, ',')
});
assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
assertEquals(p.getThreshold(), 1.0, 0.001);
assertEquals(p.getSourceNodes(), sourceNodes);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 50);
// empty sources have nothing to offer
final int r = Balancer.run(namenodes, p, conf);
assertEquals(ExitStatus.NO_MOVE_BLOCK.getExitCode(), r);
}
{ // run Balancer with a filled node as a source node
final Set<String> sourceNodes = new HashSet<>();
final List<DataNode> datanodes = cluster.getDataNodes();
sourceNodes.add(datanodes.get(0).getDisplayName());
final BalancerParameters p = Balancer.Cli.parse(new String[] {
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "1",
"-source", StringUtils.join(sourceNodes, ',')
});
assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
assertEquals(p.getThreshold(), 1.0, 0.001);
assertEquals(p.getSourceNodes(), sourceNodes);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1);
// a single source with fully replicated blocks cannot move anything
final int r = Balancer.run(namenodes, p, conf);
assertEquals(ExitStatus.NO_MOVE_BLOCK.getExitCode(), r);
}
{ // run Balancer with all filled node as source nodes
final Set<String> sourceNodes = new HashSet<>();
final List<DataNode> datanodes = cluster.getDataNodes();
for(int i = 0; i < capacities.length; i++) {
sourceNodes.add(datanodes.get(i).getDisplayName());
}
final BalancerParameters p = Balancer.Cli.parse(new String[] {
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "1",
"-source", StringUtils.join(sourceNodes, ',')
});
assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
assertEquals(p.getThreshold(), 1.0, 0.001);
assertEquals(p.getSourceNodes(), sourceNodes);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1);
final int r = Balancer.run(namenodes, p, conf);
assertEquals(ExitStatus.SUCCESS.getExitCode(), r);
}
}
/**
* Entry point for external integration tests: configures the given conf
* for striped (erasure-coded) files and runs the shared balancer scenario.
*
* @param conf configuration to initialize and use for the mini cluster
*/
public void integrationTestWithStripedFile(Configuration conf) throws Exception {
initConfWithStripe(conf);
doTestBalancerWithStripedFile(conf);
}
/**
* Runs the striped-file balancer scenario as a JUnit test.
* Delegates to {@link #integrationTestWithStripedFile(Configuration)}
* instead of duplicating its two-line body, so the test and the
* integration entry point cannot drift apart.
*/
@Test(timeout = 100000)
public void testBalancerWithStripedFile() throws Exception {
Configuration conf = new Configuration();
integrationTestWithStripedFile(conf);
}
/**
* Shared body of the striped-file balancer tests: builds a cluster sized
* for the EC layout, writes a striped file, adds a fresh datanode, runs
* the balancer and verifies block-group placement is still valid.
*
* @param conf configuration already prepared by initConfWithStripe()
*/
private void doTestBalancerWithStripedFile(Configuration conf) throws Exception {
// two spare nodes beyond the minimum needed for one full block group
int numOfDatanodes = dataBlocks + parityBlocks + 2;
int numOfRacks = dataBlocks;
long capacity = 20 * DEFAULT_STRIPE_BLOCK_SIZE;
long[] capacities = new long[numOfDatanodes];
for (int i = 0; i < capacities.length; i++) {
capacities[i] = capacity;
}
String[] racks = new String[numOfDatanodes];
for (int i = 0; i < numOfDatanodes; i++) {
racks[i] = "/rack" + (i % numOfRacks);
}
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(numOfDatanodes)
.racks(racks)
.simulatedCapacities(capacities)
.build();
try {
cluster.waitActive();
client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0).getUri(),
ClientProtocol.class).getProxy();
// enable erasure coding on the root (null = default policy)
client.setErasureCodingPolicy("/", null);
long totalCapacity = sum(capacities);
// fill up the cluster with 30% data. It'll be 45% full plus parity.
long fileLen = totalCapacity * 3 / 10;
long totalUsedSpace = fileLen * (dataBlocks + parityBlocks) / dataBlocks;
FileSystem fs = cluster.getFileSystem(0);
DFSTestUtil.createFile(fs, filePath, fileLen, (short) 3, r.nextLong());
// verify locations of striped blocks
LocatedBlocks locatedBlocks = client.getBlockLocations(fileName, 0, fileLen);
StripedFileTestUtil.verifyLocatedStripedBlocks(locatedBlocks, groupSize);
// add one datanode
String newRack = "/rack" + (++numOfRacks);
cluster.startDataNodes(conf, 1, true, null,
new String[]{newRack}, null, new long[]{capacity});
totalCapacity += capacity;
cluster.triggerHeartbeats();
// run balancer and validate results
BalancerParameters p = BalancerParameters.DEFAULT;
runBalancer(conf, totalUsedSpace, totalCapacity, p, 0);
// verify locations of striped blocks
locatedBlocks = client.getBlockLocations(fileName, 0, fileLen);
StripedFileTestUtil.verifyLocatedStripedBlocks(locatedBlocks, groupSize);
} finally {
cluster.shutdown();
}
}
/**
* Test Balancer runs fine when logging in with a keytab in kerberized env.
* Reusing testUnknownDatanode here for basic functionality testing.
*/
@Test(timeout = 300000)
public void testBalancerWithKeytabs() throws Exception {
final Configuration conf = new HdfsConfiguration();
try {
// sets up MiniKdc, keytab file and SSL config; fields used below
initSecureConf(conf);
final UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
principal, keytabFile.getAbsolutePath());
// run the whole scenario as the keytab-authenticated user
ugi.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
// verify that balancer runs Ok.
testUnknownDatanode(conf);
// verify that UGI was logged in using keytab.
assertTrue(UserGroupInformation.isLoginKeytabBased());
return null;
}
});
} finally {
// Reset UGI so that other tests are not affected.
UserGroupInformation.reset();
UserGroupInformation.setConfiguration(new Configuration());
}
}
/**
* Standalone entry point that runs a small subset of the balancer tests
* outside of the JUnit runner.
*
* @param args command-line arguments; unused
*/
public static void main(String[] args) throws Exception {
TestBalancer balancerTest = new TestBalancer();
balancerTest.testBalancer0();
balancerTest.testBalancer1();
balancerTest.testBalancer2();
}
}
| hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.balancer;
import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY;
import static org.apache.hadoop.fs.StorageType.DEFAULT;
import static org.apache.hadoop.fs.StorageType.RAM_DISK;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_DATA_TRANSFER_PROTECTION_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BALANCER_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BALANCER_KERBEROS_PRINCIPAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BALANCER_KEYTAB_ENABLED_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BALANCER_KEYTAB_FILE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_BLOCK_PINNING_ENABLED;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_LAZY_WRITER_INTERVAL_SEC;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HTTP_POLICY_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import org.junit.AfterClass;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URI;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.TimeoutException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.NameNodeProxies;
import org.apache.hadoop.hdfs.StripedFileTestUtil;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.balancer.Balancer.Cli;
import org.apache.hadoop.hdfs.server.balancer.Balancer.Result;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicyWithUpgradeDomain;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementStatus;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.LazyPersistTestCase;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Tool;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Test;
/**
* This class tests if a balancer schedules tasks correctly.
*/
public class TestBalancer {
private static final Log LOG = LogFactory.getLog(TestBalancer.class);
static {
GenericTestUtils.setLogLevel(Balancer.LOG, Level.ALL);
}
// Simulated capacity of each initial datanode and the racks used by tests.
final static long CAPACITY = 5000L;
final static String RACK0 = "/rack0";
final static String RACK1 = "/rack1";
final static String RACK2 = "/rack2";
// Default test file most scenarios write into the cluster.
final private static String fileName = "/tmp.txt";
final static Path filePath = new Path(fileName);
// Kerberos/SSL fixtures; only initialized by initSecureConf() for the
// keytab-based test, null otherwise.
final static private String username = "balancer";
private static String principal;
private static File baseDir;
private static String keystoresDir;
private static String sslConfDir;
private static MiniKdc kdc;
private static File keytabFile;
// Mini cluster under test; torn down in shutdown() after each test.
private MiniDFSCluster cluster;
/** Tears down the mini cluster after each test, if one was started. */
@After
public void shutdown() throws Exception {
  if (cluster == null) {
    return;
  }
  cluster.shutdown();
  cluster = null;
}
// RPC proxy to the namenode; (re)created by each test that needs it.
ClientProtocol client;
// Max time to wait for the cluster to reach an expected state.
static final long TIMEOUT = 40000L; //msec
static final double CAPACITY_ALLOWED_VARIANCE = 0.005; // 0.5%
static final double BALANCE_ALLOWED_VARIANCE = 0.11; // 10%+delta
static final int DEFAULT_BLOCK_SIZE = 100;
static final int DEFAULT_RAM_DISK_BLOCK_SIZE = 5 * 1024 * 1024;
// Shared RNG for file contents / seeds; intentionally unseeded.
private static final Random r = new Random();
static {
initTestSetup();
}
/**
* One-time setup, also reused by other balancer test classes: disables
* writing the balancer id file through the NameNodeConnector.
*/
public static void initTestSetup() {
// do not create id file since it occupies the disk space
NameNodeConnector.setWrite2IdFile(false);
}
/**
* Common test configuration: tiny blocks, fast heartbeats/replication,
* a simulated dataset, and a balancer willing to move 1-byte blocks.
*/
static void initConf(Configuration conf) {
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DEFAULT_BLOCK_SIZE);
conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, DEFAULT_BLOCK_SIZE);
conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 500);
conf.setLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1L);
// fake datanode storage so capacity can be simulated cheaply
SimulatedFSDataset.setFactory(conf);
conf.setLong(DFSConfigKeys.DFS_BALANCER_MOVEDWINWIDTH_KEY, 2000L);
// test blocks are tiny; let the balancer consider all of them
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
}
/**
* Configuration for the RAM_DISK (lazy-persist) balancer test.
*
* @param conf configuration to populate
* @param ramDiskCapacity max locked memory, i.e. the RAM_DISK volume size
*/
static void initConfWithRamDisk(Configuration conf,
long ramDiskCapacity) {
conf.setLong(DFS_BLOCK_SIZE_KEY, DEFAULT_RAM_DISK_BLOCK_SIZE);
conf.setLong(DFS_DATANODE_MAX_LOCKED_MEMORY_KEY, ramDiskCapacity);
conf.setInt(DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC, 3);
conf.setLong(DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 500);
// lazy writer flushes RAM_DISK replicas to disk every second
conf.setInt(DFS_DATANODE_LAZY_WRITER_INTERVAL_SEC, 1);
LazyPersistTestCase.initCacheManipulator();
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
}
// Erasure-coding layout parameters used by the striped-file tests.
int dataBlocks = StripedFileTestUtil.NUM_DATA_BLOCKS;
int parityBlocks = StripedFileTestUtil.NUM_PARITY_BLOCKS;
// one full block group = data + parity blocks
int groupSize = dataBlocks + parityBlocks;
private final static int cellSize = StripedFileTestUtil.BLOCK_STRIPED_CELL_SIZE;
private final static int stripesPerBlock = 4;
static int DEFAULT_STRIPE_BLOCK_SIZE = cellSize * stripesPerBlock;
/**
* Configuration for the striped (erasure-coded) balancer tests: striped
* block size, fast heartbeats and a permissive balancer.
*/
static void initConfWithStripe(Configuration conf) {
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DEFAULT_STRIPE_BLOCK_SIZE);
// keep placement deterministic: ignore datanode load when choosing targets
conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_REPLICATION_CONSIDERLOAD_KEY, false);
conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
SimulatedFSDataset.setFactory(conf);
conf.setLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1L);
conf.setLong(DFSConfigKeys.DFS_BALANCER_MOVEDWINWIDTH_KEY, 2000L);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
}
/**
* Sets up a full Kerberos + SSL environment for the keytab-based balancer
* test: starts a MiniKdc, creates principals/keytab, configures HTTPS and
* SASL data transfer, then applies the common test config via initConf().
* Populates the static fields baseDir, kdc, keytabFile, principal,
* keystoresDir and sslConfDir that destroy() later cleans up.
*/
static void initSecureConf(Configuration conf) throws Exception {
baseDir = GenericTestUtils.getTestDir(TestBalancer.class.getSimpleName());
FileUtil.fullyDelete(baseDir);
assertTrue(baseDir.mkdirs());
Properties kdcConf = MiniKdc.createConf();
kdc = new MiniKdc(kdcConf, baseDir);
kdc.start();
SecurityUtil.setAuthenticationMethod(
UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
UserGroupInformation.setConfiguration(conf);
KerberosName.resetDefaultRealm();
assertTrue("Expected configuration to enable security",
UserGroupInformation.isSecurityEnabled());
keytabFile = new File(baseDir, username + ".keytab");
String keytab = keytabFile.getAbsolutePath();
// Windows will not reverse name lookup "127.0.0.1" to "localhost".
String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
principal = username + "/" + krbInstance + "@" + kdc.getRealm();
String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();
kdc.createPrincipal(keytabFile, username, username + "/" + krbInstance,
"HTTP/" + krbInstance);
// one shared principal/keytab for namenode, datanode and balancer
conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, principal);
conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, principal);
conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
conf.setBoolean(DFS_BALANCER_KEYTAB_ENABLED_KEY, true);
conf.set(DFS_BALANCER_ADDRESS_KEY, "localhost:0");
conf.set(DFS_BALANCER_KEYTAB_FILE_KEY, keytab);
conf.set(DFS_BALANCER_KERBEROS_PRINCIPAL_KEY, principal);
keystoresDir = baseDir.getAbsolutePath();
sslConfDir = KeyStoreTestUtil.getClasspathDir(TestBalancer.class);
KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
conf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
KeyStoreTestUtil.getClientSSLConfigFileName());
conf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
KeyStoreTestUtil.getServerSSLConfigFileName());
initConf(conf);
}
/**
* Class-level cleanup of the Kerberos/SSL fixtures created by
* {@link #initSecureConf}.  Those static fields are only set when the
* keytab test ran, so each is guarded against null; otherwise a run that
* skipped the secure test would NPE here during teardown.
*/
@AfterClass
public static void destroy() throws Exception {
  if (kdc != null) {
    kdc.stop();
  }
  if (baseDir != null) {
    FileUtil.fullyDelete(baseDir);
  }
  if (keystoresDir != null && sslConfDir != null) {
    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
  }
}
/**
* Creates a file of the given length on namenode {@code nnIndex} with
* random contents and waits until it reaches the requested replication.
*/
public static void createFile(MiniDFSCluster cluster, Path filePath, long
fileLen,
short replicationFactor, int nnIndex)
throws IOException, InterruptedException, TimeoutException {
FileSystem fs = cluster.getFileSystem(nnIndex);
DFSTestUtil.createFile(fs, filePath, fileLen,
replicationFactor, r.nextLong());
DFSTestUtil.waitReplication(fs, filePath, replicationFactor);
}
/* fill up a cluster with <code>numNodes</code> datanodes
 * whose used space to be <code>size</code>.
 * Returns copies of the blocks of the created file; the temporary cluster
 * is shut down before returning, so the caller can re-inject the blocks
 * into a freshly started cluster (see testUnevenDistribution).
 */
private ExtendedBlock[] generateBlocks(Configuration conf, long size,
short numNodes) throws IOException, InterruptedException, TimeoutException {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numNodes).build();
try {
cluster.waitActive();
client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0).getUri(),
ClientProtocol.class).getProxy();
// replication numNodes-1 leaves one node free per block
short replicationFactor = (short)(numNodes-1);
long fileLen = size/replicationFactor;
createFile(cluster , filePath, fileLen, replicationFactor, 0);
List<LocatedBlock> locatedBlocks = client.
getBlockLocations(fileName, 0, fileLen).getLocatedBlocks();
int numOfBlocks = locatedBlocks.size();
ExtendedBlock[] blocks = new ExtendedBlock[numOfBlocks];
for(int i=0; i<numOfBlocks; i++) {
// deep-copy so the blocks outlive the cluster shut down below
ExtendedBlock b = locatedBlocks.get(i).getBlock();
blocks[i] = new ExtendedBlock(b.getBlockPoolId(), b.getBlockId(), b
.getNumBytes(), b.getGenerationStamp());
}
return blocks;
} finally {
cluster.shutdown();
}
}
/* Distribute all blocks according to the given distribution.
 * Each of the replicationFactor replicas of every block is assigned to a
 * randomly chosen node that still has remaining quota in
 * <code>distribution</code>; the quota is decremented as replicas land.
 * Returns one per-node block report array.
 * NOTE(review): the retry loop assumes the distribution always has enough
 * positive remaining capacity for every replica; otherwise it would spin
 * forever — callers are expected to pass a consistent distribution.
 */
static Block[][] distributeBlocks(ExtendedBlock[] blocks,
short replicationFactor, final long[] distribution) {
// make a copy
long[] usedSpace = new long[distribution.length];
System.arraycopy(distribution, 0, usedSpace, 0, distribution.length);
List<List<Block>> blockReports =
new ArrayList<List<Block>>(usedSpace.length);
Block[][] results = new Block[usedSpace.length][];
for(int i=0; i<usedSpace.length; i++) {
blockReports.add(new ArrayList<Block>());
}
for(int i=0; i<blocks.length; i++) {
for(int j=0; j<replicationFactor; j++) {
// keep drawing random nodes until one with remaining quota is found
boolean notChosen = true;
while(notChosen) {
int chosenIndex = r.nextInt(usedSpace.length);
if( usedSpace[chosenIndex]>0 ) {
notChosen = false;
blockReports.get(chosenIndex).add(blocks[i].getLocalBlock());
usedSpace[chosenIndex] -= blocks[i].getNumBytes();
}
}
}
}
for(int i=0; i<usedSpace.length; i++) {
List<Block> nodeBlockList = blockReports.get(i);
results[i] = nodeBlockList.toArray(new Block[nodeBlockList.size()]);
}
return results;
}
/** @return the sum of all elements of {@code x} (0 for an empty array). */
static long sum(long[] x) {
  return Arrays.stream(x).sum();
}
/* we first start a cluster and fill the cluster up to a certain size.
 * then redistribute blocks according the required distribution.
 * Afterwards a balancer is running to balance the cluster.
 */
private void testUnevenDistribution(Configuration conf,
long distribution[], long capacities[], String[] racks) throws Exception {
int numDatanodes = distribution.length;
// all three parallel arrays must describe the same set of datanodes
if (capacities.length != numDatanodes || racks.length != numDatanodes) {
throw new IllegalArgumentException("Array length is not the same");
}
// calculate total space that need to be filled
final long totalUsedSpace = sum(distribution);
// fill the cluster
ExtendedBlock[] blocks = generateBlocks(conf, totalUsedSpace,
(short) numDatanodes);
// redistribute blocks
Block[][] blocksDN = distributeBlocks(
blocks, (short)(numDatanodes-1), distribution);
// restart the cluster: do NOT format the cluster
// (threshold 0 so the namenode leaves safe mode immediately)
conf.set(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, "0.0f");
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDatanodes)
.format(false)
.racks(racks)
.simulatedCapacities(capacities)
.build();
cluster.waitActive();
client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0).getUri(),
ClientProtocol.class).getProxy();
// inject the redistributed block reports into the simulated datanodes
for(int i = 0; i < blocksDN.length; i++)
cluster.injectBlocks(i, Arrays.asList(blocksDN[i]), null);
final long totalCapacity = sum(capacities);
runBalancer(conf, totalUsedSpace, totalCapacity);
cluster.shutdown();
}
/**
 * Wait until heartbeat gives expected results, within CAPACITY_ALLOWED_VARIANCE
 * (relative error), summed over all nodes. Polls every 100ms and times out
 * after TIMEOUT msec.
 * @param expectedUsedSpace expected cluster-wide used space, in bytes
 * @param expectedTotalSpace expected cluster-wide capacity, in bytes
 * @throws IOException - if getStats() fails
 * @throws TimeoutException if the stats do not converge within TIMEOUT msec
 */
static void waitForHeartBeat(long expectedUsedSpace,
    long expectedTotalSpace, ClientProtocol client, MiniDFSCluster cluster)
    throws IOException, TimeoutException {
  long timeout = TIMEOUT;
  long failtime = (timeout <= 0L) ? Long.MAX_VALUE
      : Time.monotonicNow() + timeout;
  while (true) {
    // status[0] is compared against total space, status[1] against used space
    long[] status = client.getStats();
    double totalSpaceVariance = Math.abs((double)status[0] - expectedTotalSpace)
        / expectedTotalSpace;
    double usedSpaceVariance = Math.abs((double)status[1] - expectedUsedSpace)
        / expectedUsedSpace;
    if (totalSpaceVariance < CAPACITY_ALLOWED_VARIANCE
        && usedSpaceVariance < CAPACITY_ALLOWED_VARIANCE)
      break; //done
    if (Time.monotonicNow() > failtime) {
      throw new TimeoutException("Cluster failed to reached expected values of "
          + "totalSpace (current: " + status[0]
          + ", expected: " + expectedTotalSpace
          + "), or usedSpace (current: " + status[1]
          + ", expected: " + expectedUsedSpace
          + "), in more than " + timeout + " msec.");
    }
    try {
      Thread.sleep(100L);
    } catch(InterruptedException ignored) {
      // keep polling; the failtime check above bounds the total wait
    }
  }
}
/**
 * Wait until balanced: each datanode gives utilization within
 * BALANCE_ALLOWED_VARIANCE of average.
 * Convenience overload that expects zero excluded nodes.
 * @throws IOException
 * @throws TimeoutException
 */
static void waitForBalancer(long totalUsedSpace, long totalCapacity,
    ClientProtocol client, MiniDFSCluster cluster, BalancerParameters p)
    throws IOException, TimeoutException {
  waitForBalancer(totalUsedSpace, totalCapacity, client, cluster, p, 0);
}
/**
 * Make sure that balancer can't move pinned blocks.
 * If favoredNodes are specified when creating a file, its blocks are pinned
 * there via the sticky bit, so adding an empty node must not trigger moves.
 * @throws Exception
 */
@Test(timeout=100000)
public void testBalancerWithPinnedBlocks() throws Exception {
  // This test assumes stick-bit based block pin mechanism available only
  // in Linux/Unix. It can be unblocked on Windows when HDFS-7759 is ready to
  // provide a different mechanism for Windows.
  assumeNotWindows();
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  conf.setBoolean(DFS_DATANODE_BLOCK_PINNING_ENABLED, true);
  long[] capacities = new long[] { CAPACITY, CAPACITY };
  String[] hosts = {"host0", "host1"};
  String[] racks = { RACK0, RACK1 };
  int numOfDatanodes = capacities.length;
  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(capacities.length)
      .hosts(hosts).racks(racks).simulatedCapacities(capacities).build();
  cluster.waitActive();
  client = NameNodeProxies.createProxy(conf,
      cluster.getFileSystem(0).getUri(), ClientProtocol.class).getProxy();
  // fill up the cluster to be 80% full
  long totalCapacity = sum(capacities);
  long totalUsedSpace = totalCapacity * 8 / 10;
  InetSocketAddress[] favoredNodes = new InetSocketAddress[numOfDatanodes];
  for (int i = 0; i < favoredNodes.length; i++) {
    // DFSClient will attempt reverse lookup. In case it resolves
    // "127.0.0.1" to "localhost", we manually specify the hostname.
    int port = cluster.getDataNodes().get(i).getXferAddress().getPort();
    favoredNodes[i] = new InetSocketAddress(hosts[i], port);
  }
  // create the file with favoredNodes so every block is pinned to the
  // two original datanodes
  DFSTestUtil.createFile(cluster.getFileSystem(0), filePath, false, 1024,
      totalUsedSpace / numOfDatanodes, DEFAULT_BLOCK_SIZE,
      (short) numOfDatanodes, 0, false, favoredNodes);
  // start up an empty node with the same capacity
  cluster.startDataNodes(conf, 1, true, null, new String[] { RACK2 },
      new long[] { CAPACITY });
  totalCapacity += CAPACITY;
  // run balancer and validate results
  waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
  // start rebalancing
  Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
  int r = Balancer.run(namenodes, BalancerParameters.DEFAULT, conf);
  // pinned blocks cannot move, so the balancer must report no progress
  assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
}
/**
 * Verify balancer won't violate the default (rack-aware) block placement
 * policy: after adding "host2" on RACK1 and balancing, every block's
 * placement must still satisfy the policy.
 * @throws Exception
 */
@Test(timeout=100000)
public void testRackPolicyAfterBalance() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // two existing datanodes with equal capacity on distinct racks
  final long[] nodeCapacities = { CAPACITY, CAPACITY };
  final String[] nodeHosts = { "host0", "host1" };
  final String[] nodeRacks = { RACK0, RACK1 };
  runBalancerAndVerifyBlockPlacmentPolicy(conf, nodeCapacities, nodeHosts,
      nodeRacks, null, CAPACITY, "host2", RACK1, null);
}
/**
 * Verify balancer won't violate the upgrade-domain block placement policy:
 * with three nodes in upgrade domains ud0..ud2, add "host3" in ud2 and
 * check placement after balancing.
 * @throws Exception
 */
@Test(timeout=100000)
public void testUpgradeDomainPolicyAfterBalance() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // switch the namenode to the upgrade-domain-aware placement policy
  conf.setClass(DFSConfigKeys.DFS_BLOCK_REPLICATOR_CLASSNAME_KEY,
      BlockPlacementPolicyWithUpgradeDomain.class,
      BlockPlacementPolicy.class);
  final long[] nodeCapacities = { CAPACITY, CAPACITY, CAPACITY };
  final String[] nodeHosts = { "host0", "host1", "host2" };
  final String[] nodeRacks = { RACK0, RACK1, RACK1 };
  final String[] upgradeDomains = { "ud0", "ud1", "ud2" };
  runBalancerAndVerifyBlockPlacmentPolicy(conf, nodeCapacities, nodeHosts,
      nodeRacks, upgradeDomains, CAPACITY, "host3", RACK2, "ud2");
}
/**
 * Start a cluster, fill it to 80%, add one new datanode, run the balancer,
 * and assert that every block of the test file still satisfies the active
 * block placement policy.
 *
 * NOTE(review): method name keeps the historical "Placment" spelling; it is
 * called elsewhere in this class, so renaming would touch all call sites.
 *
 * @param UDs upgrade domains for the original nodes, or null to skip
 * @param newUD upgrade domain for the new node, or null to skip
 */
private void runBalancerAndVerifyBlockPlacmentPolicy(Configuration conf,
    long[] capacities, String[] hosts, String[] racks, String[] UDs,
    long newCapacity, String newHost, String newRack, String newUD)
    throws Exception {
  int numOfDatanodes = capacities.length;
  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(capacities.length)
      .hosts(hosts).racks(racks).simulatedCapacities(capacities).build();
  DatanodeManager dm = cluster.getNamesystem().getBlockManager().
      getDatanodeManager();
  // tag the original nodes with their upgrade domains, if requested
  if (UDs != null) {
    for(int i = 0; i < UDs.length; i++) {
      DatanodeID datanodeId = cluster.getDataNodes().get(i).getDatanodeId();
      dm.getDatanode(datanodeId).setUpgradeDomain(UDs[i]);
    }
  }
  try {
    cluster.waitActive();
    client = NameNodeProxies.createProxy(conf,
        cluster.getFileSystem(0).getUri(), ClientProtocol.class).getProxy();
    // fill up the cluster to be 80% full
    long totalCapacity = sum(capacities);
    long totalUsedSpace = totalCapacity * 8 / 10;
    final long fileSize = totalUsedSpace / numOfDatanodes;
    DFSTestUtil.createFile(cluster.getFileSystem(0), filePath, false, 1024,
        fileSize, DEFAULT_BLOCK_SIZE, (short) numOfDatanodes, 0, false);
    // start up an empty node with the same capacity on the same rack as the
    // pinned host.
    cluster.startDataNodes(conf, 1, true, null, new String[] { newRack },
        new String[] { newHost }, new long[] { newCapacity });
    if (newUD != null) {
      DatanodeID newId = cluster.getDataNodes().get(
          numOfDatanodes).getDatanodeId();
      dm.getDatanode(newId).setUpgradeDomain(newUD);
    }
    totalCapacity += newCapacity;
    // run balancer and validate results
    waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
    // start rebalancing
    Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
    Balancer.run(namenodes, BalancerParameters.DEFAULT, conf);
    // every located block must still satisfy the placement policy
    // NOTE(review): fileName is assumed to be the string path backing
    // filePath — both are fields defined elsewhere in this class.
    BlockPlacementPolicy placementPolicy =
        cluster.getNamesystem().getBlockManager().getBlockPlacementPolicy();
    List<LocatedBlock> locatedBlocks = client.
        getBlockLocations(fileName, 0, fileSize).getLocatedBlocks();
    for (LocatedBlock locatedBlock : locatedBlocks) {
      BlockPlacementStatus status = placementPolicy.verifyBlockPlacement(
          locatedBlock.getLocations(), numOfDatanodes);
      assertTrue(status.isPlacementPolicySatisfied());
    }
  } finally {
    cluster.shutdown();
  }
}
/**
 * Wait until balanced: each datanode gives utilization within
 * BALANCE_ALLOWED_VARIANCE of average. Excluded/non-included nodes must stay
 * empty and their count must equal {@code expectedExcludedNodes}.
 * Times out after TIMEOUT msec.
 * @throws IOException
 * @throws TimeoutException
 */
static void waitForBalancer(long totalUsedSpace, long totalCapacity,
    ClientProtocol client, MiniDFSCluster cluster, BalancerParameters p,
    int expectedExcludedNodes) throws IOException, TimeoutException {
  long timeout = TIMEOUT;
  long failtime = (timeout <= 0L) ? Long.MAX_VALUE
      : Time.monotonicNow() + timeout;
  // only nodes that participate in balancing count towards capacity
  if (!p.getIncludedNodes().isEmpty()) {
    totalCapacity = p.getIncludedNodes().size() * CAPACITY;
  }
  if (!p.getExcludedNodes().isEmpty()) {
    totalCapacity -= p.getExcludedNodes().size() * CAPACITY;
  }
  final double avgUtilization = ((double)totalUsedSpace) / totalCapacity;
  boolean balanced;
  do {
    DatanodeInfo[] datanodeReport =
        client.getDatanodeReport(DatanodeReportType.ALL);
    assertEquals(datanodeReport.length, cluster.getDataNodes().size());
    balanced = true;
    int actualExcludedNodeCount = 0;
    for (DatanodeInfo datanode : datanodeReport) {
      double nodeUtilization = ((double)datanode.getDfsUsed())
          / datanode.getCapacity();
      // excluded nodes must not have received any data
      if (Dispatcher.Util.isExcluded(p.getExcludedNodes(), datanode)) {
        assertTrue(nodeUtilization == 0);
        actualExcludedNodeCount++;
        continue;
      }
      // nodes outside the include list are effectively excluded too
      if (!Dispatcher.Util.isIncluded(p.getIncludedNodes(), datanode)) {
        assertTrue(nodeUtilization == 0);
        actualExcludedNodeCount++;
        continue;
      }
      if (Math.abs(avgUtilization - nodeUtilization) > BALANCE_ALLOWED_VARIANCE) {
        balanced = false;
        if (Time.monotonicNow() > failtime) {
          throw new TimeoutException(
              "Rebalancing expected avg utilization to become "
              + avgUtilization + ", but on datanode " + datanode
              + " it remains at " + nodeUtilization
              + " after more than " + TIMEOUT + " msec.");
        }
        try {
          Thread.sleep(100);
        } catch (InterruptedException ignored) {
          // keep polling; the failtime check above bounds the total wait
        }
        break;
      }
    }
    assertEquals(expectedExcludedNodes,actualExcludedNodeCount);
  } while (!balanced);
}
/** Render a long[] as "[a, b, c]", or "&lt;empty&gt;" for a zero-length array. */
String long2String(long[] array) {
  if (array.length == 0) {
    return "<empty>";
  }
  StringBuilder sb = new StringBuilder("[");
  for (int i = 0; i < array.length; i++) {
    if (i > 0) {
      sb.append(", ");
    }
    sb.append(array[i]);
  }
  return sb.append("]").toString();
}
/**
 * Class which contains information about the
 * new nodes to be added to the cluster for balancing.
 */
static abstract class NewNodeInfo {
  // host names to be passed as the balancer's exclude list
  Set<String> nodesToBeExcluded = new HashSet<String>();
  // host names to be passed as the balancer's include list
  Set<String> nodesToBeIncluded = new HashSet<String>();
  /** Host names of the new nodes, or null when only a count is specified. */
  abstract String[] getNames();
  /** Number of new datanodes to start. */
  abstract int getNumberofNewNodes();
  /** Number of nodes that go on the include list. */
  abstract int getNumberofIncludeNodes();
  /** Number of nodes that go on the exclude list. */
  abstract int getNumberofExcludeNodes();
  public Set<String> getNodesToBeIncluded() {
    return nodesToBeIncluded;
  }
  public Set<String> getNodesToBeExcluded() {
    return nodesToBeExcluded;
  }
}
/**
 * The host names of new nodes are specified explicitly; include/exclude
 * sets are supplied by the caller.
 */
static class HostNameBasedNodes extends NewNodeInfo {
  String[] hostnames;
  public HostNameBasedNodes(String[] hostnames,
      Set<String> nodesToBeExcluded, Set<String> nodesToBeIncluded) {
    this.hostnames = hostnames;
    this.nodesToBeExcluded = nodesToBeExcluded;
    this.nodesToBeIncluded = nodesToBeIncluded;
  }
  @Override
  String[] getNames() {
    return hostnames;
  }
  @Override
  int getNumberofNewNodes() {
    return hostnames.length;
  }
  @Override
  int getNumberofIncludeNodes() {
    return nodesToBeIncluded.size();
  }
  @Override
  int getNumberofExcludeNodes() {
    return nodesToBeExcluded.size();
  }
}
/**
 * The number of data nodes to be started are specified.
 * The data nodes will have same host name, but different port numbers.
 * getNames() returns null, signalling that hosts are not named explicitly.
 */
static class PortNumberBasedNodes extends NewNodeInfo {
  int newNodes;
  int excludeNodes;
  int includeNodes;
  public PortNumberBasedNodes(int newNodes, int excludeNodes, int includeNodes) {
    this.newNodes = newNodes;
    this.excludeNodes = excludeNodes;
    this.includeNodes = includeNodes;
  }
  @Override
  String[] getNames() {
    // no explicit host names — nodes are distinguished by port
    return null;
  }
  @Override
  int getNumberofNewNodes() {
    return newNodes;
  }
  @Override
  int getNumberofIncludeNodes() {
    return includeNodes;
  }
  @Override
  int getNumberofExcludeNodes() {
    return excludeNodes;
  }
}
/** Convenience overload: no new-node specification and no host files. */
private void doTest(Configuration conf, long[] capacities, String[] racks,
    long newCapacity, String newRack, boolean useTool) throws Exception {
  doTest(conf, capacities, racks, newCapacity, newRack, null, useTool, false);
}
/** This test start a cluster with specified number of nodes,
 * and fills it to be 30% full (with a single file replicated identically
 * to all datanodes);
 * It then adds one new empty node and starts balancing.
 *
 * @param conf - configuration
 * @param capacities - array of capacities of original nodes in cluster
 * @param racks - array of racks for original nodes in cluster
 * @param newCapacity - new node's capacity
 * @param newRack - new node's rack
 * @param nodes - information about new nodes to be started.
 * @param useTool - if true run test via Cli with command-line argument
 * parsing, etc. Otherwise invoke balancer API directly.
 * @param useFile - if true, the hosts to included or excluded will be stored in a
 * file and then later read from the file.
 * @throws Exception
 */
private void doTest(Configuration conf, long[] capacities,
    String[] racks, long newCapacity, String newRack, NewNodeInfo nodes,
    boolean useTool, boolean useFile) throws Exception {
  LOG.info("capacities = " + long2String(capacities));
  LOG.info("racks      = " +  Arrays.asList(racks));
  LOG.info("newCapacity= " + newCapacity);
  LOG.info("newRack    = " + newRack);
  LOG.info("useTool    = " + useTool);
  assertEquals(capacities.length, racks.length);
  int numOfDatanodes = capacities.length;
  cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(capacities.length)
      .racks(racks)
      .simulatedCapacities(capacities)
      .build();
  try {
    cluster.waitActive();
    client = NameNodeProxies.createProxy(conf,
        cluster.getFileSystem(0).getUri(), ClientProtocol.class).getProxy();
    long totalCapacity = sum(capacities);
    // fill up the cluster to be 30% full
    long totalUsedSpace = totalCapacity*3/10;
    createFile(cluster, filePath, totalUsedSpace / numOfDatanodes,
        (short) numOfDatanodes, 0);
    if (nodes == null) { // there is no specification of new nodes.
      // start up an empty node with the same capacity and on the same rack
      cluster.startDataNodes(conf, 1, true, null,
          new String[]{newRack}, null,new long[]{newCapacity});
      totalCapacity += newCapacity;
    } else {
      //if running a test with "include list", include original nodes as well
      if (nodes.getNumberofIncludeNodes()>0) {
        for (DataNode dn: cluster.getDataNodes())
          nodes.getNodesToBeIncluded().add(dn.getDatanodeId().getHostName());
      }
      String[] newRacks = new String[nodes.getNumberofNewNodes()];
      long[] newCapacities = new long[nodes.getNumberofNewNodes()];
      for (int i=0; i < nodes.getNumberofNewNodes(); i++) {
        newRacks[i] = newRack;
        newCapacities[i] = newCapacity;
      }
      // if host names are specified for the new nodes to be created.
      if (nodes.getNames() != null) {
        cluster.startDataNodes(conf, nodes.getNumberofNewNodes(), true, null,
            newRacks, nodes.getNames(), newCapacities);
        totalCapacity += newCapacity*nodes.getNumberofNewNodes();
      } else {  // host names are not specified
        cluster.startDataNodes(conf, nodes.getNumberofNewNodes(), true, null,
            newRacks, null, newCapacities);
        totalCapacity += newCapacity*nodes.getNumberofNewNodes();
        //populate the include nodes (the most recently started datanodes)
        if (nodes.getNumberofIncludeNodes() > 0) {
          int totalNodes = cluster.getDataNodes().size();
          for (int i=0; i < nodes.getNumberofIncludeNodes(); i++) {
            nodes.getNodesToBeIncluded().add (cluster.getDataNodes().get(
                totalNodes-1-i).getDatanodeId().getXferAddr());
          }
        }
        //populate the exclude nodes (the most recently started datanodes)
        if (nodes.getNumberofExcludeNodes() > 0) {
          int totalNodes = cluster.getDataNodes().size();
          for (int i=0; i < nodes.getNumberofExcludeNodes(); i++) {
            nodes.getNodesToBeExcluded().add (cluster.getDataNodes().get(
                totalNodes-1-i).getDatanodeId().getXferAddr());
          }
        }
      }
    }
    // build balancer parameters from the include/exclude specification
    BalancerParameters.Builder pBuilder =
        new BalancerParameters.Builder();
    if (nodes != null) {
      pBuilder.setExcludedNodes(nodes.getNodesToBeExcluded());
      pBuilder.setIncludedNodes(nodes.getNodesToBeIncluded());
      pBuilder.setRunDuringUpgrade(false);
    }
    BalancerParameters p = pBuilder.build();
    // nodes outside the include list count as excluded too
    int expectedExcludedNodes = 0;
    if (nodes != null) {
      if (!nodes.getNodesToBeExcluded().isEmpty()) {
        expectedExcludedNodes = nodes.getNodesToBeExcluded().size();
      } else if (!nodes.getNodesToBeIncluded().isEmpty()) {
        expectedExcludedNodes =
            cluster.getDataNodes().size() - nodes.getNodesToBeIncluded().size();
      }
    }
    // run balancer and validate results
    if (useTool) {
      runBalancerCli(conf, totalUsedSpace, totalCapacity, p, useFile, expectedExcludedNodes);
    } else {
      runBalancer(conf, totalUsedSpace, totalCapacity, p, expectedExcludedNodes);
    }
  } finally {
    cluster.shutdown();
  }
}
/** Run the balancer with default parameters and no excluded nodes. */
private void runBalancer(Configuration conf, long totalUsedSpace,
    long totalCapacity) throws Exception {
  runBalancer(conf, totalUsedSpace, totalCapacity,
      BalancerParameters.DEFAULT, 0);
}
/**
 * Run the balancer against the current cluster and verify its exit code,
 * then wait until the cluster is balanced. When the configuration allows
 * zero concurrent moves, the balancer is expected to report no progress
 * and the balance check is skipped.
 */
private void runBalancer(Configuration conf, long totalUsedSpace,
    long totalCapacity, BalancerParameters p, int excludedNodes)
    throws Exception {
  waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
  // start rebalancing
  Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
  final int exitCode = runBalancer(namenodes, p, conf);
  final int maxConcurrentMoves = conf.getInt(
      DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY,
      DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_DEFAULT);
  if (maxConcurrentMoves == 0) {
    // no mover threads — the balancer cannot make progress
    assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), exitCode);
    return;
  }
  assertEquals(ExitStatus.SUCCESS.getExitCode(), exitCode);
  waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
  LOG.info(" .");
  waitForBalancer(totalUsedSpace, totalCapacity, client, cluster, p, excludedNodes);
}
/**
 * Drive the balancer one iteration at a time across all namenodes until no
 * connector reports IN_PROGRESS, asserting that every iteration after the
 * first actually moved bytes. Returns the first non-success exit code, or
 * SUCCESS when all connectors finish.
 */
private static int runBalancer(Collection<URI> namenodes,
    final BalancerParameters p,
    Configuration conf) throws IOException, InterruptedException {
  // wait long enough between iterations for heartbeats and replication
  // work to be picked up (2 heartbeat periods + 1 replication interval)
  final long sleeptime =
      conf.getLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY,
          DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT) * 2000 +
      conf.getLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY,
          DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_DEFAULT) * 1000;
  LOG.info("namenodes  = " + namenodes);
  LOG.info("parameters = " + p);
  LOG.info("Print stack trace", new Throwable());
  System.out.println("Time Stamp               Iteration#  Bytes Already Moved  Bytes Left To Move  Bytes Being Moved");
  List<NameNodeConnector> connectors = Collections.emptyList();
  try {
    connectors = NameNodeConnector.newNameNodeConnectors(namenodes,
        Balancer.class.getSimpleName(), Balancer.BALANCER_ID_PATH, conf,
        BalancerParameters.DEFAULT.getMaxIdleIteration());
    boolean done = false;
    for(int iteration = 0; !done; iteration++) {
      done = true;
      // randomize namenode order each round
      Collections.shuffle(connectors);
      for(NameNodeConnector nnc : connectors) {
        final Balancer b = new Balancer(nnc, p, conf);
        final Result r = b.runOneIteration();
        r.print(iteration, System.out);
        // clean all lists
        b.resetData(conf);
        if (r.exitStatus == ExitStatus.IN_PROGRESS) {
          done = false;
        } else if (r.exitStatus != ExitStatus.SUCCESS) {
          //must be an error status, return.
          return r.exitStatus.getExitCode();
        } else {
          // on SUCCESS after the first iteration, something must have moved
          if (iteration > 0) {
            assertTrue(r.bytesAlreadyMoved > 0);
          }
        }
      }
      if (!done) {
        Thread.sleep(sleeptime);
      }
    }
  } finally {
    // always close the connectors, even on early return or error
    for(NameNodeConnector nnc : connectors) {
      IOUtils.cleanup(LOG, nnc);
    }
  }
  return ExitStatus.SUCCESS.getExitCode();
}
/**
 * Run the balancer through its command-line interface, then wait until the
 * cluster is balanced. When {@code useFile} is true, the include/exclude
 * host lists are written to temporary files and passed via {@code -f};
 * the files are always deleted, even when balancing or verification fails.
 */
private void runBalancerCli(Configuration conf, long totalUsedSpace,
    long totalCapacity, BalancerParameters p, boolean useFile,
    int expectedExcludedNodes) throws Exception {
  waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
  List <String> args = new ArrayList<String>();
  args.add("-policy");
  args.add("datanode");
  File excludeHostsFile = null;
  File includeHostsFile = null;
  try {
    if (!p.getExcludedNodes().isEmpty()) {
      args.add("-exclude");
      if (useFile) {
        excludeHostsFile = writeHostsFile("exclude-hosts-file",
            p.getExcludedNodes());
        args.add("-f");
        args.add("exclude-hosts-file");
      } else {
        args.add(StringUtils.join(p.getExcludedNodes(), ','));
      }
    }
    if (!p.getIncludedNodes().isEmpty()) {
      args.add("-include");
      if (useFile) {
        includeHostsFile = writeHostsFile("include-hosts-file",
            p.getIncludedNodes());
        args.add("-f");
        args.add("include-hosts-file");
      } else {
        args.add(StringUtils.join(p.getIncludedNodes(), ','));
      }
    }
    final Tool tool = new Cli();
    tool.setConf(conf);
    final int r = tool.run(args.toArray(new String[0])); // start rebalancing
    assertEquals("Tools should exit 0 on success", 0, r);
    waitForHeartBeat(totalUsedSpace, totalCapacity, client, cluster);
    LOG.info("Rebalancing with default ctor.");
    waitForBalancer(totalUsedSpace, totalCapacity, client, cluster, p,
        expectedExcludedNodes);
  } finally {
    // clean up host files even when balancing or verification failed
    if (excludeHostsFile != null && excludeHostsFile.exists()) {
      excludeHostsFile.delete();
    }
    if (includeHostsFile != null && includeHostsFile.exists()) {
      includeHostsFile.delete();
    }
  }
}

/** Write one host per line to the named file; the writer is closed even on error. */
private static File writeHostsFile(String name, Set<String> hosts)
    throws IOException {
  File hostsFile = new File(name);
  PrintWriter pw = new PrintWriter(hostsFile);
  try {
    for (String host : hosts) {
      pw.write( host + "\n");
    }
  } finally {
    pw.close();
  }
  return hostsFile;
}
/** One-node cluster test: add an empty node with half of CAPACITY on the same rack. */
private void oneNodeTest(Configuration conf, boolean useTool) throws Exception {
  // add an empty node with half of the CAPACITY & the same rack
  doTest(conf, new long[]{CAPACITY}, new String[]{RACK0}, CAPACITY/2,
      RACK0, useTool);
}
/** Two-node cluster test: add an empty node with full CAPACITY on a new rack. */
private void twoNodeTest(Configuration conf) throws Exception {
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, false);
}
/** Test using a user-supplied conf (entry point for external integration tests). */
public void integrationTest(Configuration conf) throws Exception {
  initConf(conf);
  oneNodeTest(conf, false);
}
/** Runs the unknown-datanode scenario with the default test configuration. */
@Test(timeout = 100000)
public void testUnknownDatanodeSimple() throws Exception {
  Configuration conf = new HdfsConfiguration();
  initConf(conf);
  testUnknownDatanode(conf);
}
/* we first start a cluster and fill the cluster up to a certain size.
 * then redistribute blocks according the required distribution.
 * Then we start an empty datanode.
 * Afterwards a balancer is run to balance the cluster.
 * A partially filled datanode is excluded during balancing.
 * This triggers a situation where one of the block's location is unknown.
 */
private void testUnknownDatanode(Configuration conf)
    throws IOException, InterruptedException, TimeoutException {
  long distribution[] = new long[] {50*CAPACITY/100, 70*CAPACITY/100, 0*CAPACITY/100};
  long capacities[] = new long[]{CAPACITY, CAPACITY, CAPACITY};
  String racks[] = new String[] {RACK0, RACK1, RACK1};
  int numDatanodes = distribution.length;
  if (capacities.length != numDatanodes || racks.length != numDatanodes) {
    throw new IllegalArgumentException("Array length is not the same");
  }
  // calculate total space that need to be filled
  final long totalUsedSpace = sum(distribution);
  // fill the cluster
  ExtendedBlock[] blocks = generateBlocks(conf, totalUsedSpace,
      (short) numDatanodes);
  // redistribute blocks
  Block[][] blocksDN = distributeBlocks(
      blocks, (short)(numDatanodes-1), distribution);
  // restart the cluster: do NOT format the cluster
  conf.set(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, "0.0f");
  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDatanodes)
      .format(false)
      .racks(racks)
      .simulatedCapacities(capacities)
      .build();
  cluster.waitActive();
  client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0).getUri(),
      ClientProtocol.class).getProxy();
  // inject one block report per original datanode (was a hard-coded 3;
  // using blocksDN.length keeps this correct if the distribution changes)
  for (int i = 0; i < blocksDN.length; i++) {
    cluster.injectBlocks(i, Arrays.asList(blocksDN[i]), null);
  }
  // start one empty datanode and let the namenode learn about it
  cluster.startDataNodes(conf, 1, true, null,
      new String[]{RACK0}, null,new long[]{CAPACITY});
  cluster.triggerHeartbeats();
  Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
  // exclude the first (partially filled) datanode from balancing
  Set<String> datanodes = new HashSet<String>();
  datanodes.add(cluster.getDataNodes().get(0).getDatanodeId().getHostName());
  BalancerParameters.Builder pBuilder =
      new BalancerParameters.Builder();
  pBuilder.setExcludedNodes(datanodes);
  pBuilder.setRunDuringUpgrade(false);
  final int r = Balancer.run(namenodes, pBuilder.build(), conf);
  assertEquals(ExitStatus.SUCCESS.getExitCode(), r);
}
/**
 * Test parse method in Balancer#Cli class with threshold value out of
 * boundaries: values must lie in (0, 100]; 0 and 101 are rejected with a
 * specific message.
 */
@Test(timeout=100000)
public void testBalancerCliParseWithThresholdOutOfBoundaries() {
  String parameters[] = new String[] { "-threshold", "0" };
  String reason = "IllegalArgumentException is expected when threshold value"
      + " is out of boundary.";
  try {
    Balancer.Cli.parse(parameters);
    fail(reason);
  } catch (IllegalArgumentException e) {
    assertEquals("Number out of range: threshold = 0.0", e.getMessage());
  }
  parameters = new String[] { "-threshold", "101" };
  try {
    Balancer.Cli.parse(parameters);
    fail(reason);
  } catch (IllegalArgumentException e) {
    assertEquals("Number out of range: threshold = 101.0", e.getMessage());
  }
}
/** Test a cluster with even distribution,
 * then a new empty node is added to the cluster*/
@Test(timeout=100000)
public void testBalancer0() throws Exception {
  testBalancer0Internal(new HdfsConfiguration());
}
/** Runs both the one-node and two-node even-distribution scenarios. */
void testBalancer0Internal(Configuration conf) throws Exception {
  initConf(conf);
  oneNodeTest(conf, false);
  twoNodeTest(conf);
}
/** Test unevenly distributed cluster */
@Test(timeout=100000)
public void testBalancer1() throws Exception {
  testBalancer1Internal(new HdfsConfiguration());
}
/** Two equal-capacity nodes at 50% and 10% usage on distinct racks. */
void testBalancer1Internal(Configuration conf) throws Exception {
  initConf(conf);
  testUnevenDistribution(conf,
      new long[]{50 * CAPACITY / 100, 10 * CAPACITY / 100},
      new long[]{CAPACITY, CAPACITY},
      new String[]{RACK0, RACK1});
}
/** Zero concurrent moves is an invalid configuration and must be rejected. */
@Test(expected=HadoopIllegalArgumentException.class)
public void testBalancerWithZeroThreadsForMove() throws Exception {
  Configuration conf = new HdfsConfiguration();
  conf.setInt(DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY, 0);
  testBalancer1Internal (conf);
}
/** A non-default positive mover thread count must balance successfully. */
@Test(timeout=100000)
public void testBalancerWithNonZeroThreadsForMove() throws Exception {
  Configuration conf = new HdfsConfiguration();
  conf.setInt(DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY, 8);
  testBalancer1Internal(conf);
}
/** Balancing via the default Balancer constructor path. */
@Test(timeout=100000)
public void testBalancer2() throws Exception {
  testBalancer2Internal(new HdfsConfiguration());
}
/** Two even nodes plus one new empty node on a new rack. */
void testBalancer2Internal(Configuration conf) throws Exception {
  initConf(conf);
  testBalancerDefaultConstructor(conf, new long[]{CAPACITY, CAPACITY},
      new String[]{RACK0, RACK1}, CAPACITY, RACK2);
}
/**
 * Start a cluster, fill it to 30%, add one empty node with the given
 * capacity/rack, then run the balancer with default parameters and wait
 * for the cluster to balance.
 */
private void testBalancerDefaultConstructor(Configuration conf,
    long[] capacities, String[] racks, long newCapacity, String newRack)
    throws Exception {
  int numOfDatanodes = capacities.length;
  assertEquals(numOfDatanodes, racks.length);
  cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(capacities.length)
      .racks(racks)
      .simulatedCapacities(capacities)
      .build();
  try {
    cluster.waitActive();
    client = NameNodeProxies.createProxy(conf,
        cluster.getFileSystem(0).getUri(), ClientProtocol.class).getProxy();
    long totalCapacity = sum(capacities);
    // fill up the cluster to be 30% full
    long totalUsedSpace = totalCapacity * 3 / 10;
    createFile(cluster, filePath, totalUsedSpace / numOfDatanodes,
        (short) numOfDatanodes, 0);
    // start up an empty node with the same capacity and on the same rack
    cluster.startDataNodes(conf, 1, true, null, new String[] { newRack },
        new long[] { newCapacity });
    totalCapacity += newCapacity;
    // run balancer and validate results
    runBalancer(conf, totalUsedSpace, totalCapacity);
  } finally {
    cluster.shutdown();
  }
}
/**
 * Test parse method in Balancer#Cli class with wrong number of params.
 * Each case expects IllegalArgumentException from Balancer.Cli.parse;
 * the repeated try/fail/catch boilerplate is factored into a helper.
 */
@Test(timeout=100000)
public void testBalancerCliParseWithWrongParams() {
  String reason =
      "IllegalArgumentException is expected when value is not specified";
  assertParseFails(reason, "-threshold");
  assertParseFails(reason, "-policy");
  assertParseFails(reason, "-threshold", "1", "-policy");
  assertParseFails(reason, "-threshold", "1", "-include");
  assertParseFails(reason, "-threshold", "1", "-exclude");
  assertParseFails(reason, "-include", "-f");
  assertParseFails(reason, "-exclude", "-f");
  assertParseFails(
      "IllegalArgumentException is expected when both -exclude and -include are specified",
      "-include", "testnode1", "-exclude", "testnode2");
  assertParseFails("IllegalArgumentException is expected when a value "
      + "is not specified for the blockpool flag", "-blockpools");
  assertParseFails(reason + " for -source parameter", "-source");
}

/** Assert that Balancer.Cli.parse rejects the given argument list. */
private static void assertParseFails(String reason, String... parameters) {
  try {
    Balancer.Cli.parse(parameters);
    fail(reason);
  } catch (IllegalArgumentException ignored) {
    // expected
  }
}
/**
 * Test parsing of the -blockpools flag: comma-separated pool ids become the
 * block pool set of the resulting BalancerParameters.
 */
@Test
public void testBalancerCliParseBlockpools() {
  String[] parameters = new String[] { "-blockpools", "bp-1,bp-2,bp-3" };
  BalancerParameters p = Balancer.Cli.parse(parameters);
  assertEquals(3, p.getBlockPools().size());
  parameters = new String[] { "-blockpools", "bp-1" };
  p = Balancer.Cli.parse(parameters);
  assertEquals(1, p.getBlockPools().size());
  // NOTE(review): this asserts current parser behavior — an empty token
  // between commas is counted, yielding 3 entries for "bp-1,,bp-2"
  parameters = new String[] { "-blockpools", "bp-1,,bp-2" };
  p = Balancer.Cli.parse(parameters);
  assertEquals(3, p.getBlockPools().size());
  // ...while a trailing comma is not counted
  parameters = new String[] { "-blockpools", "bp-1," };
  p = Balancer.Cli.parse(parameters);
  assertEquals(1, p.getBlockPools().size());
}
/**
 * Verify balancer exits 0 on success (runs the one-node scenario through
 * the command-line interface).
 */
@Test(timeout=100000)
public void testExitZeroOnSuccess() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  oneNodeTest(conf, true);
}
/**
 * Test a cluster with even distribution,
 * then three nodes are added to the cluster,
 * runs balancer (via API) with two of the nodes in the exclude list
 */
@Test(timeout=100000)
public void testBalancerWithExcludeList() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> excludeHosts = new HashSet<String>();
  excludeHosts.add( "datanodeY");
  excludeHosts.add( "datanodeZ");
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1}, CAPACITY, RACK2,
      new HostNameBasedNodes(new String[] {"datanodeX", "datanodeY", "datanodeZ"},
      excludeHosts, BalancerParameters.DEFAULT.getIncludedNodes()),
      false, false);
}
/**
 * Test a cluster with even distribution,
 * then three nodes are added to the cluster,
 * runs balancer (via API) with two of the port-identified nodes excluded
 */
@Test(timeout=100000)
public void testBalancerWithExcludeListWithPorts() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, new PortNumberBasedNodes(3, 2, 0), false, false);
}
/**
 * Test a cluster with even distribution,
 * then three nodes are added to the cluster,
 * runs balancer via Cli with two of the nodes in the exclude list
 */
@Test(timeout=100000)
public void testBalancerCliWithExcludeList() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> excludeHosts = new HashSet<String>();
  excludeHosts.add( "datanodeY");
  excludeHosts.add( "datanodeZ");
  doTest(conf, new long[] { CAPACITY, CAPACITY },
      new String[] { RACK0, RACK1 }, CAPACITY, RACK2, new HostNameBasedNodes(
          new String[] { "datanodeX", "datanodeY", "datanodeZ" },
          excludeHosts, BalancerParameters.DEFAULT.getIncludedNodes()), true,
      false);
}
/**
 * Test a cluster with even distribution,
 * then three nodes are added to the cluster,
 * runs balancer via Cli with two of the port-identified nodes excluded
 */
@Test(timeout=100000)
public void testBalancerCliWithExcludeListWithPorts() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, new PortNumberBasedNodes(3, 2, 0), true, false);
}
/**
 * Test a cluster with even distribution, then three nodes are added to the
 * cluster; runs the balancer through the CLI with two of the new nodes in
 * the exclude list, where the exclude list is supplied via a file.
 */
@Test(timeout=100000)
public void testBalancerCliWithExcludeListInAFile() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> excludeHosts = new HashSet<String>();
  excludeHosts.add( "datanodeY");
  excludeHosts.add( "datanodeZ");
  // Both trailing flags true: CLI invocation, and the host list is written
  // to a file and passed by path rather than kept in memory.
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1}, CAPACITY, RACK2,
      new HostNameBasedNodes(new String[] {"datanodeX", "datanodeY", "datanodeZ"},
      excludeHosts, BalancerParameters.DEFAULT.getIncludedNodes()), true,
      true);
}
/**
 * Test a cluster with even distribution, then three nodes are added to the
 * cluster; runs the balancer through the CLI with two of the new nodes in
 * the exclude list, identified by host:port pairs and supplied via a file.
 */
@Test(timeout=100000)
public void testBalancerCliWithExcludeListWithPortsInAFile() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // PortNumberBasedNodes(3, 2, 0): 3 new datanodes, 2 excluded, 0 included.
  // Flags (true, true): CLI invocation with the list passed in a file.
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, new PortNumberBasedNodes(3, 2, 0), true, true);
}
/**
 * Test a cluster with even distribution, then three nodes are added to the
 * cluster; runs the balancer programmatically with one of the new nodes
 * ("datanodeY") in the include list, so only that node may receive blocks.
 */
@Test(timeout=100000)
public void testBalancerWithIncludeList() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> includeHosts = new HashSet<String>();
  includeHosts.add( "datanodeY");
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1}, CAPACITY, RACK2,
      new HostNameBasedNodes(new String[] {"datanodeX", "datanodeY", "datanodeZ"},
      BalancerParameters.DEFAULT.getExcludedNodes(), includeHosts),
      false, false);
}
/**
 * Test a cluster with even distribution, then three nodes are added to the
 * cluster; runs the balancer with one of the new nodes in the include list,
 * where nodes are identified by host:port pairs.
 */
@Test(timeout=100000)
public void testBalancerWithIncludeListWithPorts() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // PortNumberBasedNodes(3, 0, 1): 3 new datanodes, 0 excluded, 1 included.
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, new PortNumberBasedNodes(3, 0, 1), false, false);
}
/**
 * Test a cluster with even distribution, then three nodes are added to the
 * cluster; runs the balancer through the CLI with one of the new nodes
 * ("datanodeY") in the include list.
 */
@Test(timeout=100000)
public void testBalancerCliWithIncludeList() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> includeHosts = new HashSet<String>();
  includeHosts.add( "datanodeY");
  // Penultimate flag true: drive the Balancer via its CLI entry point.
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1}, CAPACITY, RACK2,
      new HostNameBasedNodes(new String[] {"datanodeX", "datanodeY", "datanodeZ"},
      BalancerParameters.DEFAULT.getExcludedNodes(), includeHosts), true,
      false);
}
/**
 * Test a cluster with even distribution, then three nodes are added to the
 * cluster; runs the balancer through the CLI with one of the new nodes in
 * the include list, identified by host:port pairs.
 */
@Test(timeout=100000)
public void testBalancerCliWithIncludeListWithPorts() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // PortNumberBasedNodes(3, 0, 1): 3 new datanodes, 0 excluded, 1 included.
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, new PortNumberBasedNodes(3, 0, 1), true, false);
}
/**
 * Test a cluster with even distribution, then three nodes are added to the
 * cluster; runs the balancer through the CLI with one of the new nodes
 * ("datanodeY") in the include list, supplied via a file.
 */
@Test(timeout=100000)
public void testBalancerCliWithIncludeListInAFile() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  Set<String> includeHosts = new HashSet<String>();
  includeHosts.add( "datanodeY");
  // Flags (true, true): CLI invocation with the host list passed in a file.
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1}, CAPACITY, RACK2,
      new HostNameBasedNodes(new String[] {"datanodeX", "datanodeY", "datanodeZ"},
      BalancerParameters.DEFAULT.getExcludedNodes(), includeHosts), true,
      true);
}
/**
 * Test a cluster with even distribution, then three nodes are added to the
 * cluster; runs the balancer through the CLI with one of the new nodes in
 * the include list, identified by host:port pairs and supplied via a file.
 */
@Test(timeout=100000)
public void testBalancerCliWithIncludeListWithPortsInAFile() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // PortNumberBasedNodes(3, 0, 1): 3 new datanodes, 0 excluded, 1 included.
  doTest(conf, new long[]{CAPACITY, CAPACITY}, new String[]{RACK0, RACK1},
      CAPACITY, RACK2, new PortNumberBasedNodes(3, 0, 1), true, true);
}
/*
 * Test Balancer with RAM_DISK configured.
 * One DN has two files on RAM_DISK, the other DN has no files on RAM_DISK.
 * Then verify that the balancer does not migrate files on RAM_DISK across DNs.
 */
@Test(timeout=300000)
public void testBalancerWithRamDisk() throws Exception {
  final int SEED = 0xFADED;
  final short REPL_FACT = 1;
  Configuration conf = new Configuration();

  // Both storage tiers get the same limit: room for 10 blocks plus a bit of
  // slack (one block minus a byte) so the lazy writer never hits the cap.
  final int defaultRamDiskCapacity = 10;
  final long ramDiskStorageLimit =
      ((long) defaultRamDiskCapacity * DEFAULT_RAM_DISK_BLOCK_SIZE) +
      (DEFAULT_RAM_DISK_BLOCK_SIZE - 1);
  final long diskStorageLimit =
      ((long) defaultRamDiskCapacity * DEFAULT_RAM_DISK_BLOCK_SIZE) +
      (DEFAULT_RAM_DISK_BLOCK_SIZE - 1);

  initConfWithRamDisk(conf, ramDiskStorageLimit);

  // Single DN with one RAM_DISK volume and one default (disk) volume.
  cluster = new MiniDFSCluster
      .Builder(conf)
      .numDataNodes(1)
      .storageCapacities(new long[] { ramDiskStorageLimit, diskStorageLimit })
      .storageTypes(new StorageType[] { RAM_DISK, DEFAULT })
      .build();

  cluster.waitActive();
  // Create few files on RAM_DISK
  final String METHOD_NAME = GenericTestUtils.getMethodName();
  final Path path1 = new Path("/" + METHOD_NAME + ".01.dat");
  final Path path2 = new Path("/" + METHOD_NAME + ".02.dat");

  DistributedFileSystem fs = cluster.getFileSystem();
  DFSClient client = fs.getClient();
  DFSTestUtil.createFile(fs, path1, true,
      DEFAULT_RAM_DISK_BLOCK_SIZE, 4 * DEFAULT_RAM_DISK_BLOCK_SIZE,
      DEFAULT_RAM_DISK_BLOCK_SIZE, REPL_FACT, SEED, true);
  DFSTestUtil.createFile(fs, path2, true,
      DEFAULT_RAM_DISK_BLOCK_SIZE, 1 * DEFAULT_RAM_DISK_BLOCK_SIZE,
      DEFAULT_RAM_DISK_BLOCK_SIZE, REPL_FACT, SEED, true);

  // Sleep for a short time to allow the lazy writer thread to do its job
  Thread.sleep(6 * 1000);

  // Add another fresh DN with the same type/capacity without files on RAM_DISK
  StorageType[][] storageTypes = new StorageType[][] {{RAM_DISK, DEFAULT}};
  long[][] storageCapacities = new long[][]{{ramDiskStorageLimit,
      diskStorageLimit}};
  cluster.startDataNodes(conf, REPL_FACT, storageTypes, true, null,
      null, null, storageCapacities, null, false, false, false, null);

  cluster.triggerHeartbeats();
  Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);

  // Run Balancer
  final BalancerParameters p = BalancerParameters.DEFAULT;
  final int r = Balancer.run(namenodes, p, conf);

  // Validate no RAM_DISK block should be moved
  assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);

  // Verify files are still on RAM_DISK
  DFSTestUtil.verifyFileReplicasOnStorageType(fs, client, path1, RAM_DISK);
  DFSTestUtil.verifyFileReplicasOnStorageType(fs, client, path2, RAM_DISK);
}
/**
 * Check that the balancer exits when there is an unfinalized rolling
 * upgrade, unless explicitly told to run anyway via -runDuringUpgrade,
 * and that it works normally again after the upgrade is finalized.
 */
@Test(timeout=300000)
public void testBalancerDuringUpgrade() throws Exception {
  final int SEED = 0xFADED;
  Configuration conf = new HdfsConfiguration();
  // Fast heartbeats/rechecks so the cluster state converges quickly, and a
  // tiny min block size so the balancer considers the small test blocks.
  conf.setLong(DFS_HEARTBEAT_INTERVAL_KEY, 1);
  conf.setInt(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 500);
  conf.setLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1);
  conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);

  final int BLOCK_SIZE = 1024*1024;
  cluster = new MiniDFSCluster
      .Builder(conf)
      .numDataNodes(1)
      .storageCapacities(new long[] { BLOCK_SIZE * 10 })
      .storageTypes(new StorageType[] { DEFAULT })
      .storagesPerDatanode(1)
      .build();

  cluster.waitActive();
  // Create a file on the single DN
  final String METHOD_NAME = GenericTestUtils.getMethodName();
  final Path path1 = new Path("/" + METHOD_NAME + ".01.dat");

  DistributedFileSystem fs = cluster.getFileSystem();
  DFSTestUtil.createFile(fs, path1, BLOCK_SIZE, BLOCK_SIZE * 2, BLOCK_SIZE,
      (short) 1, SEED);

  // Add another DN with the same capacity, cluster is now unbalanced
  cluster.startDataNodes(conf, 1, true, null, null);
  cluster.triggerHeartbeats();
  Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);

  // Run balancer
  final BalancerParameters p = BalancerParameters.DEFAULT;

  // Enter safe mode, prepare (but do not finalize) a rolling upgrade, then
  // leave safe mode — the cluster now has an unfinalized upgrade pending.
  fs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_ENTER);
  fs.rollingUpgrade(HdfsConstants.RollingUpgradeAction.PREPARE);
  fs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_LEAVE);

  // Rolling upgrade should abort the balancer
  assertEquals(ExitStatus.UNFINALIZED_UPGRADE.getExitCode(),
      Balancer.run(namenodes, p, conf));

  // Should work with the -runDuringUpgrade flag.
  BalancerParameters.Builder b =
      new BalancerParameters.Builder();
  b.setRunDuringUpgrade(true);
  final BalancerParameters runDuringUpgrade = b.build();
  assertEquals(ExitStatus.SUCCESS.getExitCode(),
      Balancer.run(namenodes, runDuringUpgrade, conf));

  // Finalize the rolling upgrade
  fs.rollingUpgrade(HdfsConstants.RollingUpgradeAction.FINALIZE);

  // Should also work after finalization.
  assertEquals(ExitStatus.SUCCESS.getExitCode(),
      Balancer.run(namenodes, p, conf));
}
/**
 * Test special case. Two replicas belonging to the same block should not end
 * up on the same node.
 * We have 2 nodes.
 * We have a block in (DN0,SSD) and (DN1,DISK).
 * Replica in (DN0,SSD) should not be moved to (DN1,SSD).
 * Otherwise DN1 would hold 2 replicas of the same block.
 */
@Test(timeout=100000)
public void testTwoReplicaShouldNotInSameDN() throws Exception {
  final Configuration conf = new HdfsConfiguration();

  int blockSize = 5 * 1024 * 1024 ;
  conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
  conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
  conf.setLong(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1L);
  conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);

  // Also used as the replication factor of the test file below.
  int numOfDatanodes =2;
  // Asymmetric capacities: DN0 is SSD-heavy, DN1 is DISK-heavy, so the
  // ONE_SSD policy naturally lands SSD replicas on DN0 and DISK on DN1.
  cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(2)
      .racks(new String[]{"/default/rack0", "/default/rack0"})
      .storagesPerDatanode(2)
      .storageTypes(new StorageType[][]{
          {StorageType.SSD, StorageType.DISK},
          {StorageType.SSD, StorageType.DISK}})
      .storageCapacities(new long[][]{
          {100 * blockSize, 20 * blockSize},
          {20 * blockSize, 100 * blockSize}})
      .build();
  cluster.waitActive();

  //set "/bar" directory with ONE_SSD storage policy.
  DistributedFileSystem fs = cluster.getFileSystem();
  Path barDir = new Path("/bar");
  fs.mkdir(barDir,new FsPermission((short)777));
  fs.setStoragePolicy(barDir, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);

  // Insert 30 blocks. So (DN0,SSD) and (DN1,DISK) are about half full,
  // and (DN0,DISK) and (DN1,SSD) are nearly empty.
  long fileLen = 30 * blockSize;
  // fooFile has ONE_SSD policy. So
  // (DN0,SSD) and (DN1,DISK) have 2 replicas belong to same block.
  // (DN0,DISK) and (DN1,SSD) have 2 replicas belong to same block.
  Path fooFile = new Path(barDir, "foo");
  createFile(cluster, fooFile, fileLen, (short) numOfDatanodes, 0);
  // update space info
  cluster.triggerHeartbeats();

  BalancerParameters p = BalancerParameters.DEFAULT;
  Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
  final int r = Balancer.run(namenodes, p, conf);

  // Replica in (DN0,SSD) was not moved to (DN1,SSD), because (DN1,DISK)
  // already has one. Otherwise DN1 will have 2 replicas.
  // For same reason, no replicas were moved.
  assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
}
/**
 * Test running many balancers simultaneously.
 *
 * Case-1: First balancer is running. Now, running second one should get
 * "Another balancer is running. Exiting.." IOException and fail immediately
 *
 * Case-2: When running second balancer 'balancer.id' file exists but the
 * lease doesn't exist. Now, the second balancer should run successfully.
 */
@Test(timeout = 100000)
public void testManyBalancerSimultaneously() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);
  // add an empty node with half of the capacities(4 * CAPACITY) & the same
  // rack
  long[] capacities = new long[] { 4 * CAPACITY };
  String[] racks = new String[] { RACK0 };
  long newCapacity = 2 * CAPACITY;
  String newRack = RACK0;
  LOG.info("capacities = " + long2String(capacities));
  LOG.info("racks      = " + Arrays.asList(racks));
  LOG.info("newCapacity= " + newCapacity);
  LOG.info("newRack    = " + newRack);
  LOG.info("useTool    = " + false);
  assertEquals(capacities.length, racks.length);
  int numOfDatanodes = capacities.length;
  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(capacities.length)
      .racks(racks).simulatedCapacities(capacities).build();
  cluster.waitActive();
  client = NameNodeProxies.createProxy(conf,
      cluster.getFileSystem(0).getUri(), ClientProtocol.class).getProxy();

  long totalCapacity = sum(capacities);

  // fill up the cluster to be 30% full
  final long totalUsedSpace = totalCapacity * 3 / 10;
  createFile(cluster, filePath, totalUsedSpace / numOfDatanodes,
      (short) numOfDatanodes, 0);
  // start up an empty node with the same capacity and on the same rack
  cluster.startDataNodes(conf, 1, true, null, new String[] { newRack },
      new long[] { newCapacity });

  // Case1: Simulate first balancer by creating 'balancer.id' file. It
  // will keep this file until the balancing operation is completed.
  FileSystem fs = cluster.getFileSystem(0);
  final FSDataOutputStream out = fs
      .create(Balancer.BALANCER_ID_PATH, false);
  out.writeBytes(InetAddress.getLocalHost().getHostName());
  // hflush (without close) keeps the write lease alive, which is what makes
  // the second balancer below believe another balancer is still running.
  out.hflush();
  assertTrue("'balancer.id' file doesn't exist!",
      fs.exists(Balancer.BALANCER_ID_PATH));

  // start second balancer
  final String[] args = { "-policy", "datanode" };
  final Tool tool = new Cli();
  tool.setConf(conf);
  int exitCode = tool.run(args); // start balancing
  assertEquals("Exit status code mismatches",
      ExitStatus.IO_EXCEPTION.getExitCode(), exitCode);

  // Case2: Release lease so that another balancer would be able to
  // perform balancing.
  out.close();
  assertTrue("'balancer.id' file doesn't exist!",
      fs.exists(Balancer.BALANCER_ID_PATH));
  exitCode = tool.run(args); // start balancing
  assertEquals("Exit status code mismatches",
      ExitStatus.SUCCESS.getExitCode(), exitCode);
}
/**
 * Balancer should not move blocks with size &lt; minBlockSize, and the
 * -source option should restrict which datanodes blocks are moved from:
 * empty source nodes or a min-block-size above every block size must yield
 * no moves, while listing all the filled nodes as sources lets the
 * balancer succeed.
 */
@Test(timeout=60000)
public void testMinBlockSizeAndSourceNodes() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  initConf(conf);

  final short replication = 3;
  final long[] lengths = {10, 10, 10, 10};
  final long[] capacities = new long[replication];
  final long totalUsed = capacities.length * sum(lengths);
  Arrays.fill(capacities, 1000);

  cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(capacities.length)
      .simulatedCapacities(capacities)
      .build();
  final DistributedFileSystem dfs = cluster.getFileSystem();
  cluster.waitActive();
  client = NameNodeProxies.createProxy(conf, dfs.getUri(),
      ClientProtocol.class).getProxy();

  // Write a few tiny files (10 bytes each, replicated to every node); the
  // block sizes are deliberately below the min-block-size used below.
  for(int i = 0; i < lengths.length; i++) {
    final long size = lengths[i];
    final Path p = new Path("/file" + i + "_size" + size);
    try(final OutputStream out = dfs.create(p)) {
      for(int j = 0; j < size; j++) {
        out.write(j);
      }
    }
  }

  // start up an empty node with the same capacity
  cluster.startDataNodes(conf, capacities.length, true, null, null, capacities);
  LOG.info("capacities    = " + Arrays.toString(capacities));
  LOG.info("totalUsedSpace= " + totalUsed);
  LOG.info("lengths       = " + Arrays.toString(lengths) + ", #=" + lengths.length);
  waitForHeartBeat(totalUsed, 2*capacities[0]*capacities.length, client, cluster);

  final Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);

  { // run Balancer with min-block-size=50: every block is smaller, so
    // nothing qualifies for moving.
    final BalancerParameters p = Balancer.Cli.parse(new String[] {
        "-policy", BalancingPolicy.Node.INSTANCE.getName(),
        "-threshold", "1"
    });
    assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
    assertEquals(p.getThreshold(), 1.0, 0.001);

    conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 50);
    final int r = Balancer.run(namenodes, p, conf);
    assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
  }

  conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1);

  { // run Balancer with empty nodes as source nodes: they hold no blocks,
    // so there is nothing to move. (Note: this scope re-sets min-block-size
    // to 50 before running.)
    final Set<String> sourceNodes = new HashSet<>();
    final List<DataNode> datanodes = cluster.getDataNodes();
    for(int i = capacities.length; i < datanodes.size(); i++) {
      sourceNodes.add(datanodes.get(i).getDisplayName());
    }
    final BalancerParameters p = Balancer.Cli.parse(new String[] {
        "-policy", BalancingPolicy.Node.INSTANCE.getName(),
        "-threshold", "1",
        "-source", StringUtils.join(sourceNodes, ',')
    });
    assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
    assertEquals(p.getThreshold(), 1.0, 0.001);
    assertEquals(p.getSourceNodes(), sourceNodes);

    conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 50);
    final int r = Balancer.run(namenodes, p, conf);
    assertEquals(ExitStatus.NO_MOVE_BLOCK.getExitCode(), r);
  }

  { // run Balancer with a single filled node as the source node: its blocks
    // are replicated on every other filled node, so no move is possible.
    final Set<String> sourceNodes = new HashSet<>();
    final List<DataNode> datanodes = cluster.getDataNodes();
    sourceNodes.add(datanodes.get(0).getDisplayName());
    final BalancerParameters p = Balancer.Cli.parse(new String[] {
        "-policy", BalancingPolicy.Node.INSTANCE.getName(),
        "-threshold", "1",
        "-source", StringUtils.join(sourceNodes, ',')
    });
    assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
    assertEquals(p.getThreshold(), 1.0, 0.001);
    assertEquals(p.getSourceNodes(), sourceNodes);

    conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1);
    final int r = Balancer.run(namenodes, p, conf);
    assertEquals(ExitStatus.NO_MOVE_BLOCK.getExitCode(), r);
  }

  { // run Balancer with all filled nodes as source nodes: now blocks can be
    // moved to the empty nodes and the run succeeds.
    final Set<String> sourceNodes = new HashSet<>();
    final List<DataNode> datanodes = cluster.getDataNodes();
    for(int i = 0; i < capacities.length; i++) {
      sourceNodes.add(datanodes.get(i).getDisplayName());
    }
    final BalancerParameters p = Balancer.Cli.parse(new String[] {
        "-policy", BalancingPolicy.Node.INSTANCE.getName(),
        "-threshold", "1",
        "-source", StringUtils.join(sourceNodes, ',')
    });
    assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
    assertEquals(p.getThreshold(), 1.0, 0.001);
    assertEquals(p.getSourceNodes(), sourceNodes);

    conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1);
    final int r = Balancer.run(namenodes, p, conf);
    assertEquals(ExitStatus.SUCCESS.getExitCode(), r);
  }
}
/**
 * Public entry point for integration tests: enables striping (erasure
 * coding) on the given configuration, then runs the striped-file balancer
 * scenario.
 *
 * @param conf the configuration to initialize and run the scenario with
 */
public void integrationTestWithStripedFile(Configuration conf) throws Exception {
  initConfWithStripe(conf);
  doTestBalancerWithStripedFile(conf);
}
/**
 * Tests the balancer on a cluster containing a striped (erasure coded)
 * file. Delegates to {@link #integrationTestWithStripedFile(Configuration)},
 * which performs the identical configure-and-run sequence, instead of
 * duplicating it inline.
 */
@Test(timeout = 100000)
public void testBalancerWithStripedFile() throws Exception {
  integrationTestWithStripedFile(new Configuration());
}
/**
 * Core striped-file scenario: builds a cluster big enough for one EC group
 * plus two spare nodes, fills it with a striped file, adds a fresh node,
 * runs the balancer and verifies the striped block groups remain valid.
 *
 * @param conf configuration already initialized for striping
 */
private void doTestBalancerWithStripedFile(Configuration conf) throws Exception {
  // Enough datanodes for a full EC group (data + parity) plus two extras.
  int numOfDatanodes = dataBlocks + parityBlocks + 2;
  int numOfRacks = dataBlocks;
  long capacity = 20 * DEFAULT_STRIPE_BLOCK_SIZE;
  long[] capacities = new long[numOfDatanodes];
  for (int i = 0; i < capacities.length; i++) {
    capacities[i] = capacity;
  }
  // Spread datanodes round-robin over the racks.
  String[] racks = new String[numOfDatanodes];
  for (int i = 0; i < numOfDatanodes; i++) {
    racks[i] = "/rack" + (i % numOfRacks);
  }
  cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(numOfDatanodes)
      .racks(racks)
      .simulatedCapacities(capacities)
      .build();
  try {
    cluster.waitActive();
    client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0).getUri(),
        ClientProtocol.class).getProxy();
    // null policy selects the default erasure coding policy for "/".
    client.setErasureCodingPolicy("/", null);

    long totalCapacity = sum(capacities);

    // fill up the cluster with 30% data. It'll be 45% full plus parity.
    long fileLen = totalCapacity * 3 / 10;
    long totalUsedSpace = fileLen * (dataBlocks + parityBlocks) / dataBlocks;
    FileSystem fs = cluster.getFileSystem(0);
    DFSTestUtil.createFile(fs, filePath, fileLen, (short) 3, r.nextLong());

    // verify locations of striped blocks
    LocatedBlocks locatedBlocks = client.getBlockLocations(fileName, 0, fileLen);
    StripedFileTestUtil.verifyLocatedStripedBlocks(locatedBlocks, groupSize);

    // add one datanode
    String newRack = "/rack" + (++numOfRacks);
    cluster.startDataNodes(conf, 1, true, null,
        new String[]{newRack}, null, new long[]{capacity});
    totalCapacity += capacity;
    cluster.triggerHeartbeats();

    // run balancer and validate results
    BalancerParameters p = BalancerParameters.DEFAULT;
    runBalancer(conf, totalUsedSpace, totalCapacity, p, 0);

    // verify locations of striped blocks
    locatedBlocks = client.getBlockLocations(fileName, 0, fileLen);
    StripedFileTestUtil.verifyLocatedStripedBlocks(locatedBlocks, groupSize);
  } finally {
    cluster.shutdown();
  }
}
/**
 * Test Balancer runs fine when logging in with a keytab in a kerberized
 * environment. Reuses testUnknownDatanode here for basic functionality
 * testing.
 */
@Test(timeout = 300000)
public void testBalancerWithKeytabs() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  try {
    initSecureConf(conf);
    final UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        principal, keytabFile.getAbsolutePath());
    // Run the balancer scenario as the keytab-authenticated user.
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        // verify that balancer runs Ok.
        testUnknownDatanode(conf);
        // verify that UGI was logged in using keytab.
        assertTrue(UserGroupInformation.isLoginKeytabBased());
        return null;
      }
    });
  } finally {
    // Reset UGI so that other tests are not affected.
    UserGroupInformation.reset();
    UserGroupInformation.setConfiguration(new Configuration());
  }
}
/**
 * Standalone entry point: runs a small subset of the balancer tests
 * directly, without a JUnit runner.
 *
 * @param args command-line arguments (unused)
 */
public static void main(String[] args) throws Exception {
  TestBalancer balancerTest = new TestBalancer();
  balancerTest.testBalancer0();
  balancerTest.testBalancer1();
  balancerTest.testBalancer2();
}
}
| HDFS-11135. The tests in TestBalancer run fails due to NPE. Contributed By Yiqun Lin
| hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java | HDFS-11135. The tests in TestBalancer run fails due to NPE. Contributed By Yiqun Lin |
|
Java | apache-2.0 | 7f3b5ea00180c406edf9fa7208a15897c03030c2 | 0 | Valkryst/Schillsaver | package view;
import javafx.scene.control.Control;
import javafx.scene.control.Tooltip;
import javafx.scene.layout.ColumnConstraints;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.Pane;
import lombok.Getter;
import java.util.Objects;
/**
 * Base class for views. Holds the view's root {@link Pane} and provides
 * shared helpers for tooltips and evenly divided {@link GridPane} layouts.
 */
public class View {
    /** The pane of the view. */
    @Getter protected Pane pane;

    /**
     * Sets the tooltip of a JavaFX control.
     *
     * @param control
     *          The control.
     *
     * @param message
     *          The message.
     *
     * @throws java.lang.NullPointerException
     *          If the message is null.
     *
     * @throws java.lang.IllegalArgumentException
     *          If the message is empty.
     */
    static void setTooltip(final Control control, final String message) {
        Objects.requireNonNull(message);

        if (message.isEmpty()) {
            throw new IllegalArgumentException("The message cannot be empty.");
        }

        control.setTooltip(new Tooltip(message));
    }

    /**
     * Creates a GridPane with one row and one column where the column
     * fills 100% of the width.
     *
     * @return
     *          The pane.
     */
    static GridPane getFullGridPane() {
        return createEvenGridPane(1);
    }

    /**
     * Creates a GridPane with one row and two columns where each column
     * fills 50% of the width.
     *
     * @return
     *          The pane.
     */
    static GridPane getHalvedGridPane() {
        return createEvenGridPane(2);
    }

    /**
     * Creates a GridPane whose columns evenly split 100% of the width.
     *
     * Shared implementation for the full/halved factories above, so the
     * column-constraint construction is not duplicated.
     *
     * @param totalColumns
     *          The number of columns. Must be positive.
     *
     * @return
     *          The pane.
     */
    private static GridPane createEvenGridPane(final int totalColumns) {
        final GridPane pane = new GridPane();

        for (int i = 0; i < totalColumns; i++) {
            final ColumnConstraints column = new ColumnConstraints();
            column.setPercentWidth(100.0 / totalColumns);
            pane.getColumnConstraints().add(column);
        }

        return pane;
    }
}
| src/view/View.java | package view;
import javafx.scene.control.Control;
import javafx.scene.control.Tooltip;
import javafx.scene.layout.Pane;
import lombok.Getter;
import java.util.Objects;
/**
 * Base class for views. Holds the view's root {@link Pane} and a shared
 * tooltip helper.
 */
public class View {
    /** The pane of the view. */
    @Getter protected Pane pane;

    /**
     * Sets the tooltip of a JavaFX control.
     *
     * @param control
     *          The control.
     *
     * @param message
     *          The message.
     *
     * @throws java.lang.NullPointerException
     *          If the message is null.
     *
     * @throws java.lang.IllegalArgumentException
     *          If the message is empty.
     */
    static void setTooltip(final Control control, final String message) {
        Objects.requireNonNull(message);

        if (message.isEmpty()) {
            throw new IllegalArgumentException("The message cannot be empty.");
        }

        control.setTooltip(new Tooltip(message));
    }
}
| Adds helper functions for creating gridpanes.
| src/view/View.java | Adds helper functions for creating gridpanes. |
|
Java | bsd-3-clause | 836659c04029291f24ea8f1e5cfbb0e9945a493e | 0 | ickStream/ickstream-java-common,bluegaspode/ickstream-java-common | /*
* Copyright (c) 2013-2014, ickStream GmbH
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of ickStream nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.ickstream.protocol.oauth;
import com.ickstream.protocol.backend.common.RestCallHelper;
import com.ickstream.protocol.backend.common.ServiceCredentials;
import com.ickstream.protocol.backend.common.UnauthorizedAccessException;
import com.ickstream.protocol.common.exception.UnauthorizedException;
import com.ickstream.protocol.service.corebackend.*;
import org.scribe.builder.ServiceBuilder;
import org.scribe.builder.api.Api;
import org.scribe.model.OAuthConstants;
import org.scribe.model.Token;
import org.scribe.model.Verifier;
import org.scribe.oauth.OAuth10aServiceImpl;
import org.scribe.oauth.OAuthService;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
public abstract class AbstractOAuthService extends HttpServlet {
    /**
     * @return the backend service used to look up applications, services,
     *         users and identities; supplied by the concrete subclass.
     */
    protected abstract CoreBackendService getCoreBackendService();
protected boolean isAddService(HttpServletRequest req) {
return req.getRequestURI().endsWith("/addservice");
}
protected boolean isAddIdentity(HttpServletRequest req) {
return req.getRequestURI().endsWith("/addidentity");
}
    /**
     * Drives the OAuth authorization flow (both OAuth 1.0a and 2.0 via the
     * Scribe library) for linking an external service account or identity to
     * an ickStream user.
     *
     * The method is entered multiple times during one flow:
     * 1. Initial call (no token/code parameter): stores redirect_uri,
     *    client_id and any user_code in the session, then redirects the
     *    browser to the provider's authorization URL.
     * 2. Provider callback (code/token parameter present): exchanges the
     *    code/verifier for an access token, resolves the user identity and
     *    either links it to an existing user, issues an authorization code,
     *    or shows the account creation page.
     * 3. Re-entry from the account creation page ("/create_user" suffix).
     */
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        // Re-entry from the account creation form: re-render the page with the
        // previously generated authorization code and stop.
        if (req.getRequestURI().endsWith("/create_user") &&
                req.getSession().getAttribute(OAuthConstants.REDIRECT_URI) != null &&
                req.getSession().getAttribute(OAuthConstants.CLIENT_ID) != null &&
                req.getParameter(OAuthConstants.CODE) != null) {

            renderAccountCreationPage(resp, req.getRequestURI().replaceAll("/create_user$", ""), req.getParameter(OAuthConstants.CODE));
            return;
        }
        // Stash OAuth client parameters in the session so they survive the
        // round-trip to the external provider.
        if (req.getParameter(OAuthConstants.REDIRECT_URI) != null) {
            req.getSession().setAttribute(OAuthConstants.REDIRECT_URI, req.getParameter(OAuthConstants.REDIRECT_URI));
        }
        if (req.getParameter(OAuthConstants.CLIENT_ID) != null) {
            req.getSession().setAttribute(OAuthConstants.CLIENT_ID, req.getParameter(OAuthConstants.CLIENT_ID));
        }
        // A user_code identifies an already-known user; remember whether it
        // arrived on the add-service or add-identity endpoint.
        if (isAddService(req) && req.getParameter("user_code") != null) {
            req.getSession().setAttribute("service_user_code", req.getParameter("user_code"));
        } else if (isAddIdentity(req) && req.getParameter("user_code") != null) {
            req.getSession().setAttribute("identity_user_code", req.getParameter("user_code"));
        }
        if (req.getParameter(OAuthConstants.TOKEN) == null && req.getParameter(OAuthConstants.CODE) == null && req.getParameter("error") == null) {
            // Phase 1: no provider response yet — build the Scribe service
            // (scope is optional) and redirect to the provider's auth page.
            OAuthService service;
            if (getScope() != null) {
                service = new ServiceBuilder()
                        .provider(getApiImplementation())
                        .apiKey(getApiKey())
                        .apiSecret(getApiSecret())
                        .scope(getScope())
                        .callback(getRedirectURL())
                        .build();
            } else {
                service = new ServiceBuilder()
                        .provider(getApiImplementation())
                        .apiKey(getApiKey())
                        .apiSecret(getApiSecret())
                        .callback(getRedirectURL())
                        .build();
            }
            Token requestToken = null;
            // OAuth 1.0a needs a request token first; its secret must be kept
            // for the access-token exchange on the callback.
            if (service instanceof OAuth10aServiceImpl) {
                requestToken = service.getRequestToken();
                req.getSession().setAttribute("secret", requestToken.getSecret());
            }
            String userAuthURL = service.getAuthorizationUrl(requestToken);
            resp.sendRedirect(userAuthURL);
        } else if (req.getParameter(OAuthConstants.CODE) != null || req.getParameter(OAuthConstants.TOKEN) != null) {
            // Phase 2: provider callback — rebuild the service and exchange
            // the code/verifier for an access token.
            OAuthService oAuthService;
            if (getScope() != null) {
                oAuthService = new ServiceBuilder()
                        .provider(getApiImplementation())
                        .apiKey(getApiKey())
                        .apiSecret(getApiSecret())
                        .scope(getScope())
                        .callback(getRedirectURL())
                        .build();
            } else {
                oAuthService = new ServiceBuilder()
                        .provider(getApiImplementation())
                        .apiKey(getApiKey())
                        .apiSecret(getApiSecret())
                        .callback(getRedirectURL())
                        .build();
            }
            // OAuth 2.0 providers send "code"; OAuth 1.0a sends "token" and
            // "verifier" — accept either form.
            String tokenCode = req.getParameter(OAuthConstants.CODE);
            if (tokenCode == null) {
                tokenCode = req.getParameter(OAuthConstants.TOKEN);
            }
            String verifierCode = req.getParameter(OAuthConstants.CODE);
            if (verifierCode == null) {
                verifierCode = req.getParameter(OAuthConstants.VERIFIER);
            }
            String secret = getApiSecret();
            if (oAuthService instanceof OAuth10aServiceImpl) {
                // Use the request-token secret stored during phase 1.
                secret = req.getSession().getAttribute("secret").toString();
            }
            Token token = new Token(tokenCode, secret);
            Verifier verifier = new Verifier(verifierCode);
            Token accessToken = oAuthService.getAccessToken(token, verifier);
            if (req.getSession().getAttribute(OAuthConstants.REDIRECT_URI) != null && req.getSession().getAttribute(OAuthConstants.CLIENT_ID) != null) {
                ApplicationResponse application = getCoreBackendService().getApplication();
                if (accessToken != null && application != null && application.getActive()) {
                    ServiceResponse service = getCoreBackendService().getService();
                    String identity = getUserIdentity(oAuthService, service, accessToken);
                    String type = getIdentityType();
                    // linkService distinguishes "add service" (also store the
                    // service credentials) from a pure "add identity" flow.
                    boolean linkService = false;
                    String identityUserCode = null;
                    if (req.getSession().getAttribute("service_user_code") != null) {
                        linkService = true;
                        identityUserCode = req.getSession().getAttribute("service_user_code").toString();
                    } else if (req.getSession().getAttribute("identity_user_code") != null) {
                        identityUserCode = req.getSession().getAttribute("identity_user_code").toString();
                    }
                    req.getSession().removeAttribute("service_user_code");
                    req.getSession().removeAttribute("identity_user_code");
                    if (identityUserCode != null) {
                        // Known user: attach the new identity (and optionally
                        // the service) to that account.
                        UserResponse user = null;
                        try {
                            user = getCoreBackendService().addIdentityToUser(identityUserCode, type, identity);
                            if (user != null && linkService) {
                                linkUserService(oAuthService, service, accessToken, user.getId());
                            }
                            if (user == null && !linkService) {
                                resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?error=identity_already_used");
                                req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
                                req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
                            } else {
                                resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?status=success");
                                req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
                                req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
                            }
                        } catch (Exception e) {
                            // UnauthorizedException is an expected failure mode
                            // and is not logged; anything else is.
                            if (e.getCause() == null || !(e.getCause() instanceof UnauthorizedException)) {
                                e.printStackTrace();
                            }
                            resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?error=access_denied");
                            req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
                            req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
                        }
                    } else if (type != null && type.equals("email") && identity != null) {
                        // No user_code: issue an authorization code bound to the
                        // email identity. If no matching user exists yet, offer
                        // account creation; otherwise redirect with the code.
                        ServiceCredentials serviceCredentials = getServiceCredentials(oAuthService, service, accessToken);
                        String code = getCoreBackendService().createAuthorizationCodeForIdentity((String) req.getSession().getAttribute(OAuthConstants.CLIENT_ID), type, identity, serviceCredentials.getServiceIdentity(), serviceCredentials.getAccessToken(), serviceCredentials.getAccessTokenSecret(), serviceCredentials.getRefreshToken(), serviceCredentials.getCustomData(), req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString());
                        UserResponse user = getCoreBackendService().getUserByIdentity(type, identity);
                        if (user == null) {
                            req.getSession().setAttribute("generated_code", code);
                            renderAccountCreationPage(resp, req.getRequestURI(), code);
                        } else {
                            resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?" + OAuthConstants.CODE + "=" + code);
                            req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
                            req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
                        }
                    } else {
                        resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?error=unsupported_response_type");
                        req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
                        req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
                    }
                } else if (application == null) {
                    resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?error=unauthorized_client");
                    req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
                    req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
                } else if (accessToken == null) {
                    resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?error=access_denied");
                    req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
                    req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
                }
            } else {
                // No redirect_uri/client_id in the session: fall back to the
                // subclass-provided direct output.
                writeOutputWithoutRedirectURI(req, resp, oAuthService, accessToken);
                req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
                req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
            }
        } else {
            // Provider reported an error (the "error" parameter is present).
            resp.setContentType("text/html");
            resp.getWriter().append("ERROR");
            req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
            req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
        }
    }
private void renderAccountCreationPage(HttpServletResponse resp, String uri, String code) throws IOException {
resp.setContentType("text/html");
resp.setCharacterEncoding("utf-8");
resp.getWriter().append("<HTML>\n<HEAD>\n")
.append(OAuthServiceHelper.getDeviceOptimizedStylesheetHeader())
.append("</HEAD>\n<BODY>\n");
resp.getWriter().append("<div id=\"question\">No previous account exist, do you want to create a new one ?</div>");
resp.getWriter().append("<div id=\"termsofuseheader\">Terms of Use:</div>");
resp.getWriter().append("<textarea id=\"termsofuse\" readonly=\"readonly\" style=\"resize: none;\" data-role=\"none\">");
String termsOfUseUrl = System.getProperty("ickstream-core-termsofuse-url", "https://api.ickstream.com/termsofuse.txt");
String termsOfUse = RestCallHelper.callResourceWithGet(termsOfUseUrl, null);
resp.getWriter().append(termsOfUse);
resp.getWriter().append("</textarea>");
resp.getWriter().append("<FORM action=\"").append(uri).append("\" method=\"post\">");
resp.getWriter().append("<INPUT type=\"hidden\" name=\"" + OAuthConstants.CODE + "\" value=\"").append(code).append("\">");
resp.getWriter().append("<INPUT type=\"checkbox\" name=\"termsofuse_confirmation\" value=\"1\">I agree with Terms of Use</INPUT><BR>");
resp.getWriter().append("<INPUT type=\"submit\" name=\"cancel\" value=\"Cancel\">");
resp.getWriter().append("<INPUT type=\"submit\" name=\"create\" value=\"Create account\">");
resp.getWriter().append("</FORM>");
resp.getWriter().append("</BODY></HTML>");
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
if (req.getSession().getAttribute(OAuthConstants.REDIRECT_URI) != null) {
String redirect_uri = req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString();
String code = req.getParameter(OAuthConstants.CODE);
Boolean create = req.getParameter("create") != null;
if (code != null && create != null && create) {
if (req.getParameter("termsofuse_confirmation") != null && req.getParameter("termsofuse_confirmation").equals("1")) {
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
resp.sendRedirect(redirect_uri + "?" + OAuthConstants.CODE + "=" + code);
} else {
resp.sendRedirect(req.getRequestURI() + "/create_user?" + OAuthConstants.CODE + "=" + code);
}
} else {
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
resp.sendRedirect(redirect_uri + "?error=access_denied");
}
}
}
protected void writeOutputWithoutRedirectURI(HttpServletRequest req, HttpServletResponse resp, OAuthService oAuthService, Token accessToken) throws IOException {
resp.setContentType("text/html");
resp.setCharacterEncoding("utf-8");
resp.getWriter().append("<HTML>");
resp.getWriter().append("<BODY>Welcome to ickStream\nThis is an internal page which you are not supposed to enter\n");
resp.getWriter().append("</BODY>");
resp.getWriter().append("</HTML>");
}
protected String getRedirectURL() {
AuthenticationProviderResponse provider = getCoreBackendService().getAuthenticationProvider();
if (provider != null) {
return provider.getRedirectUrl();
}
return null;
}
protected String getApiSecret() {
AuthenticationProviderResponse provider = getCoreBackendService().getAuthenticationProvider();
if (provider != null && provider.getApiSecret() != null && provider.getApiSecret().trim().length() > 0) {
return provider.getApiSecret();
}
return null;
}
protected String getApiKey() {
AuthenticationProviderResponse provider = getCoreBackendService().getAuthenticationProvider();
if (provider != null && provider.getApiKey() != null && provider.getApiKey().trim().length() > 0) {
return provider.getApiKey();
}
return null;
}
    /**
     * Refreshes the OAuth 2 access token of the given user service using its
     * stored refresh token. On success the refreshed credentials are written to
     * the backend (via {@link #setUserService}) and copied back into the passed
     * object so the caller sees the new values.
     *
     * @param userService The user service to refresh; must carry a refresh token
     * @throws IllegalArgumentException    If the user service has no refresh token
     * @throws UnauthorizedAccessException If the provider did not return a new
     *                                     token or the application is inactive/unknown
     * @throws RuntimeException            If the configured OAuth service is not
     *                                     OAuth 2 (only OAuth 2 supports refresh tokens)
     */
    protected void refreshAccessToken(UserServiceResponse userService) throws UnauthorizedAccessException {
        if (userService.getRefreshToken() == null) {
            throw new IllegalArgumentException("user service must have refresh token");
        }
        // Build the scribe OAuth service; the scope is only applied when the
        // subclass defines one (ServiceBuilder rejects a null scope).
        OAuthService oAuthService;
        if (getScope() != null) {
            oAuthService = new ServiceBuilder()
                    .provider(getApiImplementation())
                    .apiKey(getApiKey())
                    .apiSecret(getApiSecret())
                    .scope(getScope())
                    .callback(getRedirectURL())
                    .build();
        } else {
            oAuthService = new ServiceBuilder()
                    .provider(getApiImplementation())
                    .apiKey(getApiKey())
                    .apiSecret(getApiSecret())
                    .callback(getRedirectURL())
                    .build();
        }
        if (oAuthService instanceof OAuth20ServiceImpl) {
            // For the refresh call the refresh token takes the place of the token value.
            Token token = new Token(userService.getRefreshToken(), null);
            Token accessToken = ((OAuth20ServiceImpl) oAuthService).refreshAccessToken(token);
            ApplicationResponse application = getCoreBackendService().getApplication();
            if (accessToken != null && application != null && application.getActive()) {
                UserServiceRequest updatedUserService = new UserServiceRequest(userService);
                setUserService(updatedUserService, accessToken);
                // Propagate the refreshed credentials back into the caller's object.
                userService.setAccessToken(updatedUserService.getAccessToken());
                userService.setAccessTokenSecret(updatedUserService.getAccessTokenSecret());
                userService.setRefreshToken(updatedUserService.getRefreshToken());
            } else {
                throw new UnauthorizedAccessException();
            }
        } else {
            throw new RuntimeException("OAuth service must support refresh tokens");
        }
    }
    /** Returns the OAuth scope to request, or null when no scope should be used. */
    protected abstract String getScope();

    /** Returns the scribe {@link Api} implementation for this provider. */
    protected abstract Class<? extends Api> getApiImplementation();

    /** Returns the identity type (for example "email") produced by this provider. */
    protected abstract String getIdentityType();

    /** Extracts the user identity from the provider using the obtained access token. */
    protected abstract String getUserIdentity(OAuthService oAuthService, ServiceResponse service, Token accessToken);

    /**
     * Extracts the provider-side service identity. By default this equals the
     * user identity; subclasses override it when the two differ.
     */
    protected String getServiceIdentity(OAuthService oAuthService, ServiceResponse service, Token accessToken) {
        return getUserIdentity(oAuthService, service, accessToken);
    }
protected void linkUserService(OAuthService oAuthService, ServiceResponse service, Token accessToken, String userId) {
String identity = getServiceIdentity(oAuthService, service, accessToken);
UserServiceResponse userService = getCoreBackendService().getUserServiceByUser(userId);
if (userService == null) {
userService = new UserServiceResponse();
userService.setUserId(userId);
}
UserServiceRequest updatedUserService = new UserServiceRequest(userService);
updatedUserService.setIdentity(identity);
setUserService(updatedUserService, accessToken);
}
protected void setUserService(UserServiceRequest userService, Token accessToken) {
userService.setAccessToken(accessToken.getToken());
userService.setAccessTokenSecret(accessToken.getSecret());
if (accessToken instanceof TokenWithRefresh) {
userService.setRefreshToken(((TokenWithRefresh) accessToken).getRefreshToken());
}
getCoreBackendService().setUserService(userService);
}
protected ServiceCredentials getServiceCredentials(OAuthService oAuthService, ServiceResponse service, Token accessToken) {
ServiceCredentials serviceCredentials = new ServiceCredentials();
serviceCredentials.setAccessToken(accessToken.getToken());
serviceCredentials.setAccessTokenSecret(accessToken.getSecret());
if (accessToken instanceof TokenWithRefresh) {
serviceCredentials.setRefreshToken(((TokenWithRefresh) accessToken).getRefreshToken());
}
String serviceIdentity = getServiceIdentity(oAuthService, service, accessToken);
serviceCredentials.setServiceIdentity(serviceIdentity);
return serviceCredentials;
}
}
| ickbackendprotocol/oauth/src/main/java/com/ickstream/protocol/oauth/AbstractOAuthService.java | /*
* Copyright (c) 2013-2014, ickStream GmbH
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of ickStream nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.ickstream.protocol.oauth;
import com.ickstream.protocol.backend.common.ServiceCredentials;
import com.ickstream.protocol.backend.common.UnauthorizedAccessException;
import com.ickstream.protocol.common.exception.UnauthorizedException;
import com.ickstream.protocol.service.corebackend.*;
import org.scribe.builder.ServiceBuilder;
import org.scribe.builder.api.Api;
import org.scribe.model.OAuthConstants;
import org.scribe.model.Token;
import org.scribe.model.Verifier;
import org.scribe.oauth.OAuth10aServiceImpl;
import org.scribe.oauth.OAuthService;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
public abstract class AbstractOAuthService extends HttpServlet {
protected abstract CoreBackendService getCoreBackendService();
protected boolean isAddService(HttpServletRequest req) {
return req.getRequestURI().endsWith("/addservice");
}
protected boolean isAddIdentity(HttpServletRequest req) {
return req.getRequestURI().endsWith("/addidentity");
}
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
if (req.getParameter(OAuthConstants.REDIRECT_URI) != null) {
req.getSession().setAttribute(OAuthConstants.REDIRECT_URI, req.getParameter(OAuthConstants.REDIRECT_URI));
}
if (req.getParameter(OAuthConstants.CLIENT_ID) != null) {
req.getSession().setAttribute(OAuthConstants.CLIENT_ID, req.getParameter(OAuthConstants.CLIENT_ID));
}
if (isAddService(req) && req.getParameter("user_code") != null) {
req.getSession().setAttribute("service_user_code", req.getParameter("user_code"));
} else if (isAddIdentity(req) && req.getParameter("user_code") != null) {
req.getSession().setAttribute("identity_user_code", req.getParameter("user_code"));
}
if (req.getParameter(OAuthConstants.TOKEN) == null && req.getParameter(OAuthConstants.CODE) == null && req.getParameter("error") == null) {
OAuthService service;
if (getScope() != null) {
service = new ServiceBuilder()
.provider(getApiImplementation())
.apiKey(getApiKey())
.apiSecret(getApiSecret())
.scope(getScope())
.callback(getRedirectURL())
.build();
} else {
service = new ServiceBuilder()
.provider(getApiImplementation())
.apiKey(getApiKey())
.apiSecret(getApiSecret())
.callback(getRedirectURL())
.build();
}
Token requestToken = null;
if (service instanceof OAuth10aServiceImpl) {
requestToken = service.getRequestToken();
req.getSession().setAttribute("secret", requestToken.getSecret());
}
String userAuthURL = service.getAuthorizationUrl(requestToken);
resp.sendRedirect(userAuthURL);
} else if (req.getParameter(OAuthConstants.CODE) != null || req.getParameter(OAuthConstants.TOKEN) != null) {
OAuthService oAuthService;
if (getScope() != null) {
oAuthService = new ServiceBuilder()
.provider(getApiImplementation())
.apiKey(getApiKey())
.apiSecret(getApiSecret())
.scope(getScope())
.callback(getRedirectURL())
.build();
} else {
oAuthService = new ServiceBuilder()
.provider(getApiImplementation())
.apiKey(getApiKey())
.apiSecret(getApiSecret())
.callback(getRedirectURL())
.build();
}
String tokenCode = req.getParameter(OAuthConstants.CODE);
if (tokenCode == null) {
tokenCode = req.getParameter(OAuthConstants.TOKEN);
}
String verifierCode = req.getParameter(OAuthConstants.CODE);
if (verifierCode == null) {
verifierCode = req.getParameter(OAuthConstants.VERIFIER);
}
String secret = getApiSecret();
if (oAuthService instanceof OAuth10aServiceImpl) {
secret = req.getSession().getAttribute("secret").toString();
}
Token token = new Token(tokenCode, secret);
Verifier verifier = new Verifier(verifierCode);
Token accessToken = oAuthService.getAccessToken(token, verifier);
if (req.getSession().getAttribute(OAuthConstants.REDIRECT_URI) != null && req.getSession().getAttribute(OAuthConstants.CLIENT_ID) != null) {
ApplicationResponse application = getCoreBackendService().getApplication();
if (accessToken != null && application != null && application.getActive()) {
ServiceResponse service = getCoreBackendService().getService();
String identity = getUserIdentity(oAuthService, service, accessToken);
String type = getIdentityType();
boolean linkService = false;
String identityUserCode = null;
if (req.getSession().getAttribute("service_user_code") != null) {
linkService = true;
identityUserCode = req.getSession().getAttribute("service_user_code").toString();
} else if (req.getSession().getAttribute("identity_user_code") != null) {
identityUserCode = req.getSession().getAttribute("identity_user_code").toString();
}
req.getSession().removeAttribute("service_user_code");
req.getSession().removeAttribute("identity_user_code");
if (identityUserCode != null) {
UserResponse user = null;
try {
user = getCoreBackendService().addIdentityToUser(identityUserCode, type, identity);
if (user != null && linkService) {
linkUserService(oAuthService, service, accessToken, user.getId());
}
if (user == null && !linkService) {
resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?error=identity_already_used");
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
} else {
resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?status=success");
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
}
} catch (Exception e) {
if (e.getCause() == null || !(e.getCause() instanceof UnauthorizedException)) {
e.printStackTrace();
}
resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?error=access_denied");
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
}
} else if (type != null && type.equals("email") && identity != null) {
ServiceCredentials serviceCredentials = getServiceCredentials(oAuthService, service, accessToken);
String code = getCoreBackendService().createAuthorizationCodeForIdentity((String) req.getSession().getAttribute(OAuthConstants.CLIENT_ID), type, identity, serviceCredentials.getServiceIdentity(), serviceCredentials.getAccessToken(), serviceCredentials.getAccessTokenSecret(), serviceCredentials.getRefreshToken(), serviceCredentials.getCustomData(), req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString());
UserResponse user = getCoreBackendService().getUserByIdentity(type, identity);
if (user == null) {
String styleSheetUrl = System.getProperty("ickstream-core-stylesheet-url", "https://api.ickstream.com/ickstream-cloud-core");
resp.setContentType("text/html");
resp.setCharacterEncoding("utf-8");
resp.getWriter().append("<HTML>\n<HEAD>\n")
.append(OAuthServiceHelper.getDeviceOptimizedStylesheetHeader())
.append("</HEAD>\n<BODY>\n");
resp.getWriter().append("<div id=\"question\">No previous account exist, do you want to create a new one ?</div>");
resp.getWriter().append("<FORM action=\"").append(req.getRequestURI()).append("\" method=\"post\">");
resp.getWriter().append("<INPUT hidden=\"hidden\" name=\"" + OAuthConstants.CODE + "\" value=\"").append(code).append("\">");
resp.getWriter().append("<INPUT type=\"submit\" name=\"cancel\" value=\"Cancel\">");
resp.getWriter().append("<INPUT type=\"submit\" name=\"create\" value=\"Create new account\">");
resp.getWriter().append("</FORM>");
resp.getWriter().append("</BODY></HTML>");
} else {
resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?" + OAuthConstants.CODE + "=" + code);
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
}
} else {
resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?error=unsupported_response_type");
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
}
} else if (application == null) {
resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?error=unauthorized_client");
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
} else if (accessToken == null) {
resp.sendRedirect(req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString() + "?error=access_denied");
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
}
} else {
writeOutputWithoutRedirectURI(req, resp, oAuthService, accessToken);
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
}
} else {
resp.setContentType("text/html");
resp.getWriter().append("ERROR");
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
}
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
if (req.getSession().getAttribute(OAuthConstants.REDIRECT_URI) != null) {
String redirect_uri = req.getSession().getAttribute(OAuthConstants.REDIRECT_URI).toString();
req.getSession().removeAttribute(OAuthConstants.REDIRECT_URI);
req.getSession().removeAttribute(OAuthConstants.CLIENT_ID);
String code = req.getParameter(OAuthConstants.CODE);
Boolean create = req.getParameter("create") != null;
if (code != null && create != null && create) {
resp.sendRedirect(redirect_uri + "?" + OAuthConstants.CODE + "=" + code);
} else {
resp.sendRedirect(redirect_uri + "?error=access_denied");
}
}
}
protected void writeOutputWithoutRedirectURI(HttpServletRequest req, HttpServletResponse resp, OAuthService oAuthService, Token accessToken) throws IOException {
resp.setContentType("text/html");
resp.setCharacterEncoding("utf-8");
resp.getWriter().append("<HTML>");
resp.getWriter().append("<BODY>Welcome to ickStream\nThis is an internal page which you are not supposed to enter\n");
resp.getWriter().append("</BODY>");
resp.getWriter().append("</HTML>");
}
protected String getRedirectURL() {
AuthenticationProviderResponse provider = getCoreBackendService().getAuthenticationProvider();
if (provider != null) {
return provider.getRedirectUrl();
}
return null;
}
protected String getApiSecret() {
AuthenticationProviderResponse provider = getCoreBackendService().getAuthenticationProvider();
if (provider != null && provider.getApiSecret() != null && provider.getApiSecret().trim().length() > 0) {
return provider.getApiSecret();
}
return null;
}
protected String getApiKey() {
AuthenticationProviderResponse provider = getCoreBackendService().getAuthenticationProvider();
if (provider != null && provider.getApiKey() != null && provider.getApiKey().trim().length() > 0) {
return provider.getApiKey();
}
return null;
}
protected void refreshAccessToken(UserServiceResponse userService) throws UnauthorizedAccessException {
if (userService.getRefreshToken() == null) {
throw new IllegalArgumentException("user service must have refresh token");
}
OAuthService oAuthService;
if (getScope() != null) {
oAuthService = new ServiceBuilder()
.provider(getApiImplementation())
.apiKey(getApiKey())
.apiSecret(getApiSecret())
.scope(getScope())
.callback(getRedirectURL())
.build();
} else {
oAuthService = new ServiceBuilder()
.provider(getApiImplementation())
.apiKey(getApiKey())
.apiSecret(getApiSecret())
.callback(getRedirectURL())
.build();
}
if (oAuthService instanceof OAuth20ServiceImpl) {
Token token = new Token(userService.getRefreshToken(), null);
Token accessToken = ((OAuth20ServiceImpl) oAuthService).refreshAccessToken(token);
ApplicationResponse application = getCoreBackendService().getApplication();
if (accessToken != null && application != null && application.getActive()) {
UserServiceRequest updatedUserService = new UserServiceRequest(userService);
setUserService(updatedUserService, accessToken);
userService.setAccessToken(updatedUserService.getAccessToken());
userService.setAccessTokenSecret(updatedUserService.getAccessTokenSecret());
userService.setRefreshToken(updatedUserService.getRefreshToken());
} else {
throw new UnauthorizedAccessException();
}
} else {
throw new RuntimeException("OAuth service must support refresh tokens");
}
}
protected abstract String getScope();
protected abstract Class<? extends Api> getApiImplementation();
protected abstract String getIdentityType();
protected abstract String getUserIdentity(OAuthService oAuthService, ServiceResponse service, Token accessToken);
protected String getServiceIdentity(OAuthService oAuthService, ServiceResponse service, Token accessToken) {
return getUserIdentity(oAuthService, service, accessToken);
}
protected void linkUserService(OAuthService oAuthService, ServiceResponse service, Token accessToken, String userId) {
String identity = getServiceIdentity(oAuthService, service, accessToken);
UserServiceResponse userService = getCoreBackendService().getUserServiceByUser(userId);
if (userService == null) {
userService = new UserServiceResponse();
userService.setUserId(userId);
}
UserServiceRequest updatedUserService = new UserServiceRequest(userService);
updatedUserService.setIdentity(identity);
setUserService(updatedUserService, accessToken);
}
protected void setUserService(UserServiceRequest userService, Token accessToken) {
userService.setAccessToken(accessToken.getToken());
userService.setAccessTokenSecret(accessToken.getSecret());
if (accessToken instanceof TokenWithRefresh) {
userService.setRefreshToken(((TokenWithRefresh) accessToken).getRefreshToken());
}
getCoreBackendService().setUserService(userService);
}
protected ServiceCredentials getServiceCredentials(OAuthService oAuthService, ServiceResponse service, Token accessToken) {
ServiceCredentials serviceCredentials = new ServiceCredentials();
serviceCredentials.setAccessToken(accessToken.getToken());
serviceCredentials.setAccessTokenSecret(accessToken.getSecret());
if (accessToken instanceof TokenWithRefresh) {
serviceCredentials.setRefreshToken(((TokenWithRefresh) accessToken).getRefreshToken());
}
String serviceIdentity = getServiceIdentity(oAuthService, service, accessToken);
serviceCredentials.setServiceIdentity(serviceIdentity);
return serviceCredentials;
}
}
| Corrected so license has to be confirmed before user account is created
| ickbackendprotocol/oauth/src/main/java/com/ickstream/protocol/oauth/AbstractOAuthService.java | Corrected so license has to be confirmed before user account is created |
|
Java | mit | a070f8022e452e9fee85d4160114bb37993d0597 | 0 | mgainty/jsch | OpenSSHConfig.java | /* -*-mode:java; c-basic-offset:2; indent-tabs-mode:nil -*- */
/*
Copyright (c) 2013-2015 ymnk, JCraft,Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the distribution.
3. The names of the authors may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jcraft.jsch;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.FileReader;
import java.io.BufferedReader;
import java.io.IOException;
import java.util.Hashtable;
import java.util.Vector;
/**
* This class implements ConfigRepository interface, and parses
* OpenSSH's configuration file. The following keywords will be recognized,
* <ul>
* <li>Host</li>
* <li>User</li>
* <li>Hostname</li>
* <li>Port</li>
* <li>PreferredAuthentications</li>
* <li>IdentityFile</li>
* <li>NumberOfPasswordPrompts</li>
* <li>ConnectTimeout</li>
* <li>HostKeyAlias</li>
* <li>UserKnownHostsFile</li>
* <li>KexAlgorithms</li>
* <li>HostKeyAlgorithms</li>
* <li>Ciphers</li>
* <li>Macs</li>
* <li>Compression</li>
* <li>CompressionLevel</li>
* <li>ForwardAgent</li>
* <li>RequestTTY</li>
* <li>ServerAliveInterval</li>
* <li>LocalForward</li>
* <li>RemoteForward</li>
* <li>ClearAllForwardings</li>
* </ul>
*
* @see ConfigRepository
*/
public class OpenSSHConfig implements ConfigRepository {

  /**
   * Parses the given string, and returns an instance of ConfigRepository.
   *
   * @param conf string, which includes OpenSSH's config
   * @return an instanceof OpenSSHConfig
   */
  public static OpenSSHConfig parse(String conf) throws IOException {
    Reader r = new StringReader(conf);
    try {
      return new OpenSSHConfig(r);
    }
    finally {
      r.close();
    }
  }

  /**
   * Parses the given file, and returns an instance of ConfigRepository.
   *
   * @param file OpenSSH's config file
   * @return an instanceof OpenSSHConfig
   */
  public static OpenSSHConfig parseFile(String file) throws IOException {
    // Util.checkTilde expands a leading "~" to the user's home directory.
    Reader r = new FileReader(Util.checkTilde(file));
    try {
      return new OpenSSHConfig(r);
    }
    finally {
      r.close();
    }
  }

  OpenSSHConfig(Reader r) throws IOException {
    _parse(r);
  }

  // Maps a raw "Host" pattern line (possibly several whitespace separated
  // patterns) to a Vector of String[2] {keyword, argument} pairs. The
  // empty-string key holds settings appearing before the first Host line.
  // Raw types are kept deliberately for old-JVM compatibility (see the
  // Vector/*<String[]>*/ hint below).
  private final Hashtable config = new Hashtable();
  // Host pattern strings in file order; element 0 is the global "" bucket.
  private final Vector hosts = new Vector();

  // Reads the configuration line by line, collecting keyword/argument pairs
  // into per-Host buckets. Blank lines and '#' comment lines are skipped.
  private void _parse(Reader r) throws IOException {
    BufferedReader br = new BufferedReader(r);

    String host = "";
    Vector/*<String[]>*/ kv = new Vector();
    String l = null;

    while((l = br.readLine()) != null){
      l = l.trim();
      if(l.length() == 0 || l.startsWith("#"))
        continue;

      // Keyword and argument may be separated by whitespace or '='.
      String[] key_value = l.split("[= \t]", 2);
      for(int i = 0; i < key_value.length; i++)
        key_value[i] = key_value[i].trim();

      // Lines without an argument are ignored.
      if(key_value.length <= 1)
        continue;

      if(key_value[0].equals("Host")){
        // Flush the bucket collected so far and start a new one for this Host.
        config.put(host, kv);
        hosts.addElement(host);
        host = key_value[1];
        kv = new Vector();
      }
      else {
        kv.addElement(key_value);
      }
    }
    config.put(host, kv);
    hosts.addElement(host);
  }

  /** Returns a Config view resolving settings for the given host name. */
  public Config getConfig(String host) {
    return new MyConfig(host);
  }

  // Translates JSch configuration keys to the corresponding OpenSSH keywords
  // before the lookup in find().
  private static final Hashtable keymap = new Hashtable();
  static {
    keymap.put("kex", "KexAlgorithms");
    keymap.put("server_host_key", "HostKeyAlgorithms");
    keymap.put("cipher.c2s", "Ciphers");
    keymap.put("cipher.s2c", "Ciphers");
    keymap.put("mac.c2s", "Macs");
    keymap.put("mac.s2c", "Macs");
    keymap.put("compression.s2c", "Compression");
    keymap.put("compression.c2s", "Compression");
    keymap.put("compression_level", "CompressionLevel");
    keymap.put("MaxAuthTries", "NumberOfPasswordPrompts");
  }

  // Config implementation bound to one host name; resolves values from the
  // global bucket and every Host section whose pattern matches that name.
  class MyConfig implements Config {

    private String host;
    // Applicable keyword/argument buckets: the global (pre-Host) bucket first,
    // then every matching Host section in file order.
    private Vector _configs = new Vector();

    MyConfig(String host){
      this.host = host;

      _configs.addElement(config.get(""));

      byte[] _host = Util.str2byte(host);
      if(hosts.size() > 1){
        for(int i = 1; i < hosts.size(); i++){
          String patterns[] = ((String)hosts.elementAt(i)).split("[ \t]");
          for(int j = 0; j < patterns.length; j++){
            // A leading '!' negates the pattern.
            boolean negate = false;
            String foo = patterns[j].trim();
            if(foo.startsWith("!")){
              negate = true;
              foo = foo.substring(1).trim();
            }
            // Util.glob performs OpenSSH-style wildcard matching.
            if(Util.glob(Util.str2byte(foo), _host)){
              if(!negate){
                _configs.addElement(config.get((String)hosts.elementAt(i)));
              }
            }
            else if(negate){
              // A negated pattern that does NOT match also selects the section.
              _configs.addElement(config.get((String)hosts.elementAt(i)));
            }
          }
        }
      }
    }

    // Returns the first value found for the key (after keymap translation),
    // searching the global bucket first and then matching Host sections in
    // file order, so earlier definitions win. Keys compare case-insensitively.
    private String find(String key) {
      if(keymap.get(key)!=null) {
        key = (String)keymap.get(key);
      }
      key = key.toUpperCase();
      String value = null;
      for(int i = 0; i < _configs.size(); i++) {
        Vector v = (Vector)_configs.elementAt(i);
        for(int j = 0; j < v.size(); j++) {
          String[] kv = (String[])v.elementAt(j);
          if(kv[0].toUpperCase().equals(key)) {
            value = kv[1];
            break;
          }
        }
        if(value != null)
          break;
      }
      return value;
    }

    // Collects every value for the key across all applicable sections
    // (no keymap translation here). A repeated value is moved to the end so
    // the result contains no duplicates.
    private String[] multiFind(String key) {
      key = key.toUpperCase();
      Vector value = new Vector();
      for(int i = 0; i < _configs.size(); i++) {
        Vector v = (Vector)_configs.elementAt(i);
        for(int j = 0; j < v.size(); j++) {
          String[] kv = (String[])v.elementAt(j);
          if(kv[0].toUpperCase().equals(key)) {
            String foo = kv[1];
            if(foo != null) {
              value.remove(foo);
              value.addElement(foo);
            }
          }
        }
      }
      String[] result = new String[value.size()];
      value.toArray(result);
      return result;
    }

    public String getHostname(){ return find("Hostname"); }
    public String getUser(){ return find("User"); }

    // Returns the configured port, or -1 when absent or not a number.
    public int getPort(){
      String foo = find("Port");
      int port = -1;
      try {
        port = Integer.parseInt(foo);
      }
      catch(NumberFormatException e){
        // wrong format
      }
      return port;
    }

    public String getValue(String key){
      // The OpenSSH Compression yes/no flag is mapped onto JSch's ordered
      // compression-algorithm preference lists.
      if(key.equals("compression.s2c") ||
         key.equals("compression.c2s")) {
        String foo = find(key);
        if(foo == null || foo.equals("no"))
          return "none,[email protected],zlib";
        return "[email protected],zlib,none";
      }
      return find(key);
    }

    public String[] getValues(String key){ return multiFind(key); }
  }
}
| Delete OpenSSHConfig.java | OpenSSHConfig.java | Delete OpenSSHConfig.java |
||
Java | mpl-2.0 | 2672ae97103ec41a47353058077423c7b2fb1915 | 0 | choreos/choreos_middleware,choreos/choreos_middleware,choreos/enactment_engine,choreos/choreos_middleware,choreos/enactment_engine,choreos/enactment_engine,choreos/enactment_engine | DeploymentManager/src/main/java/org/ow2/choreos/deployment/nodes/cloudprovider/CloudProviderSelector.java | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.ow2.choreos.deployment.nodes.cloudprovider;
public class CloudProviderSelector {

    /**
     * Resolves the cloud provider implementation matching the given cloud
     * identifier: identifiers ending in "aws" select the Amazon provider,
     * identifiers ending in "openstack" select the OpenStack (Keystone)
     * provider, and anything else falls back to the fixed provider.
     */
    public CloudProvider newCloudProvider(String cloud) {
        if (cloud.endsWith("aws")) {
            return new AWSCloudProvider();
        }
        if (cloud.endsWith("openstack")) {
            return new OpenStackKeystoneCloudProvider();
        }
        return new FixedCloudProvider();
    }
}
| deleting useless class
| DeploymentManager/src/main/java/org/ow2/choreos/deployment/nodes/cloudprovider/CloudProviderSelector.java | deleting useless class |
||
Java | mit | 907873e3c07fe5def7d20c83b1b7638716acb5cd | 0 | ljshj/actor-platform,ljshj/actor-platform,ljshj/actor-platform,ljshj/actor-platform,ljshj/actor-platform,ljshj/actor-platform,ljshj/actor-platform | package im.actor.model.network.api;
import java.io.IOException;
import java.util.HashMap;
import java.util.Random;
import im.actor.model.Networking;
import im.actor.model.api.parser.RpcParser;
import im.actor.model.droidkit.actors.Actor;
import im.actor.model.droidkit.actors.ActorCreator;
import im.actor.model.droidkit.actors.ActorRef;
import im.actor.model.droidkit.actors.ActorSystem;
import im.actor.model.droidkit.actors.Environment;
import im.actor.model.droidkit.actors.Props;
import im.actor.model.log.Log;
import im.actor.model.network.ActorApiCallback;
import im.actor.model.network.AuthKeyStorage;
import im.actor.model.network.Endpoints;
import im.actor.model.network.RpcCallback;
import im.actor.model.network.RpcException;
import im.actor.model.network.RpcInternalException;
import im.actor.model.network.mtp.AuthIdRetriever;
import im.actor.model.network.mtp.MTProto;
import im.actor.model.network.mtp.MTProtoCallback;
import im.actor.model.network.mtp.entity.ProtoSerializer;
import im.actor.model.network.mtp.entity.ProtoStruct;
import im.actor.model.network.mtp.entity.rpc.Push;
import im.actor.model.network.mtp.entity.rpc.RpcError;
import im.actor.model.network.mtp.entity.rpc.RpcFloodWait;
import im.actor.model.network.mtp.entity.rpc.RpcInternalError;
import im.actor.model.network.mtp.entity.rpc.RpcOk;
import im.actor.model.network.mtp.entity.rpc.RpcRequest;
import im.actor.model.network.parser.Request;
import im.actor.model.network.parser.Response;
import im.actor.model.network.parser.RpcScope;
import im.actor.model.util.AtomicLongCompat;
/**
* Created by ex3ndr on 08.02.15.
*/
public class ApiBroker extends Actor {
// Spawns the broker as an actor at path "api/broker" inside the shared actor system.
public static ActorRef get(final Endpoints endpoints, final AuthKeyStorage keyStorage, final ActorApiCallback callback,
final Networking networking) {
return ActorSystem.system().actorOf(Props.create(ApiBroker.class, new ActorCreator<ApiBroker>() {
@Override
public ApiBroker create() {
return new ApiBroker(endpoints, keyStorage, callback, networking);
}
}), "api/broker");
}
private static final String TAG = "ApiBroker";
// Generator for broker-local ("public") request ids; starts at 1, 0 is used as "unset".
private static final AtomicLongCompat NEXT_RPC_ID = Environment.createAtomicLong(1);
private final Endpoints endpoints;
private final AuthKeyStorage keyStorage;
private final ActorApiCallback callback;
// In-flight requests keyed by their public request id.
private final HashMap<Long, RequestHolder> requests = new HashMap<Long, RequestHolder>();
// Maps MTProto message ids (mid) back to public request ids (rid).
private final HashMap<Long, Long> idMap = new HashMap<Long, Long>();
private MTProto proto;
private Networking networking;
public ApiBroker(Endpoints endpoints, AuthKeyStorage keyStorage, ActorApiCallback callback,
Networking networking) {
this.endpoints = endpoints;
this.keyStorage = keyStorage;
this.callback = callback;
this.networking = networking;
}
@Override
public void preStart() {
// Reuse a persisted auth key when available (0 means "no key yet"); otherwise
// negotiate a fresh one before creating the MTProto connection.
if (keyStorage.getAuthKey() == 0) {
self().send(new RequestAuthId());
} else {
self().send(new InitMTProto(keyStorage.getAuthKey()));
}
}
@Override
public void onReceive(Object message) {
// Single-threaded actor dispatch: each internal message type maps to one handler.
if (message instanceof RequestAuthId) {
requestAuthId();
} else if (message instanceof InitMTProto) {
createMtProto(((InitMTProto) message).getAuthId());
} else if (message instanceof PerformRequest) {
performRequest(
NEXT_RPC_ID.getAndIncrement(),
((PerformRequest) message).getMessage(),
((PerformRequest) message).getCallback());
} else if (message instanceof CancelRequest) {
cancelRequest(((CancelRequest) message).getRandomId());
} else if (message instanceof ProtoResponse) {
processResponse(((ProtoResponse) message).getResponseId(), ((ProtoResponse) message).getData());
} else if (message instanceof ForceResend) {
forceResend(((ForceResend) message).id);
} else if (message instanceof ProtoUpdate) {
processUpdate(((ProtoUpdate) message).getData());
}
}
// Asynchronously negotiates a new auth key; retries forever on failure.
private void requestAuthId() {
Log.d(TAG, "Creating auth key...");
AuthIdRetriever.requestAuthId(endpoints, networking, new AuthIdRetriever.AuthIdCallback() {
@Override
public void onSuccess(long authId) {
Log.d(TAG, "Key created");
self().send(new InitMTProto(authId));
}
@Override
public void onFailure() {
Log.d(TAG, "Key creation failure");
// TODO: Add back off
self().send(new RequestAuthId());
}
});
}
// Persists the auth key, builds the MTProto connection and replays any requests
// that were queued before the connection existed.
private void createMtProto(long key) {
Log.d(TAG, "Creating proto");
keyStorage.saveAuthKey(key);
proto = new MTProto(key, new Random().nextLong(), endpoints, new MTProtoCallback() {
@Override
public void onRpcResponse(long mid, byte[] content) {
// Hop back onto the actor thread; MTProto callbacks arrive on network threads.
self().send(new ProtoResponse(mid, content));
}
@Override
public void onUpdate(byte[] content) {
self().send(new ProtoUpdate(content));
}
@Override
public void onAuthKeyInvalidated(long authKey) {
callback.onAuthIdInvalidated(authKey);
}
@Override
public void onSessionCreated() {
callback.onNewSessionCreated();
}
}, networking);
// Re-send every request that accumulated while proto was null.
for (RequestHolder holder : requests.values()) {
holder.protoId = proto.sendRpcMessage(holder.message);
idMap.put(holder.protoId, holder.publicId);
// Log.d(TAG, holder.message + " rid#" + holder.publicId + " <- mid#" + holder.protoId);
}
}
// Registers the request and sends it immediately when the connection is up;
// otherwise it stays queued until createMtProto() replays it.
private void performRequest(long randomId, Request message, RpcCallback callback) {
Log.d(TAG, "-> request#" + randomId + ": " + message);
// Log.d(TAG, message + " rid#" + randomId);
RequestHolder holder = new RequestHolder(
randomId,
new RpcRequest(message.getHeaderKey(), message.toByteArray()),
callback);
requests.put(holder.publicId, holder);
if (proto != null) {
long mid = proto.sendRpcMessage(holder.message);
holder.protoId = mid;
idMap.put(mid, randomId);
// Log.d(TAG, message + " rid#" + randomId + " <- mid#" + mid);
}
}
// Decodes an MTProto-level response, resolves it to a pending request and
// completes, fails, or schedules a resend depending on the payload type.
private void processResponse(long mid, byte[] content) {
ProtoStruct protoStruct;
try {
protoStruct = ProtoSerializer.readRpcResponsePayload(content);
} catch (IOException e) {
e.printStackTrace();
Log.w(TAG, "Broken response mid#" + mid);
return;
}
// Log.w(TAG, protoStruct + " mid#" + mid);
long rid;
if (idMap.containsKey(mid)) {
rid = idMap.get(mid);
} else {
// Unknown mid: response to a cancelled or already-completed request.
return;
}
RequestHolder holder;
if (requests.containsKey(rid)) {
holder = requests.get(rid);
} else {
return;
}
if (protoStruct instanceof RpcOk) {
RpcOk ok = (RpcOk) protoStruct;
requests.remove(rid);
if (holder.protoId != 0) {
idMap.remove(holder.protoId);
}
Response response;
try {
response = (Response) new RpcParser().read(ok.responseType, ok.payload);
} catch (IOException e) {
// Note: the request has already been removed from the tables at this point,
// so an unparseable payload silently drops the callback.
e.printStackTrace();
return;
}
Log.d(TAG, "<- response#" + holder.publicId + ": " + response);
holder.callback.onResult(response);
} else if (protoStruct instanceof RpcError) {
RpcError e = (RpcError) protoStruct;
requests.remove(rid);
if (holder.protoId != 0) {
idMap.remove(holder.protoId);
}
Log.w(TAG, "<- error#" + holder.publicId + ": " + e.errorTag + " " + e.errorCode + " " + e.userMessage);
holder.callback.onError(new RpcException(e.errorTag, e.errorCode, e.userMessage, e.canTryAgain, e.relatedData));
} else if (protoStruct instanceof RpcInternalError) {
RpcInternalError e = ((RpcInternalError) protoStruct);
Log.d(TAG, "<- internal_error#" + holder.publicId);
if (e.isCanTryAgain()) {
// Server says retry later: keep the holder and schedule a resend.
self().send(new ForceResend(rid), e.getTryAgainDelay() * 1000L);
} else {
requests.remove(rid);
if (holder.protoId != 0) {
idMap.remove(holder.protoId);
}
holder.callback.onError(new RpcInternalException());
}
} else if (protoStruct instanceof RpcFloodWait) {
RpcFloodWait f = (RpcFloodWait) protoStruct;
Log.d(TAG, "<- flood_wait#" + holder.publicId + " " + f.getDelay() + " sec");
self().send(new ForceResend(rid), f.getDelay() * 1000L);
} else {
// Unknown
}
}
// Cancels the previous MTProto message (if any) and re-sends the stored request.
private void forceResend(long randomId) {
RequestHolder holder = requests.get(randomId);
if (holder != null) {
if (holder.protoId != 0) {
idMap.remove(holder.protoId);
proto.cancelRpc(holder.protoId);
}
// NOTE(review): the mid returned here is not stored back into holder.protoId /
// idMap, so the resent request's response may not be matched — confirm intended.
proto.sendRpcMessage(holder.message);
}
}
// Drops a pending request and tells the transport to cancel it when possible.
private void cancelRequest(long randomId) {
RequestHolder holder = requests.get(randomId);
if (holder != null) {
requests.remove(randomId);
if (holder.protoId != 0 && proto != null) {
idMap.remove(holder.protoId);
proto.cancelRpc(holder.protoId);
}
}
}
// Decodes a server-initiated push and forwards the parsed update box to the callback.
private void processUpdate(byte[] content) {
ProtoStruct protoStruct;
try {
protoStruct = ProtoSerializer.readUpdate(content);
} catch (IOException e) {
e.printStackTrace();
Log.w(TAG, "Broken mt update");
return;
}
if (protoStruct instanceof Push) {
int type = ((Push) protoStruct).updateType;
byte[] body = ((Push) protoStruct).body;
RpcScope updateBox;
try {
updateBox = new RpcParser().read(type, body);
} catch (IOException e) {
e.printStackTrace();
Log.w(TAG, "Broken update box");
return;
}
// Log.w(TAG, "Box: " + updateBox + "");
callback.onUpdateReceived(updateBox);
} else {
// Unknown
}
}
// Public actor message: submit a request with its completion callback.
public static class PerformRequest {
private Request message;
private RpcCallback callback;
public PerformRequest(Request message, RpcCallback callback) {
this.message = message;
this.callback = callback;
}
public Request getMessage() {
return message;
}
public RpcCallback getCallback() {
return callback;
}
}
// Public actor message: cancel a previously submitted request by its public id.
public static class CancelRequest {
private long randomId;
public CancelRequest(long randomId) {
this.randomId = randomId;
}
public long getRandomId() {
return randomId;
}
}
// Internal message: (re)start auth-key negotiation.
private class RequestAuthId {
}
// Internal message: build the MTProto connection with the given auth key.
private class InitMTProto {
private long authId;
public InitMTProto(long authId) {
this.authId = authId;
}
public long getAuthId() {
return authId;
}
}
// Internal message: raw RPC response bytes for MTProto message id `responseId`.
private class ProtoResponse {
private long responseId;
private byte[] data;
public ProtoResponse(long responseId, byte[] data) {
this.responseId = responseId;
this.data = data;
}
public long getResponseId() {
return responseId;
}
public byte[] getData() {
return data;
}
}
// Internal message: raw push-update bytes from the transport.
private class ProtoUpdate {
private byte[] data;
public ProtoUpdate(byte[] data) {
this.data = data;
}
public byte[] getData() {
return data;
}
}
// Internal message: resend the request with public id `id` (flood wait / retryable error).
private class ForceResend {
private long id;
public ForceResend(long id) {
this.id = id;
}
public long getId() {
return id;
}
}
// Bookkeeping for one in-flight request: wire message, public id, completion
// callback, and the current MTProto message id (0 while not yet sent).
private class RequestHolder {
private final RpcRequest message;
private final long publicId;
private final RpcCallback callback;
private long protoId;
private RequestHolder(long publicId, RpcRequest message, RpcCallback callback) {
this.message = message;
this.publicId = publicId;
this.callback = callback;
}
}
} | actor-common/src/main/java/im/actor/model/network/api/ApiBroker.java | package im.actor.model.network.api;
import im.actor.model.Networking;
import im.actor.model.droidkit.actors.*;
import im.actor.model.util.AtomicLongCompat;
import im.actor.model.api.parser.RpcParser;
import im.actor.model.log.Log;
import im.actor.model.network.*;
import im.actor.model.network.mtp.AuthIdRetriever;
import im.actor.model.network.mtp.MTProto;
import im.actor.model.network.mtp.MTProtoCallback;
import im.actor.model.network.mtp.entity.ProtoSerializer;
import im.actor.model.network.mtp.entity.ProtoStruct;
import im.actor.model.network.mtp.entity.rpc.*;
import im.actor.model.network.parser.Request;
import im.actor.model.network.parser.Response;
import im.actor.model.network.parser.RpcScope;
import java.io.IOException;
import java.util.HashMap;
import java.util.Random;
/**
* Created by ex3ndr on 08.02.15.
*/
public class ApiBroker extends Actor {
public static ActorRef get(final Endpoints endpoints, final AuthKeyStorage keyStorage, final ActorApiCallback callback,
final Networking networking) {
return ActorSystem.system().actorOf(Props.create(ApiBroker.class, new ActorCreator<ApiBroker>() {
@Override
public ApiBroker create() {
return new ApiBroker(endpoints, keyStorage, callback, networking);
}
}), "api/broker");
}
private static final String TAG = "ApiBroker";
private static final AtomicLongCompat NEXT_RPC_ID = Environment.createAtomicLong(1);
private final Endpoints endpoints;
private final AuthKeyStorage keyStorage;
private final ActorApiCallback callback;
private final HashMap<Long, RequestHolder> requests = new HashMap<Long, RequestHolder>();
private final HashMap<Long, Long> idMap = new HashMap<Long, Long>();
private MTProto proto;
private Networking networking;
public ApiBroker(Endpoints endpoints, AuthKeyStorage keyStorage, ActorApiCallback callback,
Networking networking) {
this.endpoints = endpoints;
this.keyStorage = keyStorage;
this.callback = callback;
this.networking = networking;
}
@Override
public void preStart() {
if (keyStorage.getAuthKey() == 0) {
self().send(new RequestAuthId());
} else {
self().send(new InitMTProto(keyStorage.getAuthKey()));
}
}
@Override
public void onReceive(Object message) {
if (message instanceof RequestAuthId) {
requestAuthId();
} else if (message instanceof InitMTProto) {
createMtProto(((InitMTProto) message).getAuthId());
} else if (message instanceof PerformRequest) {
performRequest(
NEXT_RPC_ID.getAndIncrement(),
((PerformRequest) message).getMessage(),
((PerformRequest) message).getCallback());
} else if (message instanceof CancelRequest) {
cancelRequest(((CancelRequest) message).getRandomId());
} else if (message instanceof ProtoResponse) {
processResponse(((ProtoResponse) message).getResponseId(), ((ProtoResponse) message).getData());
} else if (message instanceof ForceResend) {
forceResend(((ForceResend) message).id);
} else if (message instanceof ProtoUpdate) {
processUpdate(((ProtoUpdate) message).getData());
}
}
private void requestAuthId() {
Log.d(TAG, "Creating auth key...");
AuthIdRetriever.requestAuthId(endpoints, networking, new AuthIdRetriever.AuthIdCallback() {
@Override
public void onSuccess(long authId) {
Log.d(TAG, "Key created");
self().send(new InitMTProto(authId));
}
@Override
public void onFailure() {
Log.d(TAG, "Key creation failure");
// TODO: Add back off
self().send(new RequestAuthId());
}
});
}
private void createMtProto(long key) {
Log.d(TAG, "Creating proto");
keyStorage.saveAuthKey(key);
proto = new MTProto(key, new Random().nextLong(), endpoints, new MTProtoCallback() {
@Override
public void onRpcResponse(long mid, byte[] content) {
self().send(new ProtoResponse(mid, content));
}
@Override
public void onUpdate(byte[] content) {
self().send(new ProtoUpdate(content));
}
@Override
public void onAuthKeyInvalidated(long authKey) {
callback.onAuthIdInvalidated(authKey);
}
@Override
public void onSessionCreated() {
callback.onNewSessionCreated();
}
}, networking);
for (RequestHolder holder : requests.values()) {
holder.protoId = proto.sendRpcMessage(holder.message);
idMap.put(holder.protoId, holder.publicId);
// Log.d(TAG, holder.message + " rid#" + holder.publicId + " <- mid#" + holder.protoId);
}
}
private void performRequest(long randomId, Request message, RpcCallback callback) {
Log.d(TAG, "-> request#" + randomId + ": " + message);
// Log.d(TAG, message + " rid#" + randomId);
RequestHolder holder = new RequestHolder(
randomId,
new RpcRequest(message.getHeaderKey(), message.toByteArray()),
callback);
requests.put(holder.publicId, holder);
if (proto != null) {
long mid = proto.sendRpcMessage(holder.message);
holder.protoId = mid;
idMap.put(mid, randomId);
// Log.d(TAG, message + " rid#" + randomId + " <- mid#" + mid);
}
}
private void processResponse(long mid, byte[] content) {
ProtoStruct protoStruct;
try {
protoStruct = ProtoSerializer.readRpcResponsePayload(content);
} catch (IOException e) {
e.printStackTrace();
Log.w(TAG, "Broken response mid#" + mid);
return;
}
// Log.w(TAG, protoStruct + " mid#" + mid);
long rid;
if (idMap.containsKey(mid)) {
rid = idMap.get(mid);
} else {
return;
}
RequestHolder holder;
if (requests.containsKey(rid)) {
holder = requests.get(rid);
} else {
return;
}
if (protoStruct instanceof RpcOk) {
RpcOk ok = (RpcOk) protoStruct;
requests.remove(rid);
if (holder.protoId != 0) {
idMap.remove(holder.protoId);
}
Response response;
try {
response = (Response) new RpcParser().read(ok.responseType, ok.payload);
} catch (IOException e) {
e.printStackTrace();
return;
}
Log.d(TAG, "<- response#" + holder.publicId + ": " + response);
holder.callback.onResult(response);
} else if (protoStruct instanceof RpcError) {
RpcError e = (RpcError) protoStruct;
requests.remove(rid);
if (holder.protoId != 0) {
idMap.remove(holder.protoId);
}
holder.callback.onError(new RpcException(e.errorTag, e.errorCode, e.userMessage, e.canTryAgain, e.relatedData));
} else if (protoStruct instanceof RpcInternalError) {
RpcInternalError e = ((RpcInternalError) protoStruct);
if (e.isCanTryAgain()) {
self().send(new ForceResend(rid), e.getTryAgainDelay() * 1000L);
} else {
requests.remove(rid);
if (holder.protoId != 0) {
idMap.remove(holder.protoId);
}
holder.callback.onError(new RpcInternalException());
}
} else if (protoStruct instanceof RpcFloodWait) {
RpcFloodWait f = (RpcFloodWait) protoStruct;
self().send(new ForceResend(rid), f.getDelay() * 1000L);
} else {
// Unknown
}
}
private void forceResend(long randomId) {
RequestHolder holder = requests.get(randomId);
if (holder != null) {
if (holder.protoId != 0) {
idMap.remove(holder.protoId);
proto.cancelRpc(holder.protoId);
}
proto.sendRpcMessage(holder.message);
}
}
private void cancelRequest(long randomId) {
RequestHolder holder = requests.get(randomId);
if (holder != null) {
requests.remove(randomId);
if (holder.protoId != 0 && proto != null) {
idMap.remove(holder.protoId);
proto.cancelRpc(holder.protoId);
}
}
}
private void processUpdate(byte[] content) {
ProtoStruct protoStruct;
try {
protoStruct = ProtoSerializer.readUpdate(content);
} catch (IOException e) {
e.printStackTrace();
Log.w(TAG, "Broken mt update");
return;
}
if (protoStruct instanceof Push) {
int type = ((Push) protoStruct).updateType;
byte[] body = ((Push) protoStruct).body;
RpcScope updateBox;
try {
updateBox = new RpcParser().read(type, body);
} catch (IOException e) {
e.printStackTrace();
Log.w(TAG, "Broken update box");
return;
}
// Log.w(TAG, "Box: " + updateBox + "");
callback.onUpdateReceived(updateBox);
} else {
// Unknown
}
}
public static class PerformRequest {
private Request message;
private RpcCallback callback;
public PerformRequest(Request message, RpcCallback callback) {
this.message = message;
this.callback = callback;
}
public Request getMessage() {
return message;
}
public RpcCallback getCallback() {
return callback;
}
}
public static class CancelRequest {
private long randomId;
public CancelRequest(long randomId) {
this.randomId = randomId;
}
public long getRandomId() {
return randomId;
}
}
private class RequestAuthId {
}
private class InitMTProto {
private long authId;
public InitMTProto(long authId) {
this.authId = authId;
}
public long getAuthId() {
return authId;
}
}
private class ProtoResponse {
private long responseId;
private byte[] data;
public ProtoResponse(long responseId, byte[] data) {
this.responseId = responseId;
this.data = data;
}
public long getResponseId() {
return responseId;
}
public byte[] getData() {
return data;
}
}
private class ProtoUpdate {
private byte[] data;
public ProtoUpdate(byte[] data) {
this.data = data;
}
public byte[] getData() {
return data;
}
}
private class ForceResend {
private long id;
public ForceResend(long id) {
this.id = id;
}
public long getId() {
return id;
}
}
private class RequestHolder {
private final RpcRequest message;
private final long publicId;
private final RpcCallback callback;
private long protoId;
private RequestHolder(long publicId, RpcRequest message, RpcCallback callback) {
this.message = message;
this.publicId = publicId;
this.callback = callback;
}
}
} | feat(common): Added error response logging
| actor-common/src/main/java/im/actor/model/network/api/ApiBroker.java | feat(common): Added error response logging |
|
Java | mit | 7d8e557d5741676b9842874cba3ece8818df8a05 | 0 | Kaufland/andksnackbar | package kaufland.com.snackbarlibrary;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import kaufland.com.snackbarlibrary.view.SnackbarView;
public class SnackbarQueue {
private static final String TAG = SnackbarQueue.class.getName();
// Pending snackbars waiting to be displayed, in FIFO order.
private final Queue<SnackbarView> mQueue = new ConcurrentLinkedQueue<>();
// Worker pool sized to the maximum number of concurrently visible snackbars;
// each worker blocks for the lifetime of the snackbar it is showing.
private final ExecutorService mExecutionWorker;
private Snackbar mSnackbar;
// True while no Context is bound; queue processing is paused in that state.
private volatile boolean mExecutionOnHold = true;
private Handler mainThreadHandler = new Handler(Looper.getMainLooper());
// Thread that sleeps for the snackbar's display duration; interrupting it
// (via the dismiss callback) ends the display early.
private class WaitingThread extends Thread {
private int wait;
public WaitingThread(int wait) {
this.wait = wait;
}
@Override
public void run() {
super.run();
try {
sleep(wait);
} catch (InterruptedException e) {
// Expected: interruption signals early dismissal.
}
}
}
public SnackbarQueue(SnackbarConfiguration snackbarConfiguration) {
mSnackbar = new Snackbar(snackbarConfiguration);
mExecutionWorker = Executors.newFixedThreadPool(snackbarConfiguration.getSnackbarMaxCount());
// Periodically re-check visibility on the main thread every 800 ms.
Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
if(mSnackbar != null){
mainThreadHandler.post(new Runnable() {
@Override
public void run() {
mSnackbar.verifyVisible();
}
});
}
}
}, 800, 800, TimeUnit.MILLISECONDS);
}
// Binds (or unbinds, when context is null) the display Context; a non-null
// context resumes queue processing on the next add/completion.
public void rebindContext(Context context) {
mExecutionOnHold = context == null;
if (mSnackbar != null) {
mSnackbar.updateContext(context);
mSnackbar.show();
}
}
// Enqueues a view and kicks the queue worker.
public void add(final SnackbarView view) {
mQueue.add(view);
workOnQueue();
}
// Takes the head of the queue, shows it on the main thread, then blocks the
// worker until the view's duration elapses or it is dismissed, removes it,
// and finally re-runs itself for the next queued view.
private synchronized void workOnQueue() {
if (mExecutionOnHold) {
Log.i(TAG, "SnackbarQueue is on hold. Not executing any executors.");
return;
}
final SnackbarView view = mQueue.peek();
mExecutionWorker.execute(new Runnable() {
@Override
public void run() {
if (mExecutionOnHold || !mQueue.contains(view)) {
// periodic schedule may have scheduled nextExecutor while it was being executed
return;
}
mainThreadHandler.post(new Runnable() {
@Override
public void run() {
mSnackbar.addSnackbarView(view);
mQueue.remove(view);
}
});
// No duration (or non-positive) means "show until dismissed".
final WaitingThread waiting = new WaitingThread(view.getDuration() != null && view.getDuration() > 0 ? view.getDuration() : Integer.MAX_VALUE);
view.setCallback(new SnackbarView.Callback() {
@Override
public void onDismiss(SnackbarView view) {
waiting.interrupt();
}
});
waiting.start();
try {
waiting.join();
} catch (InterruptedException e) {
// Worker interrupted: fall through and remove the view anyway.
}
mainThreadHandler.post(new Runnable() {
@Override
public void run() {
mSnackbar.removeSnackbarView(view);
}
});
workOnQueue();
}
});
}
| library/src/main/java/kaufland/com/snackbarlibrary/SnackbarQueue.java | package kaufland.com.snackbarlibrary;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import kaufland.com.snackbarlibrary.view.SnackbarView;
public class SnackbarQueue {
private static final String TAG = SnackbarQueue.class.getName();
private final Queue<SnackbarView> mCartExecutors = new ConcurrentLinkedQueue<>();
private final ExecutorService mExecutionWorker;
private Snackbar sSnackbar;
private volatile boolean mExecutionOnHold = true;
private Handler mainThreadHandler = new Handler(Looper.getMainLooper());
private class WaitingThread extends Thread {
private int wait;
public WaitingThread(int wait) {
this.wait = wait;
}
@Override
public void run() {
super.run();
try {
sleep(wait);
} catch (InterruptedException e) {
//nope
}
}
}
public SnackbarQueue(SnackbarConfiguration snackbarConfiguration) {
sSnackbar = new Snackbar(snackbarConfiguration);
mExecutionWorker = Executors.newFixedThreadPool(snackbarConfiguration.getSnackbarMaxCount());
Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
if(sSnackbar != null){
mainThreadHandler.post(new Runnable() {
@Override
public void run() {
sSnackbar.verifyVisible();
}
});
}
}
}, 800, 800, TimeUnit.MILLISECONDS);
}
public void rebindContext(Context context) {
mExecutionOnHold = context == null;
if (sSnackbar != null) {
sSnackbar.updateContext(context);
sSnackbar.show();
}
}
public void add(final SnackbarView view) {
mCartExecutors.add(view);
workOnQueue();
}
private synchronized void workOnQueue() {
if (mExecutionOnHold) {
Log.i(TAG, "SnackbarQueue is on hold. Not executing any executors.");
return;
}
final SnackbarView view = mCartExecutors.peek();
mExecutionWorker.execute(new Runnable() {
@Override
public void run() {
if (mExecutionOnHold || !mCartExecutors.contains(view)) {
// periodic schedule may have scheduled nextExecutor while it was being executed
return;
}
mainThreadHandler.post(new Runnable() {
@Override
public void run() {
sSnackbar.addSnackbarView(view);
mCartExecutors.remove(view);
}
});
final WaitingThread waiting = new WaitingThread(view.getDuration() != null && view.getDuration() > 0 ? view.getDuration() : Integer.MAX_VALUE);
view.setCallback(new SnackbarView.Callback() {
@Override
public void onDismiss(SnackbarView view) {
waiting.interrupt();
}
});
waiting.start();
try {
waiting.join();
} catch (InterruptedException e) {
//nope
}
mainThreadHandler.post(new Runnable() {
@Override
public void run() {
sSnackbar.removeSnackbarView(view);
}
});
workOnQueue();
}
});
}
}
| # renamed some stuff
| library/src/main/java/kaufland/com/snackbarlibrary/SnackbarQueue.java | # renamed some stuff |
|
Java | mit | a5d747809e046beb61b6618ba712fdbc6ee18fa2 | 0 | dtm/ProvToolbox,dtm/ProvToolbox,joansmith/ProvToolbox,joansmith/ProvToolbox,dtm/ProvToolbox,joansmith/ProvToolbox,joansmith/ProvToolbox | package org.openprovenance.prov.interop;
import java.io.File;
import java.io.IOException;
import java.util.Hashtable;
import javax.xml.bind.JAXBException;
import org.openprovenance.prov.xml.Document;
import org.openprovenance.prov.xml.ProvDeserialiser;
import org.openprovenance.prov.xml.ProvSerialiser;
import org.openprovenance.prov.xml.ProvFactory;
import org.openprovenance.prov.notation.Utility;
import org.antlr.runtime.tree.CommonTree;
import org.openrdf.rio.RDFFormat;
import org.openprovenance.prov.dot.ProvToDot;
import org.apache.log4j.Logger;
/**
* The interoperability framework for PROV.
*/
public class InteropFramework
{
static Logger logger = Logger.getLogger(InteropFramework.class);
public static final String UNKNOWN = "unknown";
public static final String PC1_NS="http://www.ipaw.info/pc1/";
public static final String PC1_PREFIX="pc1";
public static final String PRIM_NS="http://openprovenance.org/primitives#";
public static final String PRIM_PREFIX="prim";
final Utility u=new Utility();
final ProvFactory pFactory=ProvFactory.getFactory();
final private String verbose;
final private String debug;
final private String logfile;
final private String infile;
final private String outfile;
final private String namespaces;
public final Hashtable<ProvFormat,String> extensionMap;
public final Hashtable<String,ProvFormat> extensionRevMap;
public final Hashtable<ProvFormat,String> mimeTypeMap;
// Convenience constructor: all options unset (quiet, no files, no namespaces).
public InteropFramework() {
this(null, null, null, null, null, null);
}
// Full constructor; any argument may be null. Also builds the three
// format/extension/MIME lookup tables used for format negotiation.
public InteropFramework(String verbose,
String debug, String logfile, String infile, String outfile,
String namespaces) {
this.verbose=verbose;
this.debug=debug;
this.logfile=logfile;
this.infile=infile;
this.outfile=outfile;
this.namespaces=namespaces;
extensionMap=new Hashtable<InteropFramework.ProvFormat, String>();
extensionRevMap=new Hashtable<String, InteropFramework.ProvFormat>();
mimeTypeMap=new Hashtable<InteropFramework.ProvFormat, String>();
initializeExtensionMap(extensionMap, extensionRevMap);
}
/**
 * Populates the three lookup tables used for format negotiation:
 * format &rarr; preferred file extension, extension &rarr; format, and
 * format &rarr; MIME type. Several alternative extensions map to the same
 * format (e.g. "pn", "asn" for PROV-N). Formats without a registered MIME
 * type (DOT) are simply absent from {@code mimeTypeMap}.
 *
 * @param extensionMap    table to fill with format &rarr; canonical extension
 * @param extensionRevMap table to fill with extension &rarr; format
 */
public void initializeExtensionMap(Hashtable<ProvFormat,String> extensionMap,
                                   Hashtable<String, InteropFramework.ProvFormat> extensionRevMap) {
    for (ProvFormat f : ProvFormat.values()) {
        switch (f) {
        case DOT:
            extensionMap.put(ProvFormat.DOT, "dot");
            extensionRevMap.put("dot", ProvFormat.DOT);
            break;
        case JPEG:
            extensionMap.put(ProvFormat.JPEG, "jpg");
            extensionRevMap.put("jpeg", ProvFormat.JPEG);
            extensionRevMap.put("jpg", ProvFormat.JPEG);
            mimeTypeMap.put(ProvFormat.JPEG, "image/jpeg");
            break;
        case JSON:
            extensionMap.put(ProvFormat.JSON, "json");
            extensionRevMap.put("json", ProvFormat.JSON);
            mimeTypeMap.put(ProvFormat.JSON, "application/json");
            break;
        case PDF:
            extensionMap.put(ProvFormat.PDF, "pdf");
            extensionRevMap.put("pdf", ProvFormat.PDF);
            mimeTypeMap.put(ProvFormat.PDF, "application/pdf");
            break;
        case PROVN:
            extensionMap.put(ProvFormat.PROVN, "provn");
            extensionRevMap.put("provn", ProvFormat.PROVN);
            extensionRevMap.put("pn", ProvFormat.PROVN);
            extensionRevMap.put("asn", ProvFormat.PROVN);
            extensionRevMap.put("prov-asn", ProvFormat.PROVN);
            mimeTypeMap.put(ProvFormat.PROVN, "text/provenance-notation");
            break;
        case RDFXML:
            extensionMap.put(ProvFormat.RDFXML, "rdf");
            extensionRevMap.put("rdf", ProvFormat.RDFXML);
            mimeTypeMap.put(ProvFormat.RDFXML, "application/rdf+xml");
            break;
        case SVG:
            extensionMap.put(ProvFormat.SVG, "svg");
            extensionRevMap.put("svg", ProvFormat.SVG);
            mimeTypeMap.put(ProvFormat.SVG, "image/svg+xml");
            break;
        case TRIG:
            extensionMap.put(ProvFormat.TRIG, "trig");
            extensionRevMap.put("trig", ProvFormat.TRIG);
            // BUG FIX: this MIME type was previously registered under
            // ProvFormat.TURTLE, leaving TRIG without any MIME type (the TURTLE
            // case then overwrote the bogus entry with "text/turtle").
            mimeTypeMap.put(ProvFormat.TRIG, "application/x-trig");
            break;
        case TURTLE:
            extensionMap.put(ProvFormat.TURTLE, "ttl");
            extensionRevMap.put("ttl", ProvFormat.TURTLE);
            mimeTypeMap.put(ProvFormat.TURTLE, "text/turtle");
            break;
        case XML:
            extensionMap.put(ProvFormat.XML, "provx");
            extensionRevMap.put("provx", ProvFormat.XML);
            extensionRevMap.put("xml", ProvFormat.XML);
            mimeTypeMap.put(ProvFormat.XML, "text/xml");
            break;
        default:
            break;
        }
    }
}
/**
 * Returns the canonical file extension registered for the given format.
 *
 * @param format the format, or {@code null}
 * @return the registered extension; {@code UNKNOWN} when {@code format} is
 *         {@code null}, or {@code null} when the format has no registered extension
 */
public String getExtension(ProvFormat format) {
    return (format == null) ? UNKNOWN : extensionMap.get(format);
}
/**
 * Maps a file extension (e.g. "ttl") to its registered MIME type.
 *
 * @param type file extension without the leading dot
 * @return the MIME type, or {@code null} when the extension (or its format)
 *         is not registered
 */
public String convertToMimeType(String type) {
    final ProvFormat format = extensionRevMap.get(type);
    return (format != null) ? mimeTypeMap.get(format) : null;
}
// Converts a PROV-N file into an HTML rendering: parses `file` into a Document
// bean, serialises it to HTML, and writes the result to `file2`.
public void provn2html(String file, String file2) throws java.io.IOException, JAXBException, Throwable {
Document doc = (Document)u.convertASNToJavaBean(file);
String s=u.convertBeanToHTML(doc);
u.writeTextToFile(s,file2);
}
public static final String RDF_TURTLE="turtle";
public static final String RDF_XML="rdf/xml";
public static final String RDF_TRIG="trig";
public static final String RDF_N3="n3";
/**
 * Translates one of the RDF_* type names used by this class into the
 * corresponding Sesame {@link RDFFormat} constant.
 *
 * @param type one of {@code RDF_TURTLE}, {@code RDF_XML}, {@code RDF_N3},
 *             {@code RDF_TRIG}
 * @return the matching {@link RDFFormat}, or {@code null} for anything else
 */
public RDFFormat convert(String type) {
    return RDF_TURTLE.equals(type) ? RDFFormat.TURTLE
         : RDF_XML.equals(type)    ? RDFFormat.RDFXML
         : RDF_N3.equals(type)     ? RDFFormat.N3
         : RDF_TRIG.equals(type)   ? RDFFormat.TRIG
         : null;
}
/** Reads a PROV file (format inferred from its extension) into a Java bean.
 * Delegates to {@link #loadProvKnownGraph(String)}; on ANY failure the stack
 * trace is printed and {@code null} is returned (the fallback to
 * loadProvUnknownGraph is currently disabled).
 * @param filename path of the file to read
 * @return the parsed document bean, or {@code null} on failure
 */
public Object loadProvGraph(String filename) throws java.io.IOException, JAXBException, Throwable {
try {
return loadProvKnownGraph(filename);
} catch (Throwable e) {
e.printStackTrace();
return null;
//return loadProvUnknownGraph(filename);
}
}
public enum ProvFormat { PROVN, XML, TURTLE, RDFXML, TRIG, JSON, DOT, JPEG, SVG, PDF }
/**
 * Determines the {@link ProvFormat} of a file from its extension.
 *
 * @param filename file name or path
 * @return the format registered for the file's extension, or {@code null}
 *         when the name has no extension or the extension is unknown
 */
public ProvFormat getTypeForFile(String filename) {
    int count = filename.lastIndexOf(".");
    if (count == -1) return null; // no extension => format unknown
    // Normalise case so "FILE.XML" resolves like "file.xml" (all registered
    // keys are lower-case); Locale.ROOT avoids locale-sensitive case mapping.
    String extension = filename.substring(count + 1).toLowerCase(java.util.Locale.ROOT);
    return extensionRevMap.get(extension);
}
// Serialises `doc` to `filename`, choosing the serialisation from the file's
// extension. Unknown extensions are reported on stderr and ignored; any
// serialisation failure is wrapped in an InteropException.
public void writeDocument(String filename, Document doc) {
try {
ProvFormat format = getTypeForFile(filename);
if (format == null) {
System.err.println("Unknown output file format: " + filename);
return;
}
logger.debug("writing " + format);
logger.debug("writing " + filename);
// Ensure the document has a (possibly empty) namespace table before writing.
setNamespaces(doc);
switch (format) {
case PROVN: {
u.writeDocument(doc,filename);
break;
}
case XML: {
ProvSerialiser serial = ProvSerialiser.getThreadProvSerialiser();
logger.debug("namespaces " + doc.getNss());
serial.serialiseDocument(new File(filename), doc, true);
break;
}
case TURTLE: {
new org.openprovenance.prov.rdf.Utility().dumpRDF(pFactory, doc, RDFFormat.TURTLE, filename);
break;
}
case RDFXML: {
new org.openprovenance.prov.rdf.Utility().dumpRDF(pFactory, doc, RDFFormat.RDFXML, filename);
break;
}
case TRIG: {
new org.openprovenance.prov.rdf.Utility().dumpRDF(pFactory, doc, RDFFormat.TRIG, filename);
break;
}
case JSON: {
new org.openprovenance.prov.json.Converter().writeDocument(doc, filename);
break;
}
case PDF: {
// Goes through an intermediate DOT file in the temp directory.
// NOTE(review): unlike the JPEG/SVG branch, the temp file is never deleted here.
String configFile=null; // TODO: get it as option
File tmp=File.createTempFile("viz-", ".dot");
String dotFileOut=tmp.getAbsolutePath(); //give it as option, if not available create tmp file
ProvToDot toDot=
(configFile==null)? new ProvToDot(ProvToDot.Config.ROLE_NO_LABEL) : new ProvToDot (configFile);
toDot.convert(doc, dotFileOut, filename);
break;
}
case DOT: {
String configFile=null; // TODO: get it as option
ProvToDot toDot=
(configFile==null)? new ProvToDot(ProvToDot.Config.ROLE_NO_LABEL) : new ProvToDot (configFile);
toDot.convert(doc, filename);
break;
}
case JPEG:
case SVG:{
String configFile=null; // give it as option
File tmp=File.createTempFile("viz-", ".dot");
String dotFileOut=tmp.getAbsolutePath(); //give it as option, if not available create tmp file
//ProvToDot toDot=new ProvToDot((configFile==null)? "../../ProvToolbox/prov-dot/src/main/resources/defaultConfigWithRoleNoLabel.xml" : configFile);
ProvToDot toDot;
if (configFile!=null) {
toDot=new ProvToDot(configFile);
} else {
toDot=new ProvToDot(ProvToDot.Config.ROLE_NO_LABEL);
}
// NOTE(review): the output type is hard-coded to "svg", so a .jpg target
// appears to receive SVG content — confirm whether JPEG output is intended.
toDot.convert(doc, dotFileOut, filename, "svg");
tmp.delete();
break;
}
default:
break;}
} catch (JAXBException e) {
if (verbose!=null) e.printStackTrace();
throw new InteropException(e);
} catch (Exception e) {
if (verbose!=null) e.printStackTrace();
throw new InteropException(e);
}
}
    /** Installs an empty namespace table on {@code doc} when none is present. */
    public void setNamespaces(Document doc) {
        if (doc.getNss()==null) doc.setNss(new Hashtable<String, String>());
    }
    /**
     * Deserialises a PROV document from {@code filename}, selecting the parser
     * from the file extension. Graphical formats (DOT/JPEG/SVG) cannot be read
     * back and raise {@link UnsupportedOperationException}. I/O and other
     * failures are rethrown as unchecked {@link InteropException}.
     *
     * @return the parsed document (a {@code Document} or notation bean)
     */
    public Object loadProvKnownGraph(String filename) {
        try {
            ProvFormat format = getTypeForFile(filename);
            if (format == null) {
                throw new InteropException("Unknown output file format: " + filename);
            }
            switch (format) {
            case DOT:
            case JPEG:
            case SVG:
                throw new UnsupportedOperationException(); //we don't load PROV from these formats
            case JSON: {
                return new org.openprovenance.prov.json.Converter().readDocument(filename);
            }
            case PROVN: {
                // Parse PROV-N text into an ANTLR tree, then into a Java bean.
                Utility u=new Utility();
                CommonTree tree = u.convertASNToTree(filename);
                Object o=u.convertTreeToJavaBean(tree);
                return o;
            }
            case RDFXML:
            case TRIG:
            case TURTLE:{
                // The RDF utility auto-detects the concrete RDF syntax.
                org.openprovenance.prov.rdf.Utility rdfU=new org.openprovenance.prov.rdf.Utility();
                Document doc=rdfU.parseRDF(filename);
                return doc;
            }
            case XML: {
                File in=new File(filename);
                ProvDeserialiser deserial=ProvDeserialiser.getThreadProvDeserialiser();
                Document c=deserial.deserialiseDocument(in);
                return c;
            }
            default: {
                System.out.println("Unknown format " + filename);
                throw new UnsupportedOperationException();
            }
            }
        } catch (IOException e) {
            throw new InteropException(e);
        } catch (Throwable e) {
            e.printStackTrace();
            throw new InteropException(e);
        }
    }
    /**
     * Deserialises a PROV document whose format is unknown by probing the
     * parsers in turn: PROV-N, XML, JSON, then RDF. The first parser that
     * returns a non-null result wins; each failure is deliberately swallowed
     * so the next format can be tried.
     *
     * @throws UnsupportedOperationException when no parser accepts the file
     */
    public Object loadProvUnknownGraph(String filename)
            throws java.io.IOException, JAXBException, Throwable {
        try {
            // Attempt 1: PROV-N notation.
            Utility u=new Utility();
            CommonTree tree = u.convertASNToTree(filename);
            Object o=u.convertTreeToJavaBean(tree);
            if (o!=null) {
                return o;
            }
        } catch (Throwable t1) {
            // OK, we failed, let's try next format.
        }
        try {
            // Attempt 2: PROV-XML.
            File in=new File(filename);
            ProvDeserialiser deserial=ProvDeserialiser.getThreadProvDeserialiser();
            Document c=deserial.deserialiseDocument(in);
            if (c!=null) {
                return c;
            }
        } catch (Throwable t2) {
            // OK, we failed, let's try next format.
        }
        try {
            // Attempt 3: PROV-JSON.
            Object o=new org.openprovenance.prov.json.Converter().readDocument(filename);
            if (o!=null) {
                return o;
            }
        } catch (RuntimeException e) {
            // OK, we failed, let's try next format.
        }
        try {
            // Attempt 4: RDF (any syntax the RDF utility recognises).
            org.openprovenance.prov.rdf.Utility rdfU=new org.openprovenance.prov.rdf.Utility();
            Document doc=rdfU.parseRDF(filename);
            if (doc!=null) {
                return doc;
            }
        } catch (RuntimeException e) {
            //OK, we failed, let's try next format
        }
        System.out.println("Unparseable format " + filename);
        throw new UnsupportedOperationException();
    }
public void run() {
if (infile==null) return;
if (outfile==null) return;
try {
Document doc=(Document) loadProvKnownGraph(infile);
//doc.setNss(new Hashtable<String, String>());
//doc.getNss().put("pc1",PC1_NS);
//doc.getNss().put("prim",PRIM_NS);
//doc.getNss().put("prov","http://www.w3.org/ns/prov#");
//doc.getNss().put("xsd","http://www.w3.org/2001/XMLSchema");
//doc.getNss().put("xsi","http://www.w3.org/2001/XMLSchema-instance");
System.out.println("InteropFramework run() -> " + doc.getNss());
writeDocument(outfile, doc);
} catch (Throwable e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
| prov-interop/src/main/java/org/openprovenance/prov/interop/InteropFramework.java | package org.openprovenance.prov.interop;
import java.io.File;
import java.io.IOException;
import java.util.Hashtable;
import javax.xml.bind.JAXBException;
import org.openprovenance.prov.xml.Document;
import org.openprovenance.prov.xml.ProvDeserialiser;
import org.openprovenance.prov.xml.ProvSerialiser;
import org.openprovenance.prov.xml.ProvFactory;
import org.openprovenance.prov.notation.Utility;
import org.antlr.runtime.tree.CommonTree;
import org.openrdf.rio.RDFFormat;
import org.openprovenance.prov.dot.ProvToDot;
import org.apache.log4j.Logger;
/**
* The interoperability framework for PROV.
*/
public class InteropFramework
{
    static Logger logger = Logger.getLogger(InteropFramework.class);
    /** Sentinel returned by {@link #getExtension} for an unknown format. */
    public static final String UNKNOWN = "unknown";
    public static final String PC1_NS="http://www.ipaw.info/pc1/";
    public static final String PC1_PREFIX="pc1";
    public static final String PRIM_NS="http://openprovenance.org/primitives#";
    public static final String PRIM_PREFIX="prim";
    // Shared notation helper and PROV factory used by the serialisers.
    final Utility u=new Utility();
    final ProvFactory pFactory=ProvFactory.getFactory();
    // Configuration supplied via the constructor; null when unset.
    final private String verbose;
    final private String debug;
    final private String logfile;
    final private String infile;
    final private String outfile;
    final private String namespaces;
    // format -> extension, extension -> format, and format -> mime type tables.
    public final Hashtable<ProvFormat,String> extensionMap;
    public final Hashtable<String,ProvFormat> extensionRevMap;
    public final Hashtable<ProvFormat,String> mimeTypeMap;
    /** Creates a framework with no configured files (useful for API-only use). */
    public InteropFramework() {
        this(null, null, null, null, null, null);
    }
    /**
     * Creates a framework with the given configuration; any argument may be
     * null. Also builds the extension and mime-type lookup tables.
     */
    public InteropFramework(String verbose,
                            String debug, String logfile, String infile, String outfile,
                            String namespaces) {
        this.verbose=verbose;
        this.debug=debug;
        this.logfile=logfile;
        this.infile=infile;
        this.outfile=outfile;
        this.namespaces=namespaces;
        extensionMap=new Hashtable<InteropFramework.ProvFormat, String>();
        extensionRevMap=new Hashtable<String, InteropFramework.ProvFormat>();
        mimeTypeMap=new Hashtable<InteropFramework.ProvFormat, String>();
        initializeExtensionMap(extensionMap, extensionRevMap);
    }
public void initializeExtensionMap(Hashtable<ProvFormat,String> extensionMap,
Hashtable<String, InteropFramework.ProvFormat> extensionRevMap) {
for (ProvFormat f: ProvFormat.values()) {
switch (f) {
case DOT:
extensionMap.put(ProvFormat.DOT,"dot");
extensionRevMap.put("dot", ProvFormat.DOT);
break;
case JPEG:
extensionMap.put(ProvFormat.JPEG,"jpg");
extensionRevMap.put("jpeg", ProvFormat.JPEG);
extensionRevMap.put("jpg", ProvFormat.JPEG);
mimeTypeMap.put(ProvFormat.JPEG,"image/jpeg");
break;
case JSON:
extensionMap.put(ProvFormat.JSON,"json");
extensionRevMap.put("json", ProvFormat.JSON);
mimeTypeMap.put(ProvFormat.JSON,"application/json");
break;
case PDF:
extensionMap.put(ProvFormat.PDF,"pdf");
extensionRevMap.put("pdf", ProvFormat.PDF);
mimeTypeMap.put(ProvFormat.PDF,"application/pdf");
break;
case PROVN:
extensionMap.put(ProvFormat.PROVN,"provn");
extensionRevMap.put("provn", ProvFormat.PROVN);
extensionRevMap.put("pn", ProvFormat.PROVN);
extensionRevMap.put("asn", ProvFormat.PROVN);
extensionRevMap.put("prov-asn", ProvFormat.PROVN);
mimeTypeMap.put(ProvFormat.PROVN,"text/provenance-notation");
break;
case RDFXML:
extensionMap.put(ProvFormat.RDFXML,"rdf");
extensionRevMap.put("rdf", ProvFormat.RDFXML);
mimeTypeMap.put(ProvFormat.RDFXML,"application/rdf+xml");
break;
case SVG:
extensionMap.put(ProvFormat.SVG,"svg");
extensionRevMap.put("svg", ProvFormat.SVG);
mimeTypeMap.put(ProvFormat.SVG,"image/svg+xml");
break;
case TRIG:
extensionMap.put(ProvFormat.TRIG,"trig");
extensionRevMap.put("trig", ProvFormat.TRIG);
mimeTypeMap.put(ProvFormat.TURTLE,"application/x-trig");
break;
case TURTLE:
extensionMap.put(ProvFormat.TURTLE,"ttl");
extensionRevMap.put("ttl", ProvFormat.TURTLE);
mimeTypeMap.put(ProvFormat.TURTLE,"text/turtle");
break;
case XML:
extensionMap.put(ProvFormat.XML,"provx");
extensionRevMap.put("provx", ProvFormat.XML);
extensionRevMap.put("xml", ProvFormat.XML);
mimeTypeMap.put(ProvFormat.XML,"text/xml");
break;
default:
break;
}
}
}
public String getExtension(ProvFormat format) {
String extension=UNKNOWN;
if (format!=null) {
extension=extensionMap.get(format);
}
return extension;
}
public String convertToMimeType(String type) {
ProvFormat format=extensionRevMap.get(type);
if (format==null) return null;
return mimeTypeMap.get(format);
}
    /** Converts a PROV-N file to an HTML rendering written to {@code file2}. */
    public void provn2html(String file, String file2) throws java.io.IOException, JAXBException, Throwable {
        Document doc = (Document)u.convertASNToJavaBean(file);
        String s=u.convertBeanToHTML(doc);
        u.writeTextToFile(s,file2);
    }
    // Command-line names for the RDF syntaxes accepted by convert(String).
    public static final String RDF_TURTLE="turtle";
    public static final String RDF_XML="rdf/xml";
    public static final String RDF_TRIG="trig";
    public static final String RDF_N3="n3";
    /** Maps a textual RDF syntax name to its {@link RDFFormat}, or null. */
    public RDFFormat convert(String type) {
        if (RDF_TURTLE.equals(type)) return RDFFormat.TURTLE;
        if (RDF_XML.equals(type)) return RDFFormat.RDFXML;
        if (RDF_N3.equals(type)) return RDFFormat.N3;
        if (RDF_TRIG.equals(type)) return RDFFormat.TRIG;
        return null;
    }
    /** Reads a file into java bean. */
    public Object loadProvGraph(String filename) throws java.io.IOException, JAXBException, Throwable {
        try {
            return loadProvKnownGraph(filename);
        } catch (Throwable e) {
            // NOTE(review): failures are swallowed and null is returned,
            // silently masking parse errors; callers must null-check.
            e.printStackTrace();
            return null;
            //return loadProvUnknownGraph(filename);
        }
    }
public enum ProvFormat { PROVN, XML, TURTLE, RDFXML, TRIG, JSON, DOT, JPEG, SVG, PDF }
public ProvFormat getTypeForFile(String filename) {
int count=filename.lastIndexOf(".");
if (count==-1) return null;
String extension=filename.substring(count+1);
return extensionRevMap.get(extension);
}
public void writeDocument(String filename, Document doc) {
try {
ProvFormat format = getTypeForFile(filename);
if (format == null) {
System.err.println("Unknown output file format: " + filename);
return;
}
logger.debug("writing " + format);
logger.debug("writing " + filename);
setNamespaces(doc);
switch (format) {
case PROVN: {
u.writeDocument(doc,filename);
break;
}
case XML: {
ProvSerialiser serial = ProvSerialiser.getThreadProvSerialiser();
logger.debug("namespaces " + doc.getNss());
serial.serialiseDocument(new File(filename), doc, true);
break;
}
case TURTLE: {
new org.openprovenance.prov.rdf.Utility().dumpRDF(pFactory, doc, RDFFormat.TURTLE, filename);
break;
}
case RDFXML: {
new org.openprovenance.prov.rdf.Utility().dumpRDF(pFactory, doc, RDFFormat.RDFXML, filename);
break;
}
case TRIG: {
new org.openprovenance.prov.rdf.Utility().dumpRDF(pFactory, doc, RDFFormat.TRIG, filename);
break;
}
case JSON: {
new org.openprovenance.prov.json.Converter().writeDocument(doc, filename);
break;
}
case PDF: {
String configFile=null; // TODO: get it as option
File tmp=File.createTempFile("viz-", ".dot",new File("/tmp"));
String dotFileOut=tmp.getAbsolutePath(); //give it as option, if not available create tmp file
ProvToDot toDot=
(configFile==null)? new ProvToDot(ProvToDot.Config.ROLE_NO_LABEL) : new ProvToDot (configFile);
toDot.convert(doc, dotFileOut, filename);
break;
}
case DOT: {
String configFile=null; // TODO: get it as option
ProvToDot toDot=
(configFile==null)? new ProvToDot(ProvToDot.Config.ROLE_NO_LABEL) : new ProvToDot (configFile);
toDot.convert(doc, filename);
break;
}
case JPEG:
case SVG:{
String configFile=null; // give it as option
File tmp=File.createTempFile("viz-", ".dot",new File("/tmp"));
String dotFileOut=tmp.getAbsolutePath(); //give it as option, if not available create tmp file
//ProvToDot toDot=new ProvToDot((configFile==null)? "../../ProvToolbox/prov-dot/src/main/resources/defaultConfigWithRoleNoLabel.xml" : configFile);
ProvToDot toDot;
if (configFile!=null) {
toDot=new ProvToDot(configFile);
} else {
toDot=new ProvToDot(ProvToDot.Config.ROLE_NO_LABEL);
}
toDot.convert(doc, dotFileOut, filename, "svg");
tmp.delete();
break;
}
default:
break;}
} catch (JAXBException e) {
if (verbose!=null) e.printStackTrace();
throw new InteropException(e);
} catch (Exception e) {
if (verbose!=null) e.printStackTrace();
throw new InteropException(e);
}
}
    /** Installs an empty namespace table on {@code doc} when none is present. */
    public void setNamespaces(Document doc) {
        if (doc.getNss()==null) doc.setNss(new Hashtable<String, String>());
    }
    /**
     * Deserialises a PROV document from {@code filename}, selecting the parser
     * from the file extension. Graphical formats (DOT/JPEG/SVG) cannot be read
     * back and raise {@link UnsupportedOperationException}. Failures are
     * rethrown as unchecked {@link InteropException}.
     */
    public Object loadProvKnownGraph(String filename) {
        try {
            ProvFormat format = getTypeForFile(filename);
            if (format == null) {
                throw new InteropException("Unknown output file format: " + filename);
            }
            switch (format) {
            case DOT:
            case JPEG:
            case SVG:
                throw new UnsupportedOperationException(); //we don't load PROV from these formats
            case JSON: {
                return new org.openprovenance.prov.json.Converter().readDocument(filename);
            }
            case PROVN: {
                // Parse PROV-N text into an ANTLR tree, then into a Java bean.
                Utility u=new Utility();
                CommonTree tree = u.convertASNToTree(filename);
                Object o=u.convertTreeToJavaBean(tree);
                return o;
            }
            case RDFXML:
            case TRIG:
            case TURTLE:{
                // The RDF utility auto-detects the concrete RDF syntax.
                org.openprovenance.prov.rdf.Utility rdfU=new org.openprovenance.prov.rdf.Utility();
                Document doc=rdfU.parseRDF(filename);
                return doc;
            }
            case XML: {
                File in=new File(filename);
                ProvDeserialiser deserial=ProvDeserialiser.getThreadProvDeserialiser();
                Document c=deserial.deserialiseDocument(in);
                return c;
            }
            default: {
                System.out.println("Unknown format " + filename);
                throw new UnsupportedOperationException();
            }
            }
        } catch (IOException e) {
            throw new InteropException(e);
        } catch (Throwable e) {
            e.printStackTrace();
            throw new InteropException(e);
        }
    }
    /**
     * Deserialises a PROV document whose format is unknown by probing the
     * parsers in turn: PROV-N, XML, JSON, then RDF. The first parser that
     * returns a non-null result wins; each failure is deliberately swallowed
     * so the next format can be tried.
     *
     * @throws UnsupportedOperationException when no parser accepts the file
     */
    public Object loadProvUnknownGraph(String filename)
            throws java.io.IOException, JAXBException, Throwable {
        try {
            // Attempt 1: PROV-N notation.
            Utility u=new Utility();
            CommonTree tree = u.convertASNToTree(filename);
            Object o=u.convertTreeToJavaBean(tree);
            if (o!=null) {
                return o;
            }
        } catch (Throwable t1) {
            // OK, we failed, let's try next format.
        }
        try {
            // Attempt 2: PROV-XML.
            File in=new File(filename);
            ProvDeserialiser deserial=ProvDeserialiser.getThreadProvDeserialiser();
            Document c=deserial.deserialiseDocument(in);
            if (c!=null) {
                return c;
            }
        } catch (Throwable t2) {
            // OK, we failed, let's try next format.
        }
        try {
            // Attempt 3: PROV-JSON.
            Object o=new org.openprovenance.prov.json.Converter().readDocument(filename);
            if (o!=null) {
                return o;
            }
        } catch (RuntimeException e) {
            // OK, we failed, let's try next format.
        }
        try {
            // Attempt 4: RDF (any syntax the RDF utility recognises).
            org.openprovenance.prov.rdf.Utility rdfU=new org.openprovenance.prov.rdf.Utility();
            Document doc=rdfU.parseRDF(filename);
            if (doc!=null) {
                return doc;
            }
        } catch (RuntimeException e) {
            //OK, we failed, let's try next format
        }
        System.out.println("Unparseable format " + filename);
        throw new UnsupportedOperationException();
    }
public void run() {
if (infile==null) return;
if (outfile==null) return;
try {
Document doc=(Document) loadProvKnownGraph(infile);
//doc.setNss(new Hashtable<String, String>());
//doc.getNss().put("pc1",PC1_NS);
//doc.getNss().put("prim",PRIM_NS);
//doc.getNss().put("prov","http://www.w3.org/ns/prov#");
//doc.getNss().put("xsd","http://www.w3.org/2001/XMLSchema");
//doc.getNss().put("xsi","http://www.w3.org/2001/XMLSchema-instance");
System.out.println("InteropFramework run() -> " + doc.getNss());
writeDocument(outfile, doc);
} catch (Throwable e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
| Don't hardcode /tmp (which don't exist on Windows)
File.createTempFile() will do the right thing
| prov-interop/src/main/java/org/openprovenance/prov/interop/InteropFramework.java | Don't hardcode /tmp (which don't exist on Windows) |
|
Java | mit | 9c95c60eab87d6633bdcd489778784cce31da093 | 0 | yusufyildirim/react-native-navigation,iotize/react-native-navigation,wix/react-native-navigation,guyca/react-native-navigation,thanhzusu/react-native-navigation,ceyhuno/react-native-navigation,pqkluan/react-native-navigation,shahen94/react-native-navigation,thanhzusu/react-native-navigation,Jpoliachik/react-native-navigation,chicojasl/react-native-navigation,chicojasl/react-native-navigation,yusufyildirim/react-native-navigation,wix/react-native-navigation,Jpoliachik/react-native-navigation,guyca/react-native-navigation,Jpoliachik/react-native-navigation,3sidedcube/react-native-navigation,shahen94/react-native-navigation,thanhzusu/react-native-navigation,iotize/react-native-navigation,thanhzusu/react-native-navigation,Jpoliachik/react-native-navigation,pqkluan/react-native-navigation,yusufyildirim/react-native-navigation,guyca/react-native-navigation,chicojasl/react-native-navigation,yusufyildirim/react-native-navigation,shahen94/react-native-navigation,3sidedcube/react-native-navigation,ceyhuno/react-native-navigation,ceyhuno/react-native-navigation,3sidedcube/react-native-navigation,ceyhuno/react-native-navigation,Jpoliachik/react-native-navigation,ceyhuno/react-native-navigation,iotize/react-native-navigation,ceyhuno/react-native-navigation,wix/react-native-navigation,thanhzusu/react-native-navigation,pqkluan/react-native-navigation,chicojasl/react-native-navigation,guyca/react-native-navigation,iotize/react-native-navigation,shahen94/react-native-navigation,pqkluan/react-native-navigation,Jpoliachik/react-native-navigation,chicojasl/react-native-navigation,thanhzusu/react-native-navigation,wix/react-native-navigation,wix/react-native-navigation,wix/react-native-navigation,3sidedcube/react-native-navigation,chicojasl/react-native-navigation | package com.reactnativenavigation.layouts;
import android.annotation.SuppressLint;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.RelativeLayout;
import com.aurelhubert.ahbottomnavigation.AHBottomNavigation;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Callback;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.WritableMap;
import com.reactnativenavigation.NavigationApplication;
import com.reactnativenavigation.events.EventBus;
import com.reactnativenavigation.events.ScreenChangedEvent;
import com.reactnativenavigation.params.ActivityParams;
import com.reactnativenavigation.params.AppStyle;
import com.reactnativenavigation.params.ContextualMenuParams;
import com.reactnativenavigation.params.FabParams;
import com.reactnativenavigation.params.LightBoxParams;
import com.reactnativenavigation.params.ScreenParams;
import com.reactnativenavigation.params.SideMenuParams;
import com.reactnativenavigation.params.SlidingOverlayParams;
import com.reactnativenavigation.params.SnackbarParams;
import com.reactnativenavigation.params.StyleParams;
import com.reactnativenavigation.params.TitleBarButtonParams;
import com.reactnativenavigation.params.TitleBarLeftButtonParams;
import com.reactnativenavigation.screens.NavigationType;
import com.reactnativenavigation.screens.Screen;
import com.reactnativenavigation.screens.ScreenStack;
import com.reactnativenavigation.utils.Task;
import com.reactnativenavigation.utils.ViewUtils;
import com.reactnativenavigation.views.BottomTabs;
import com.reactnativenavigation.views.LightBox;
import com.reactnativenavigation.views.SideMenu;
import com.reactnativenavigation.views.SideMenu.Side;
import com.reactnativenavigation.views.SnackbarAndFabContainer;
import com.reactnativenavigation.views.slidingOverlay.SlidingOverlay;
import com.reactnativenavigation.views.slidingOverlay.SlidingOverlaysQueue;
import java.util.List;
import static android.view.ViewGroup.LayoutParams.MATCH_PARENT;
import static android.view.ViewGroup.LayoutParams.WRAP_CONTENT;
@SuppressLint("ViewConstructor")
public class BottomTabsLayout extends BaseLayout implements AHBottomNavigation.OnTabSelectedListener {
    // Parameters this layout was launched with (tabs, side menus, style).
    private ActivityParams params;
    // Hosts snackbars and floating action buttons above the content.
    private SnackbarAndFabContainer snackbarAndFabContainer;
    private BottomTabs bottomTabs;
    // One screen stack per bottom tab, indexed like params.tabParams.
    private ScreenStack[] screenStacks;
    private final SideMenuParams leftSideMenuParams;
    private final SideMenuParams rightSideMenuParams;
    private final SlidingOverlaysQueue slidingOverlaysQueue = new SlidingOverlaysQueue();
    // Present only when at least one side menu was configured.
    private
    @Nullable
    SideMenu sideMenu;
    // Index of the currently shown tab/stack.
    private int currentStackIndex = 0;
    // Currently displayed light box, or null.
    private LightBox lightBox;
    /** Builds the full tab-based layout for the given activity parameters. */
    public BottomTabsLayout(AppCompatActivity activity, ActivityParams params) {
        super(activity);
        this.params = params;
        leftSideMenuParams = params.leftSideMenuParams;
        rightSideMenuParams = params.rightSideMenuParams;
        screenStacks = new ScreenStack[params.tabParams.size()];
        createLayout();
    }
    // Assembly order matters: side menu first (it becomes the stack parent),
    // then tabs, stacks, snackbar container, and finally the initial screen.
    private void createLayout() {
        createSideMenu();
        createBottomTabs();
        addBottomTabs();
        addScreenStacks();
        createSnackbarContainer();
        showInitialScreenStack();
        setInitialTabIndex();
    }
    // Selects the tab configured as the app-wide initial tab.
    private void setInitialTabIndex() {
        bottomTabs.setCurrentItem(AppStyle.appStyle.bottomTabsInitialIndex);
    }
    // Creates the drawer only when at least one side menu was configured.
    private void createSideMenu() {
        if (leftSideMenuParams == null && rightSideMenuParams == null) {
            return;
        }
        sideMenu = new SideMenu(getContext(), leftSideMenuParams, rightSideMenuParams);
        RelativeLayout.LayoutParams lp = new LayoutParams(MATCH_PARENT, MATCH_PARENT);
        addView(sideMenu, lp);
    }
    // Adds stacks in reverse so index 0 ends up on top of the view hierarchy.
    private void addScreenStacks() {
        for (int i = screenStacks.length - 1; i >= 0; i--) {
            createAndAddScreens(i);
        }
    }
    // Creates the stack for one tab and pushes its initial screen.
    private void createAndAddScreens(int position) {
        ScreenParams screenParams = params.tabParams.get(position);
        ScreenStack newStack = new ScreenStack(getActivity(), getScreenStackParent(), screenParams.getNavigatorId(), this);
        newStack.pushInitialScreen(screenParams, createScreenLayoutParams(screenParams));
        screenStacks[position] = newStack;
    }
    // Stacks attach to the side menu's content container when one exists.
    private RelativeLayout getScreenStackParent() {
        return sideMenu == null ? this : sideMenu.getContentContainer();
    }
    // Full-size layout params; optionally anchored above the bottom tabs.
    @NonNull
    private LayoutParams createScreenLayoutParams(ScreenParams params) {
        LayoutParams lp = new LayoutParams(MATCH_PARENT, MATCH_PARENT);
        if (params.styleParams.drawScreenAboveBottomTabs) {
            lp.addRule(RelativeLayout.ABOVE, bottomTabs.getId());
        }
        return lp;
    }
    private void createBottomTabs() {
        bottomTabs = new BottomTabs(getContext());
        bottomTabs.addTabs(params.tabParams, this);
    }
    private void addBottomTabs() {
        LayoutParams lp = new LayoutParams(MATCH_PARENT, WRAP_CONTENT);
        lp.addRule(ALIGN_PARENT_BOTTOM);
        getScreenStackParent().addView(bottomTabs, lp);
    }
    // The snackbar/FAB container floats over the content, aligned with tabs.
    private void createSnackbarContainer() {
        snackbarAndFabContainer = new SnackbarAndFabContainer(getContext(), this);
        RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(MATCH_PARENT, MATCH_PARENT);
        alignSnackbarContainerWithBottomTabs(lp, getCurrentScreen().getStyleParams());
        snackbarAndFabContainer.setClickable(false);
        getScreenStackParent().addView(snackbarAndFabContainer, lp);
    }
    // Shows the initial stack and notifies JS listeners of the first screen.
    private void showInitialScreenStack() {
        bottomTabs.setVisibilityByInitialScreen(getInitialScreenStack().peek().getStyleParams());
        showStackAndUpdateStyle(getInitialScreenStack(), NavigationType.InitialScreen);
        EventBus.instance.post(new ScreenChangedEvent(screenStacks[0].peek().getScreenParams()));
    }
    private ScreenStack getInitialScreenStack() {
        return screenStacks[AppStyle.appStyle.bottomTabsInitialIndex];
    }
@Override
public View asView() {
return this;
}
@Override
public boolean onBackPressed() {
if (handleBackInJs()) {
return true;
}
if (getCurrentScreenStack().canPop()) {
getCurrentScreenStack().pop(true, System.currentTimeMillis());
setBottomTabsStyleFromCurrentScreen();
EventBus.instance.post(new ScreenChangedEvent(getCurrentScreenStack().peek().getScreenParams()));
return true;
} else {
return false;
}
}
@Override
public boolean handleBackInJs() {
return getCurrentScreenStack().handleBackPressInJs();
}
    // The following setters broadcast to every tab's stack; each stack applies
    // the change only to the screen matching screenInstanceId.
    @Override
    public void setTopBarVisible(String screenInstanceId, boolean hidden, boolean animated) {
        for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
            screenStacks[i].setScreenTopBarVisible(screenInstanceId, hidden, animated);
        }
    }
    /** Shows/hides the bottom tab bar and records the state on the screen. */
    public void setBottomTabsVisible(boolean hidden, boolean animated) {
        getCurrentScreenStack().peek().updateBottomTabsVisibility(hidden);
        bottomTabs.setVisibility(hidden, animated);
    }
    @Override
    public void setTitleBarTitle(String screenInstanceId, String title) {
        for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
            screenStacks[i].setScreenTitleBarTitle(screenInstanceId, title);
        }
    }
    @Override
    public void setTitleBarSubtitle(String screenInstanceId, String subtitle) {
        for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
            screenStacks[i].setScreenTitleBarSubtitle(screenInstanceId, subtitle);
        }
    }
    @Override
    public void setTitleBarRightButtons(String screenInstanceId, String navigatorEventId, List<TitleBarButtonParams> titleBarButtons) {
        for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
            screenStacks[i].setScreenTitleBarRightButtons(screenInstanceId, navigatorEventId, titleBarButtons);
        }
    }
    @Override
    public void setTitleBarLeftButton(String screenInstanceId, String navigatorEventId, TitleBarLeftButtonParams titleBarLeftButtonParams) {
        for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
            screenStacks[i].setScreenTitleBarLeftButton(screenInstanceId, navigatorEventId, titleBarLeftButtonParams);
        }
    }
    // NOTE(review): navigatorEventId is accepted but not forwarded — confirm
    // whether ScreenStack.setFab is expected to receive it.
    @Override
    public void setFab(String screenInstanceId, String navigatorEventId, FabParams fabParams) {
        for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
            screenStacks[i].setFab(screenInstanceId, fabParams);
        }
    }
    @Override
    public void updateScreenStyle(String screenInstanceId, Bundle styleParams) {
        for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
            screenStacks[i].updateScreenStyle(screenInstanceId, styleParams);
        }
    }
    @Override
    public String getCurrentlyVisibleScreenId() {
        return getCurrentScreen().getScreenInstanceId();
    }
    @Override
    public void selectTopTabByTabIndex(String screenInstanceId, int index) {
        for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
            screenStacks[i].selectTopTabByTabIndex(screenInstanceId, index);
        }
    }
    @Override
    public void selectTopTabByScreen(String screenInstanceId) {
        for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
            screenStacks[i].selectTopTabByScreen(screenInstanceId);
        }
    }
    // Side-menu operations are no-ops when no side menu was configured.
    @Override
    public void toggleSideMenuVisible(boolean animated, Side side) {
        if (sideMenu != null) {
            sideMenu.toggleVisible(animated, side);
        }
    }
    @Override
    public void setSideMenuVisible(boolean animated, boolean visible, Side side) {
        if (sideMenu != null) {
            sideMenu.setVisible(visible, animated, side);
        }
    }
    // NOTE(review): the `side` parameter is unused — the lock mode is applied
    // to the whole drawer; confirm whether per-side locking was intended.
    @Override
    public void setSideMenuEnabled(boolean enabled, Side side) {
        if (sideMenu != null) {
            sideMenu.setDrawerLockMode(enabled ? DrawerLayout.LOCK_MODE_UNLOCKED : DrawerLayout.LOCK_MODE_LOCKED_CLOSED);
        }
    }
    /** Shows a snackbar bound to the visible screen's navigator event id. */
    @Override
    public void showSnackbar(SnackbarParams params) {
        final String eventId = getCurrentScreenStack().peek().getNavigatorEventId();
        snackbarAndFabContainer.showSnackbar(eventId, params);
    }
    @Override
    public void dismissSnackbar() {
        snackbarAndFabContainer.dismissSnackbar();
    }
    // Shows a light box; a second call while one is visible is ignored.
    // The callback clears the reference once the light box dismisses itself.
    @Override
    public void showLightBox(LightBoxParams params) {
        if (lightBox == null) {
            lightBox = new LightBox(getActivity(), new Runnable() {
                @Override
                public void run() {
                    lightBox = null;
                }
            }, params);
            lightBox.show();
        }
    }
    @Override
    public void dismissLightBox() {
        if (lightBox != null) {
            lightBox.hide();
            lightBox = null;
        }
    }
    // Sliding overlays are queued so they display one at a time.
    @Override
    public void showSlidingOverlay(final SlidingOverlayParams params) {
        slidingOverlaysQueue.add(new SlidingOverlay(this, params));
    }
    @Override
    public void hideSlidingOverlay() {
        slidingOverlaysQueue.remove();
    }
    // Re-announces the screen under the dismissed modal to JS listeners.
    @Override
    public void onModalDismissed() {
        getCurrentScreenStack().peek().getScreenParams().timestamp = System.currentTimeMillis();
        NavigationApplication.instance.getEventEmitter().sendWillAppearEvent(getCurrentScreenStack().peek().getScreenParams(), NavigationType.DismissModal);
        NavigationApplication.instance.getEventEmitter().sendDidAppearEvent(getCurrentScreenStack().peek().getScreenParams(), NavigationType.DismissModal);
        EventBus.instance.post(new ScreenChangedEvent(getCurrentScreenStack().peek().getScreenParams()));
    }
    @Override
    public boolean containsNavigator(String navigatorId) {
        // Unused
        return false;
    }
    @Override
    public void showContextualMenu(String screenInstanceId, ContextualMenuParams params, Callback onButtonClicked) {
        getCurrentScreenStack().peek().showContextualMenu(params, onButtonClicked);
    }
    @Override
    public void dismissContextualMenu(String screenInstanceId) {
        getCurrentScreenStack().peek().dismissContextualMenu();
    }
    /** The screen currently visible in the selected tab's stack. */
    @Override
    public Screen getCurrentScreen() {
        return getCurrentScreenStack().peek();
    }
    // Switches tabs programmatically; no-op when already selected.
    public void selectBottomTabByTabIndex(Integer index) {
        if (bottomTabs.getCurrentItem() != index) {
            bottomTabs.setCurrentItem(index);
        }
    }
    // Resolves the navigator id to a tab index, then switches to that tab.
    public void selectBottomTabByNavigatorId(final String navigatorId) {
        performOnStack(navigatorId, new Task<ScreenStack>() {
            @Override
            public void run(ScreenStack param) {
                selectBottomTabByTabIndex(getScreenStackIndex(navigatorId));
            }
        });
    }
private boolean hasBackgroundColor(StyleParams params) {
return params.screenBackgroundColor != null &&
params.screenBackgroundColor.hasColor();
}
private void setStyleFromScreen(StyleParams params) {
bottomTabs.setStyleFromScreen(params);
if (snackbarAndFabContainer != null && snackbarAndFabContainer.getLayoutParams() instanceof RelativeLayout.LayoutParams)
alignSnackbarContainerWithBottomTabs((LayoutParams) snackbarAndFabContainer.getLayoutParams(), params);
if (hasBackgroundColor(params)) {
asView().setBackgroundColor(params.screenBackgroundColor.getColor());
}
}
    /**
     * Pushes a screen onto the stack owning the navigator id; when that stack
     * is the visible one, also refreshes chrome style and notifies listeners.
     */
    @Override
    public void push(final ScreenParams params, final Promise onPushComplete) {
        performOnStack(params.getNavigatorId(), new Task<ScreenStack>() {
            @Override
            public void run(ScreenStack screenStack) {
                screenStack.push(params, createScreenLayoutParams(params), onPushComplete);
                if (isCurrentStack(screenStack)) {
                    setStyleFromScreen(params.styleParams);
                    EventBus.instance.post(new ScreenChangedEvent(params));
                }
            }
        });
    }
    /** Pops the top screen; style/listeners update after the pop animation. */
    @Override
    public void pop(final ScreenParams params) {
        performOnStack(params.getNavigatorId(), new Task<ScreenStack>() {
            @Override
            public void run(ScreenStack stack) {
                stack.pop(params.animateScreenTransitions, params.timestamp, new ScreenStack.OnScreenPop() {
                    @Override
                    public void onScreenPopAnimationEnd() {
                        setBottomTabsStyleFromCurrentScreen();
                        EventBus.instance.post(new ScreenChangedEvent(getCurrentScreenStack().peek().getScreenParams()));
                    }
                });
            }
        });
    }
    /** Pops every screen above the stack's root. */
    @Override
    public void popToRoot(final ScreenParams params) {
        performOnStack(params.getNavigatorId(), new Task<ScreenStack>() {
            @Override
            public void run(final ScreenStack stack) {
                stack.popToRoot(params.animateScreenTransitions, params.timestamp, new ScreenStack.OnScreenPop() {
                    @Override
                    public void onScreenPopAnimationEnd() {
                        if (isCurrentStack(stack)) {
                            setBottomTabsStyleFromCurrentScreen();
                            alignSnackbarContainerWithBottomTabs((LayoutParams) snackbarAndFabContainer.getLayoutParams(), params.styleParams);
                            EventBus.instance.post(new ScreenChangedEvent(stack.peek().getScreenParams()));
                        }
                    }
                });
            }
        });
    }
    /** Replaces the stack's content with a single new root screen. */
    @Override
    public void newStack(final ScreenParams params) {
        performOnStack(params.getNavigatorId(), new Task<ScreenStack>() {
            @Override
            public void run(ScreenStack screenStack) {
                screenStack.newStack(params, createScreenLayoutParams(params));
                if (isCurrentStack(screenStack)) {
                    setStyleFromScreen(params.styleParams);
                    alignSnackbarContainerWithBottomTabs((LayoutParams) snackbarAndFabContainer.getLayoutParams(), params.styleParams);
                    EventBus.instance.post(new ScreenChangedEvent(params));
                }
            }
        });
    }
    // Anchors the snackbar container above the tab bar whenever the tab bar is
    // visible or the screen draws above it; otherwise lets it reach the bottom.
    private void alignSnackbarContainerWithBottomTabs(LayoutParams lp, StyleParams styleParams) {
        if (styleParams.drawScreenAboveBottomTabs || !styleParams.bottomTabsHidden) {
            lp.addRule(ABOVE, bottomTabs.getId());
        } else {
            ViewUtils.removeRuleCompat(lp, ABOVE);
        }
    }
private void performOnStack(String navigatorId, Task<ScreenStack> task) {
try {
ScreenStack screenStack = getScreenStack(navigatorId);
task.run(screenStack);
} catch (ScreenStackNotFoundException e) {
Log.e("Navigation", "Could not perform action on stack [" + navigatorId + "]." +
"This should not have happened, it probably means a navigator action" +
"was called from an unmounted tab.");
}
}
    // Tears down all owned components; order mirrors creation order so child
    // views are released before their containers.
    @Override
    public void destroy() {
        snackbarAndFabContainer.destroy();
        for (ScreenStack screenStack : screenStacks) {
            screenStack.destroy();
        }
        if (sideMenu != null) {
            sideMenu.destroy();
        }
        if (lightBox != null) {
            lightBox.destroy();
            lightBox = null;
        }
        slidingOverlaysQueue.destroy();
    }
    /**
     * Bottom-tab click handler. Re-taps emit "bottomTabReselected" and keep
     * the selection; otherwise the stacks are swapped and both global and
     * per-screen "bottomTabSelected" events are emitted.
     *
     * @return {@code true} to accept the new selection
     */
    @Override
    public boolean onTabSelected(int position, boolean wasSelected) {
        if (wasSelected) {
            sendTabReselectedEventToJs();
            return false;
        }
        final int unselectedTabIndex = currentStackIndex;
        hideCurrentStack();
        showNewStack(position, NavigationType.BottomTabSelected);
        EventBus.instance.post(new ScreenChangedEvent(getCurrentScreenStack().peek().getScreenParams()));
        sendTabSelectedEventToJs(position, unselectedTabIndex);
        return true;
    }
    // NOTE(review): the event payload is built twice — presumably because a
    // WritableMap is consumed by the first emit; confirm before deduplicating.
    private void sendTabSelectedEventToJs(int selectedTabIndex, int unselectedTabIndex) {
        String navigatorEventId = getCurrentScreenStack().peek().getNavigatorEventId();
        WritableMap data = createTabSelectedEventData(selectedTabIndex, unselectedTabIndex);
        NavigationApplication.instance.getEventEmitter().sendNavigatorEvent("bottomTabSelected", navigatorEventId, data);
        data = createTabSelectedEventData(selectedTabIndex, unselectedTabIndex);
        NavigationApplication.instance.getEventEmitter().sendNavigatorEvent("bottomTabSelected", data);
    }
    private WritableMap createTabSelectedEventData(int selectedTabIndex, int unselectedTabIndex) {
        WritableMap data = Arguments.createMap();
        data.putInt("selectedTabIndex", selectedTabIndex);
        data.putInt("unselectedTabIndex", unselectedTabIndex);
        return data;
    }
    private void sendTabReselectedEventToJs() {
        WritableMap data = Arguments.createMap();
        String navigatorEventId = getCurrentScreenStack().peek().getNavigatorEventId();
        NavigationApplication.instance.getEventEmitter().sendNavigatorEvent("bottomTabReselected", navigatorEventId, data);
    }
    // Shows the stack at `position` and records it as current.
    private void showNewStack(int position, NavigationType type) {
        showStackAndUpdateStyle(screenStacks[position], type);
        currentStackIndex = position;
    }
    private void showStackAndUpdateStyle(ScreenStack newStack, NavigationType type) {
        newStack.show(type);
        setStyleFromScreen(newStack.getCurrentScreenStyleParams());
    }
private void hideCurrentStack() {
ScreenStack currentScreenStack = getCurrentScreenStack();
currentScreenStack.hide(NavigationType.BottomTabSelected);
}
private ScreenStack getCurrentScreenStack() {
return screenStacks[currentStackIndex];
}
private
@NonNull
ScreenStack getScreenStack(String navigatorId) throws ScreenStackNotFoundException {
int index = getScreenStackIndex(navigatorId);
return screenStacks[index];
}
public void setBottomTabBadgeByIndex(Integer index, String badge) {
bottomTabs.setNotification(badge, index);
}
public void setBottomTabBadgeByNavigatorId(String navigatorId, String badge) {
bottomTabs.setNotification(badge, getScreenStackIndex(navigatorId));
}
public void setBottomTabButtonByIndex(Integer index, ScreenParams params) {
bottomTabs.setTabButton(params, index);
}
public void setBottomTabButtonByNavigatorId(String navigatorId, ScreenParams params) {
bottomTabs.setTabButton(params, getScreenStackIndex(navigatorId));
}
private int getScreenStackIndex(String navigatorId) throws ScreenStackNotFoundException {
for (int i = 0; i < screenStacks.length; i++) {
if (screenStacks[i].getNavigatorId().equals(navigatorId)) {
return i;
}
}
throw new ScreenStackNotFoundException("Stack " + navigatorId + " not found");
}
private class ScreenStackNotFoundException extends RuntimeException {
ScreenStackNotFoundException(String navigatorId) {
super(navigatorId);
}
}
private boolean isCurrentStack(ScreenStack screenStack) {
return getCurrentScreenStack() == screenStack;
}
private void setBottomTabsStyleFromCurrentScreen() {
setStyleFromScreen(getCurrentScreenStack().getCurrentScreenStyleParams());
}
@Override
public boolean onTitleBarBackButtonClick() {
if (getCurrentScreenStack().canPop()) {
getCurrentScreenStack().pop(true, System.currentTimeMillis(), new ScreenStack.OnScreenPop() {
@Override
public void onScreenPopAnimationEnd() {
setBottomTabsStyleFromCurrentScreen();
EventBus.instance.post(new ScreenChangedEvent(getCurrentScreenStack().peek().getScreenParams()));
}
});
return true;
}
return false;
}
@Override
public void onSideMenuButtonClick() {
final String navigatorEventId = getCurrentScreenStack().peek().getNavigatorEventId();
NavigationApplication.instance.getEventEmitter().sendNavigatorEvent("sideMenu", navigatorEventId);
if (sideMenu != null) {
sideMenu.openDrawer(Side.Left);
}
}
}
| android/app/src/main/java/com/reactnativenavigation/layouts/BottomTabsLayout.java | package com.reactnativenavigation.layouts;
import android.annotation.SuppressLint;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.RelativeLayout;
import com.aurelhubert.ahbottomnavigation.AHBottomNavigation;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Callback;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.WritableMap;
import com.reactnativenavigation.NavigationApplication;
import com.reactnativenavigation.events.EventBus;
import com.reactnativenavigation.events.ScreenChangedEvent;
import com.reactnativenavigation.params.ActivityParams;
import com.reactnativenavigation.params.AppStyle;
import com.reactnativenavigation.params.ContextualMenuParams;
import com.reactnativenavigation.params.FabParams;
import com.reactnativenavigation.params.LightBoxParams;
import com.reactnativenavigation.params.ScreenParams;
import com.reactnativenavigation.params.SideMenuParams;
import com.reactnativenavigation.params.SlidingOverlayParams;
import com.reactnativenavigation.params.SnackbarParams;
import com.reactnativenavigation.params.StyleParams;
import com.reactnativenavigation.params.TitleBarButtonParams;
import com.reactnativenavigation.params.TitleBarLeftButtonParams;
import com.reactnativenavigation.screens.NavigationType;
import com.reactnativenavigation.screens.Screen;
import com.reactnativenavigation.screens.ScreenStack;
import com.reactnativenavigation.utils.Task;
import com.reactnativenavigation.utils.ViewUtils;
import com.reactnativenavigation.views.BottomTabs;
import com.reactnativenavigation.views.LightBox;
import com.reactnativenavigation.views.SideMenu;
import com.reactnativenavigation.views.SideMenu.Side;
import com.reactnativenavigation.views.SnackbarAndFabContainer;
import com.reactnativenavigation.views.slidingOverlay.SlidingOverlay;
import com.reactnativenavigation.views.slidingOverlay.SlidingOverlaysQueue;
import java.util.List;
import static android.view.ViewGroup.LayoutParams.MATCH_PARENT;
import static android.view.ViewGroup.LayoutParams.WRAP_CONTENT;
@SuppressLint("ViewConstructor")
public class BottomTabsLayout extends BaseLayout implements AHBottomNavigation.OnTabSelectedListener {
private ActivityParams params;
private SnackbarAndFabContainer snackbarAndFabContainer;
private BottomTabs bottomTabs;
private ScreenStack[] screenStacks;
private final SideMenuParams leftSideMenuParams;
private final SideMenuParams rightSideMenuParams;
private final SlidingOverlaysQueue slidingOverlaysQueue = new SlidingOverlaysQueue();
private
@Nullable
SideMenu sideMenu;
private int currentStackIndex = 0;
private LightBox lightBox;
public BottomTabsLayout(AppCompatActivity activity, ActivityParams params) {
super(activity);
this.params = params;
leftSideMenuParams = params.leftSideMenuParams;
rightSideMenuParams = params.rightSideMenuParams;
screenStacks = new ScreenStack[params.tabParams.size()];
createLayout();
}
private void createLayout() {
createSideMenu();
createBottomTabs();
addBottomTabs();
addScreenStacks();
createSnackbarContainer();
showInitialScreenStack();
setInitialTabIndex();
}
private void setInitialTabIndex() {
bottomTabs.setCurrentItem(AppStyle.appStyle.bottomTabsInitialIndex);
}
private void createSideMenu() {
if (leftSideMenuParams == null && rightSideMenuParams == null) {
return;
}
sideMenu = new SideMenu(getContext(), leftSideMenuParams, rightSideMenuParams);
RelativeLayout.LayoutParams lp = new LayoutParams(MATCH_PARENT, MATCH_PARENT);
addView(sideMenu, lp);
}
private void addScreenStacks() {
for (int i = screenStacks.length - 1; i >= 0; i--) {
createAndAddScreens(i);
}
}
private void createAndAddScreens(int position) {
ScreenParams screenParams = params.tabParams.get(position);
ScreenStack newStack = new ScreenStack(getActivity(), getScreenStackParent(), screenParams.getNavigatorId(), this);
newStack.pushInitialScreen(screenParams, createScreenLayoutParams(screenParams));
screenStacks[position] = newStack;
}
private RelativeLayout getScreenStackParent() {
return sideMenu == null ? this : sideMenu.getContentContainer();
}
@NonNull
private LayoutParams createScreenLayoutParams(ScreenParams params) {
LayoutParams lp = new LayoutParams(MATCH_PARENT, MATCH_PARENT);
if (params.styleParams.drawScreenAboveBottomTabs) {
lp.addRule(RelativeLayout.ABOVE, bottomTabs.getId());
}
return lp;
}
private void createBottomTabs() {
bottomTabs = new BottomTabs(getContext());
bottomTabs.addTabs(params.tabParams, this);
}
private void addBottomTabs() {
LayoutParams lp = new LayoutParams(MATCH_PARENT, WRAP_CONTENT);
lp.addRule(ALIGN_PARENT_BOTTOM);
getScreenStackParent().addView(bottomTabs, lp);
}
private void createSnackbarContainer() {
snackbarAndFabContainer = new SnackbarAndFabContainer(getContext(), this);
RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(MATCH_PARENT, MATCH_PARENT);
alignSnackbarContainerWithBottomTabs(lp, getCurrentScreen().getStyleParams());
snackbarAndFabContainer.setClickable(false);
getScreenStackParent().addView(snackbarAndFabContainer, lp);
}
private void showInitialScreenStack() {
bottomTabs.setVisibilityByInitialScreen(getInitialScreenStack().peek().getStyleParams());
showStackAndUpdateStyle(getInitialScreenStack(), NavigationType.InitialScreen);
EventBus.instance.post(new ScreenChangedEvent(screenStacks[0].peek().getScreenParams()));
}
private ScreenStack getInitialScreenStack() {
return screenStacks[AppStyle.appStyle.bottomTabsInitialIndex];
}
@Override
public View asView() {
return this;
}
@Override
public boolean onBackPressed() {
if (handleBackInJs()) {
return true;
}
if (getCurrentScreenStack().canPop()) {
getCurrentScreenStack().pop(true, System.currentTimeMillis());
setBottomTabsStyleFromCurrentScreen();
EventBus.instance.post(new ScreenChangedEvent(getCurrentScreenStack().peek().getScreenParams()));
return true;
} else {
return false;
}
}
@Override
public boolean handleBackInJs() {
return getCurrentScreenStack().handleBackPressInJs();
}
@Override
public void setTopBarVisible(String screenInstanceId, boolean hidden, boolean animated) {
for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
screenStacks[i].setScreenTopBarVisible(screenInstanceId, hidden, animated);
}
}
public void setBottomTabsVisible(boolean hidden, boolean animated) {
getCurrentScreenStack().peek().updateBottomTabsVisibility(hidden);
bottomTabs.setVisibility(hidden, animated);
}
@Override
public void setTitleBarTitle(String screenInstanceId, String title) {
for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
screenStacks[i].setScreenTitleBarTitle(screenInstanceId, title);
}
}
@Override
public void setTitleBarSubtitle(String screenInstanceId, String subtitle) {
for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
screenStacks[i].setScreenTitleBarSubtitle(screenInstanceId, subtitle);
}
}
@Override
public void setTitleBarRightButtons(String screenInstanceId, String navigatorEventId, List<TitleBarButtonParams> titleBarButtons) {
for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
screenStacks[i].setScreenTitleBarRightButtons(screenInstanceId, navigatorEventId, titleBarButtons);
}
}
@Override
public void setTitleBarLeftButton(String screenInstanceId, String navigatorEventId, TitleBarLeftButtonParams titleBarLeftButtonParams) {
for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
screenStacks[i].setScreenTitleBarLeftButton(screenInstanceId, navigatorEventId, titleBarLeftButtonParams);
}
}
@Override
public void setFab(String screenInstanceId, String navigatorEventId, FabParams fabParams) {
for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
screenStacks[i].setFab(screenInstanceId, fabParams);
}
}
@Override
public void updateScreenStyle(String screenInstanceId, Bundle styleParams) {
for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
screenStacks[i].updateScreenStyle(screenInstanceId, styleParams);
}
}
@Override
public String getCurrentlyVisibleScreenId() {
return getCurrentScreen().getScreenInstanceId();
}
@Override
public void selectTopTabByTabIndex(String screenInstanceId, int index) {
for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
screenStacks[i].selectTopTabByTabIndex(screenInstanceId, index);
}
}
@Override
public void selectTopTabByScreen(String screenInstanceId) {
for (int i = 0; i < bottomTabs.getItemsCount(); i++) {
screenStacks[i].selectTopTabByScreen(screenInstanceId);
}
}
@Override
public void toggleSideMenuVisible(boolean animated, Side side) {
if (sideMenu != null) {
sideMenu.toggleVisible(animated, side);
}
}
@Override
public void setSideMenuVisible(boolean animated, boolean visible, Side side) {
if (sideMenu != null) {
sideMenu.setVisible(visible, animated, side);
}
}
@Override
public void setSideMenuEnabled(boolean enabled, Side side) {
if (sideMenu != null) {
sideMenu.setDrawerLockMode(enabled ? DrawerLayout.LOCK_MODE_UNLOCKED : DrawerLayout.LOCK_MODE_LOCKED_CLOSED);
}
}
@Override
public void showSnackbar(SnackbarParams params) {
final String eventId = getCurrentScreenStack().peek().getNavigatorEventId();
snackbarAndFabContainer.showSnackbar(eventId, params);
}
@Override
public void dismissSnackbar() {
snackbarAndFabContainer.dismissSnackbar();
}
@Override
public void showLightBox(LightBoxParams params) {
if (lightBox == null) {
lightBox = new LightBox(getActivity(), new Runnable() {
@Override
public void run() {
lightBox = null;
}
}, params);
lightBox.show();
}
}
@Override
public void dismissLightBox() {
if (lightBox != null) {
lightBox.hide();
lightBox = null;
}
}
@Override
public void showSlidingOverlay(final SlidingOverlayParams params) {
slidingOverlaysQueue.add(new SlidingOverlay(this, params));
}
@Override
public void hideSlidingOverlay() {
slidingOverlaysQueue.remove();
}
@Override
public void onModalDismissed() {
getCurrentScreenStack().peek().getScreenParams().timestamp = System.currentTimeMillis();
NavigationApplication.instance.getEventEmitter().sendWillAppearEvent(getCurrentScreenStack().peek().getScreenParams(), NavigationType.DismissModal);
NavigationApplication.instance.getEventEmitter().sendDidAppearEvent(getCurrentScreenStack().peek().getScreenParams(), NavigationType.DismissModal);
EventBus.instance.post(new ScreenChangedEvent(getCurrentScreenStack().peek().getScreenParams()));
}
@Override
public boolean containsNavigator(String navigatorId) {
// Unused
return false;
}
@Override
public void showContextualMenu(String screenInstanceId, ContextualMenuParams params, Callback onButtonClicked) {
getCurrentScreenStack().peek().showContextualMenu(params, onButtonClicked);
}
@Override
public void dismissContextualMenu(String screenInstanceId) {
getCurrentScreenStack().peek().dismissContextualMenu();
}
@Override
public Screen getCurrentScreen() {
return getCurrentScreenStack().peek();
}
public void selectBottomTabByTabIndex(Integer index) {
if (bottomTabs.getCurrentItem() != index) {
bottomTabs.setCurrentItem(index);
}
}
public void selectBottomTabByNavigatorId(final String navigatorId) {
performOnStack(navigatorId, new Task<ScreenStack>() {
@Override
public void run(ScreenStack param) {
bottomTabs.setCurrentItem(getScreenStackIndex(navigatorId));
}
});
}
private boolean hasBackgroundColor(StyleParams params) {
return params.screenBackgroundColor != null &&
params.screenBackgroundColor.hasColor();
}
private void setStyleFromScreen(StyleParams params) {
bottomTabs.setStyleFromScreen(params);
if (snackbarAndFabContainer != null && snackbarAndFabContainer.getLayoutParams() instanceof RelativeLayout.LayoutParams)
alignSnackbarContainerWithBottomTabs((LayoutParams) snackbarAndFabContainer.getLayoutParams(), params);
if (hasBackgroundColor(params)) {
asView().setBackgroundColor(params.screenBackgroundColor.getColor());
}
}
@Override
public void push(final ScreenParams params, final Promise onPushComplete) {
performOnStack(params.getNavigatorId(), new Task<ScreenStack>() {
@Override
public void run(ScreenStack screenStack) {
screenStack.push(params, createScreenLayoutParams(params), onPushComplete);
if (isCurrentStack(screenStack)) {
setStyleFromScreen(params.styleParams);
EventBus.instance.post(new ScreenChangedEvent(params));
}
}
});
}
@Override
public void pop(final ScreenParams params) {
performOnStack(params.getNavigatorId(), new Task<ScreenStack>() {
@Override
public void run(ScreenStack stack) {
stack.pop(params.animateScreenTransitions, params.timestamp, new ScreenStack.OnScreenPop() {
@Override
public void onScreenPopAnimationEnd() {
setBottomTabsStyleFromCurrentScreen();
EventBus.instance.post(new ScreenChangedEvent(getCurrentScreenStack().peek().getScreenParams()));
}
});
}
});
}
@Override
public void popToRoot(final ScreenParams params) {
performOnStack(params.getNavigatorId(), new Task<ScreenStack>() {
@Override
public void run(final ScreenStack stack) {
stack.popToRoot(params.animateScreenTransitions, params.timestamp, new ScreenStack.OnScreenPop() {
@Override
public void onScreenPopAnimationEnd() {
if (isCurrentStack(stack)) {
setBottomTabsStyleFromCurrentScreen();
alignSnackbarContainerWithBottomTabs((LayoutParams) snackbarAndFabContainer.getLayoutParams(), params.styleParams);
EventBus.instance.post(new ScreenChangedEvent(stack.peek().getScreenParams()));
}
}
});
}
});
}
@Override
public void newStack(final ScreenParams params) {
performOnStack(params.getNavigatorId(), new Task<ScreenStack>() {
@Override
public void run(ScreenStack screenStack) {
screenStack.newStack(params, createScreenLayoutParams(params));
if (isCurrentStack(screenStack)) {
setStyleFromScreen(params.styleParams);
alignSnackbarContainerWithBottomTabs((LayoutParams) snackbarAndFabContainer.getLayoutParams(), params.styleParams);
EventBus.instance.post(new ScreenChangedEvent(params));
}
}
});
}
private void alignSnackbarContainerWithBottomTabs(LayoutParams lp, StyleParams styleParams) {
if (styleParams.drawScreenAboveBottomTabs || !styleParams.bottomTabsHidden) {
lp.addRule(ABOVE, bottomTabs.getId());
} else {
ViewUtils.removeRuleCompat(lp, ABOVE);
}
}
private void performOnStack(String navigatorId, Task<ScreenStack> task) {
try {
ScreenStack screenStack = getScreenStack(navigatorId);
task.run(screenStack);
} catch (ScreenStackNotFoundException e) {
Log.e("Navigation", "Could not perform action on stack [" + navigatorId + "]." +
"This should not have happened, it probably means a navigator action" +
"was called from an unmounted tab.");
}
}
@Override
public void destroy() {
snackbarAndFabContainer.destroy();
for (ScreenStack screenStack : screenStacks) {
screenStack.destroy();
}
if (sideMenu != null) {
sideMenu.destroy();
}
if (lightBox != null) {
lightBox.destroy();
lightBox = null;
}
slidingOverlaysQueue.destroy();
}
@Override
public boolean onTabSelected(int position, boolean wasSelected) {
if (wasSelected) {
sendTabReselectedEventToJs();
return false;
}
final int unselectedTabIndex = currentStackIndex;
hideCurrentStack();
showNewStack(position, NavigationType.BottomTabSelected);
EventBus.instance.post(new ScreenChangedEvent(getCurrentScreenStack().peek().getScreenParams()));
sendTabSelectedEventToJs(position, unselectedTabIndex);
return true;
}
private void sendTabSelectedEventToJs(int selectedTabIndex, int unselectedTabIndex) {
String navigatorEventId = getCurrentScreenStack().peek().getNavigatorEventId();
WritableMap data = createTabSelectedEventData(selectedTabIndex, unselectedTabIndex);
NavigationApplication.instance.getEventEmitter().sendNavigatorEvent("bottomTabSelected", navigatorEventId, data);
data = createTabSelectedEventData(selectedTabIndex, unselectedTabIndex);
NavigationApplication.instance.getEventEmitter().sendNavigatorEvent("bottomTabSelected", data);
}
private WritableMap createTabSelectedEventData(int selectedTabIndex, int unselectedTabIndex) {
WritableMap data = Arguments.createMap();
data.putInt("selectedTabIndex", selectedTabIndex);
data.putInt("unselectedTabIndex", unselectedTabIndex);
return data;
}
private void sendTabReselectedEventToJs() {
WritableMap data = Arguments.createMap();
String navigatorEventId = getCurrentScreenStack().peek().getNavigatorEventId();
NavigationApplication.instance.getEventEmitter().sendNavigatorEvent("bottomTabReselected", navigatorEventId, data);
}
private void showNewStack(int position, NavigationType type) {
showStackAndUpdateStyle(screenStacks[position], type);
currentStackIndex = position;
}
private void showStackAndUpdateStyle(ScreenStack newStack, NavigationType type) {
newStack.show(type);
setStyleFromScreen(newStack.getCurrentScreenStyleParams());
}
private void hideCurrentStack() {
ScreenStack currentScreenStack = getCurrentScreenStack();
currentScreenStack.hide(NavigationType.BottomTabSelected);
}
private ScreenStack getCurrentScreenStack() {
return screenStacks[currentStackIndex];
}
private
@NonNull
ScreenStack getScreenStack(String navigatorId) throws ScreenStackNotFoundException {
int index = getScreenStackIndex(navigatorId);
return screenStacks[index];
}
public void setBottomTabBadgeByIndex(Integer index, String badge) {
bottomTabs.setNotification(badge, index);
}
public void setBottomTabBadgeByNavigatorId(String navigatorId, String badge) {
bottomTabs.setNotification(badge, getScreenStackIndex(navigatorId));
}
public void setBottomTabButtonByIndex(Integer index, ScreenParams params) {
bottomTabs.setTabButton(params, index);
}
public void setBottomTabButtonByNavigatorId(String navigatorId, ScreenParams params) {
bottomTabs.setTabButton(params, getScreenStackIndex(navigatorId));
}
private int getScreenStackIndex(String navigatorId) throws ScreenStackNotFoundException {
for (int i = 0; i < screenStacks.length; i++) {
if (screenStacks[i].getNavigatorId().equals(navigatorId)) {
return i;
}
}
throw new ScreenStackNotFoundException("Stack " + navigatorId + " not found");
}
private class ScreenStackNotFoundException extends RuntimeException {
ScreenStackNotFoundException(String navigatorId) {
super(navigatorId);
}
}
private boolean isCurrentStack(ScreenStack screenStack) {
return getCurrentScreenStack() == screenStack;
}
private void setBottomTabsStyleFromCurrentScreen() {
setStyleFromScreen(getCurrentScreenStack().getCurrentScreenStyleParams());
}
@Override
public boolean onTitleBarBackButtonClick() {
if (getCurrentScreenStack().canPop()) {
getCurrentScreenStack().pop(true, System.currentTimeMillis(), new ScreenStack.OnScreenPop() {
@Override
public void onScreenPopAnimationEnd() {
setBottomTabsStyleFromCurrentScreen();
EventBus.instance.post(new ScreenChangedEvent(getCurrentScreenStack().peek().getScreenParams()));
}
});
return true;
}
return false;
}
@Override
public void onSideMenuButtonClick() {
final String navigatorEventId = getCurrentScreenStack().peek().getNavigatorEventId();
NavigationApplication.instance.getEventEmitter().sendNavigatorEvent("sideMenu", navigatorEventId);
if (sideMenu != null) {
sideMenu.openDrawer(Side.Left);
}
}
}
| Select bottom bottom tab by navigatorId only if it's not selected
This is potentially a breaking change as calling navigator.switchToTab()
won't do anything now if the current tab is already selected.
| android/app/src/main/java/com/reactnativenavigation/layouts/BottomTabsLayout.java | Select bottom bottom tab by navigatorId only if it's not selected |
|
Java | epl-1.0 | 710ea04a3fa3d4285af498f0c805674eeea97961 | 0 | usethesource/rascal-value | package io.usethesource.vallang.util;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedDeque;
public class WeakWriteLockingHashConsingMap<T> implements HashConsingMap<T> {
private static class WeakReferenceWrap<T> extends WeakReference<T> {
private final int hash;
public WeakReferenceWrap(int hash, T referent, ReferenceQueue<? super T> q) {
super(referent, q);
this.hash = hash;
}
@Override
public int hashCode() {
return hash;
}
@SuppressWarnings("unchecked")
@Override
public boolean equals(Object obj) {
assert obj instanceof WeakReferenceWrap<?> && obj != null;
@SuppressWarnings("unchecked")
WeakReferenceWrap<T> wrappedObj = (WeakReferenceWrap<T>) obj;
if (wrappedObj.hash == hash) {
T self = get();
if (self == null) {
return false;
}
T other = wrappedObj.get();
return other != null && self.equals(other);
}
return false;
}
}
private static final class LookupWrapper<T> {
private final int hash;
private final T ref;
public LookupWrapper(int hash, T ref) {
this.hash = hash;
this.ref = ref;
}
@Override
public int hashCode() {
return hash;
}
@Override
public boolean equals(Object obj) {
// only internal use of this class
assert obj instanceof WeakReferenceWrap<?> && obj != null;
@SuppressWarnings("unchecked")
WeakReferenceWrap<T> wrappedObj = (WeakReferenceWrap<T>) obj;
if (wrappedObj.hash == hash) {
T other = wrappedObj.get();
return other != null && ref.equals(other);
}
return false;
}
}
private final Map<WeakReferenceWrap<T>,WeakReferenceWrap<T>> data = new HashMap<>();
private final ReferenceQueue<T> cleared = new ReferenceQueue<>();
public WeakWriteLockingHashConsingMap() {
Cleanup.register(this);
}
@Override
public T get(T key) {
LookupWrapper<T> keyLookup = new LookupWrapper<>(key.hashCode(), key);
@SuppressWarnings("unlikely-arg-type")
WeakReferenceWrap<T> result = data.get(keyLookup);
if (result != null) {
T actualResult = result.get();
if (actualResult != null) {
return actualResult;
}
}
synchronized (this) {
WeakReferenceWrap<T> keyPut = new WeakReferenceWrap<>(keyLookup.hash, key, cleared);
while (true) {
result = data.merge(keyPut, keyPut, (oldValue, newValue) -> oldValue.get() == null ? newValue : oldValue);
if (result == keyPut) {
// a regular put
return key;
}
else {
T actualResult = result.get();
if (actualResult != null) {
// value was already in there, and also still held a reference (which is true for most cases)
keyPut.clear(); // avoid getting a cleared reference in the queue
return actualResult;
}
}
}
}
}
private void cleanup() {
WeakReferenceWrap<?> c = (WeakReferenceWrap<?>) cleared.poll();
if (c != null) {
synchronized (this) {
while (c != null) {
data.remove(c);
c = (WeakReferenceWrap<?>) cleared.poll();
}
}
}
}
private static class Cleanup extends Thread {
private final ConcurrentLinkedDeque<WeakReference<WeakWriteLockingHashConsingMap<?>>> caches;
private Cleanup() {
caches = new ConcurrentLinkedDeque<>();
setDaemon(true);
setName("Cleanup Thread for " + WeakWriteLockingHashConsingMap.class.getName());
start();
}
private static class InstanceHolder {
static final Cleanup INSTANCE = new Cleanup();
}
public static void register(WeakWriteLockingHashConsingMap<?> cache) {
InstanceHolder.INSTANCE.caches.add(new WeakReference<>(cache));
}
@Override
public void run() {
while (true) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
return;
}
try {
Iterator<WeakReference<WeakWriteLockingHashConsingMap<?>>> it = caches.iterator();
while (it.hasNext()) {
WeakWriteLockingHashConsingMap<?> cur = it.next().get();
if (cur == null) {
it.remove();
}
else {
cur.cleanup();
}
}
}
catch (Throwable e) {
System.err.println("Cleanup thread failed with: " + e.getMessage());
e.printStackTrace(System.err);
}
}
}
}
}
| src/main/java/io/usethesource/vallang/util/WeakWriteLockingHashConsingMap.java | package io.usethesource.vallang.util;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedDeque;
public class WeakWriteLockingHashConsingMap<T> implements HashConsingMap<T> {
private static class WeakReferenceWrap<T> extends WeakReference<T> {
private final int hash;
public WeakReferenceWrap(int hash, T referent, ReferenceQueue<? super T> q) {
super(referent, q);
this.hash = hash;
}
@Override
public int hashCode() {
return hash;
}
@SuppressWarnings("unchecked")
@Override
public boolean equals(Object obj) {
if (obj == null || obj.hashCode() != hash) {
return false;
}
T self = get();
if (self == null) {
return false;
}
T other;
if ((obj instanceof WeakReferenceWrap<?>)) {
other = ((WeakReferenceWrap<T>) obj).get();
}
else {
other = ((LookupWrapper<T>)obj).ref;
}
return other != null && other.equals(self);
}
}
private static final class LookupWrapper<T> {
private final int hash;
private final T ref;
public LookupWrapper(int hash, T ref) {
this.hash = hash;
this.ref = ref;
}
@Override
public int hashCode() {
return hash;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof WeakReferenceWrap<?>) {
return obj.equals(this);
}
return false;
}
}
private final Map<WeakReferenceWrap<T>,WeakReferenceWrap<T>> data = new HashMap<>();
private final ReferenceQueue<T> cleared = new ReferenceQueue<>();
public WeakWriteLockingHashConsingMap() {
Cleanup.register(this);
}
@Override
public T get(T key) {
LookupWrapper<T> keyLookup = new LookupWrapper<>(key.hashCode(), key);
@SuppressWarnings("unlikely-arg-type")
WeakReferenceWrap<T> result = data.get(keyLookup);
if (result != null) {
T actualResult = result.get();
if (actualResult != null) {
return actualResult;
}
}
synchronized (this) {
WeakReferenceWrap<T> keyPut = new WeakReferenceWrap<>(keyLookup.hash, key, cleared);
while (true) {
result = data.merge(keyPut, keyPut, (oldValue, newValue) -> oldValue.get() == null ? newValue : oldValue);
if (result == keyPut) {
// a regular put
return key;
}
else {
T actualResult = result.get();
if (actualResult != null) {
// value was already in there, and also still held a reference (which is true for most cases)
keyPut.clear(); // avoid getting a cleared reference in the queue
return actualResult;
}
}
}
}
}
private void cleanup() {
WeakReferenceWrap<?> c = (WeakReferenceWrap<?>) cleared.poll();
if (c != null) {
synchronized (this) {
while (c != null) {
data.remove(c);
c = (WeakReferenceWrap<?>) cleared.poll();
}
}
}
}
private static class Cleanup extends Thread {
private final ConcurrentLinkedDeque<WeakReference<WeakWriteLockingHashConsingMap<?>>> caches;
private Cleanup() {
caches = new ConcurrentLinkedDeque<>();
setDaemon(true);
setName("Cleanup Thread for " + WeakWriteLockingHashConsingMap.class.getName());
start();
}
private static class InstanceHolder {
static final Cleanup INSTANCE = new Cleanup();
}
public static void register(WeakWriteLockingHashConsingMap<?> cache) {
InstanceHolder.INSTANCE.caches.add(new WeakReference<>(cache));
}
@Override
public void run() {
while (true) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
return;
}
try {
Iterator<WeakReference<WeakWriteLockingHashConsingMap<?>>> it = caches.iterator();
while (it.hasNext()) {
WeakWriteLockingHashConsingMap<?> cur = it.next().get();
if (cur == null) {
it.remove();
}
else {
cur.cleanup();
}
}
}
catch (Throwable e) {
System.err.println("Cleanup thread failed with: " + e.getMessage());
e.printStackTrace(System.err);
}
}
}
}
}
| Optimized equals method inside wrapped containers
| src/main/java/io/usethesource/vallang/util/WeakWriteLockingHashConsingMap.java | Optimized equals method inside wrapped containers |
|
Java | agpl-3.0 | 2e96d5ca75501a744bf90f7d4e759a4a06324498 | 0 | acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling | package org.ow2.proactive.scheduler.ext.mapreduce;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.security.KeyException;
import java.security.PublicKey;
import java.util.ArrayList;
import java.util.List;
import javax.security.auth.login.LoginException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.StringUtils;
import org.ow2.proactive.authentication.crypto.CredData;
import org.ow2.proactive.authentication.crypto.Credentials;
import org.ow2.proactive.scheduler.common.Scheduler;
import org.ow2.proactive.scheduler.common.SchedulerAuthenticationInterface;
import org.ow2.proactive.scheduler.common.SchedulerConnection;
import org.ow2.proactive.scheduler.common.exception.AlreadyConnectedException;
import org.ow2.proactive.scheduler.common.exception.ConnectionException;
import org.ow2.proactive.scheduler.common.exception.JobCreationException;
import org.ow2.proactive.scheduler.common.exception.NotConnectedException;
import org.ow2.proactive.scheduler.common.exception.PermissionException;
import org.ow2.proactive.scheduler.common.exception.SubmissionClosedException;
import org.ow2.proactive.scheduler.common.exception.UserException;
import org.ow2.proactive.scheduler.common.job.JobEnvironment;
import org.ow2.proactive.scheduler.common.job.JobId;
import org.ow2.proactive.scheduler.common.job.JobPriority;
import org.ow2.proactive.scheduler.common.job.TaskFlowJob;
import org.ow2.proactive.scheduler.common.task.ForkEnvironment;
import org.ow2.proactive.scheduler.common.task.JavaTask;
import org.ow2.proactive.scheduler.common.task.dataspaces.InputAccessMode;
import org.ow2.proactive.scheduler.common.task.flow.FlowScript;
import org.ow2.proactive.scheduler.ext.mapreduce.exception.PAJobConfigurationException;
import org.ow2.proactive.scheduler.ext.mapreduce.fs.PADataSpacesFileSystem;
import org.ow2.proactive.scheduler.ext.mapreduce.logging.DefaultLogger;
import org.ow2.proactive.scheduler.ext.mapreduce.logging.Logger;
import org.ow2.proactive.scheduler.task.launcher.TaskLauncher.SchedulerVars;
import org.ow2.proactive.scripting.InvalidScriptException;
import org.ow2.proactive.scripting.Script;
import org.ow2.proactive.scripting.SimpleScript;
/**
* {@link PAMapReduceJob} creates the ProActive MapReduce Workflow to submit to
* the ProActive Scheduler. To do this it translates the Hadoop Job (created
* using the new Hadoop MapReduce API) into a ProActive MapReduce Workflow,
* using some additional information the user must provide specifying them in
* the PAMapReduceJobConfiguration object.
*
* We have to notice that some code in this class is copied and pasted from the
* Hadoop classes: - see the method hasWindowsDrive - see the method
* changeHadoopPath - see the method changeHadoopPathList
*
* In the ProActive MapReduce framework we choose to not support the Hadoop
* {@link JobConf} class because it belongs to the old Hadoop API and it is deprecated.
*
* Concerning the logs of the ProActive MapReduce job we must notice that they are
* enabled/disabled via the method "Task.setPreciousLogs(boolean preciousLogs)".
* If "preciousLogs" is true, the logs produced by the task are stored in a
* "TaskLogs-[jobid]-[taskname].log" file in localspace, and transferred to
* outputspace at the end of the execution.
*
* @author The ProActive Team
*
*/
public class PAMapReduceJob {
protected static final Logger logger = DefaultLogger.getInstance();
protected TaskFlowJob mapReduceWorkflow = null;
protected Job hadoopJob = null;
protected PAMapReduceJobConfiguration paMapReduceJobConfiguration = null;
/**
* Store the id of this job when it is submitted to the ProActive Scheduler
*/
protected JobId jobId = null;
/**
 * Builds a ProActive MapReduce job from a Hadoop {@link Job} plus the extra
 * ProActive-specific settings. The constructor eagerly validates the
 * configuration and translates the Hadoop job into the ProActive workflow
 * (via {@link #init()}), so a successfully constructed instance is ready to
 * be submitted.
 *
 * @param job    the Hadoop MapReduce job (new Hadoop API) to translate
 * @param pamrjc the additional ProActive MapReduce configuration
 * @throws PAJobConfigurationException if a required configuration property
 *         is missing or the workflow translation fails
 */
public PAMapReduceJob(Job job, PAMapReduceJobConfiguration pamrjc) throws PAJobConfigurationException {
    this.hadoopJob = job;
    this.paMapReduceJobConfiguration = pamrjc;
    // Logger must be configured first so that the subsequent configuration
    // check and workflow construction can emit debug output.
    initLogger(PAMapReduceFrameworkProperties
            .getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
    checkConfiguration(paMapReduceJobConfiguration, initRequiredConfigurationProperties());
    init();
}
/**
* Initialize the logger to use during the configuration of the ProActive
* MapReduce job
*
* @param debugLevelString
* the string representation of the boolean value that indicates
* if the debug log level must be enabled or not
*/
protected void initLogger(String debugLevelString) {
    // parseBoolean is null-safe: anything other than "true" (ignoring case)
    // disables the debug level.
    boolean debugEnabled = Boolean.parseBoolean(debugLevelString);
    logger.setDebugLogLevel(debugEnabled);
}
/**
* Translate the Hadoop MapReduce Job in the ProActive one
*
* @throws JobCreationException
* @throws PAJobConfigurationException
*/
protected void init() throws PAJobConfigurationException {
if (!isInitialized()) {
// execute only if the Hadoop Job and the
// PAMapReduceJobConfiguration are not null
if ((hadoopJob != null) && (paMapReduceJobConfiguration != null)) {
/*
* Define the fork environment the tasks must use.
* TODO NOTICE 1: we cannot use the
* PASchedulerProperties.SCHEDULER_HOME property to retrieve the
* value of the ProActive Scheduler home because it will be null
* (because the HadoopMapReduceApplication is executed on the
* client side, when the user main class is executed. This means
* if the HadoopMapReduceApplication try to get ProActive
* Scheduler properties, those properties are null). So we force
* the user specify the ProActive Scheduler home folder. The
* user must not specify the folder to use to retrieve jars to
* add as additional classpaths to the ForkEnvironment (in fact
* that folder, "addons/", is directly related to the ProActive
* MapReduce Framework and is defined by the developer only once
* and cannot be changed). It will be better if in some way we
* retrieve the value of the ProActive Scheduler home from the
* Scheduler to which the mapreduce job will be submitted (...
* but it seems we cannot do that) TODO NOTICE 2: the additional
* classpaths added to the ForkEnvironment are extended with all
* the files (not only jars) contained in the
* "$SCHEDULER_HOME/addons/" folder. Hidden files are left out.
*
* Lastly, we must notice that by default the max size of the
* jvm heap depends on various factors such as the available
* memory on the host, the architecture of the host (32 bit, 64
* bit, ...), etc. In the case of the Eon cluster, the default
* heap size seems to be 1 GB and anyway it is certainly more
* than 512MB
*/
String schedulerHomeString = paMapReduceJobConfiguration
.getPropertyAsString(PAMapReduceFrameworkProperties.SCHEDULER_HOME.key);
logger.debug("The ProActive Scheduler home is '" + schedulerHomeString + "'");
String schedulerAdditionalClasspathFolder = schedulerHomeString +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.SCHEDULER_ADDITIONAL_CLASSPATH_FOLDER.key);
logger.debug("The path of the additional classpath folder is '" +
schedulerAdditionalClasspathFolder + "'");
ForkEnvironment forkEnvironment = new ForkEnvironment();
/*
* A workaround for SCHEDULING-1307: some classes from the
* scheduler core are required by MapReduce on the node side, so
* use envScript to add ProActive_Scheduler-core.jar to the
* classpath of the forkEnvironment
*/
String envScript = "home = org.objectweb.proactive.core.runtime.ProActiveRuntimeImpl.getProActiveRuntime().getProActiveHome();\n"
+ "forkEnvironment.addAdditionalClasspath(home + \"/dist/lib/ProActive_Scheduler-core.jar\");";
logger.debug("Setting envScript");
try {
forkEnvironment.setEnvScript(new SimpleScript(envScript, "javascript"));
} catch (InvalidScriptException e) {
logger.warning("Failed to set envScript");
e.printStackTrace();
}
/*
* Specify the parameter for the forked environment
*/
String[] jvmArgumentArray = paMapReduceJobConfiguration.getJVMArguments();
if (jvmArgumentArray != null) {
for (int i = 0; i < jvmArgumentArray.length; i++) {
forkEnvironment.addJVMArgument(jvmArgumentArray[i]);
logger.debug("Setting JVM argument '" + jvmArgumentArray[i] + "'");
}
}
String reducerInputIndexFileSelector = null;
String reducerInputFileSelector = null;
String outputFileName = null;
Configuration hadoopJobConfiguration = hadoopJob.getConfiguration();
/*
* Since in the ProActive MapReduce framework actual class of
* the configuration instance is a PAHadoopJobConfiguration the
* first stuff we do is to create a PAHadoopJobConfiguration
* instance from the Hadoop Configuration one. Then all the
* getters and setters must be invoked on that instance and not
* on the Hadoop one.
*/
PAHadoopJobConfiguration paHadoopJobConfiguration = getPAHadoopJobConfiguration(hadoopJobConfiguration);
/*
* We must set some properties to be able to use the file system
* implementation based on the ProActive DataSpaces. This will
* overwrite the already existing properties in the Hadoop
* Configuration instance. In particular we must add: - a
* property whose name is "fs.<fsUriScheme>.impl" and whose
* value is the name of the class that implements the file
* system through the ProActive DataSpaces; - a property whose
* name is "fs.default.name" and whose value is the name of the
* file system implemented through the ProActive DataSpaces - a
* property whose name is "fs.<fsUriScheme>.impl.disable.cache"
* and whose value is a boolean that if "true" means the cache
* for the file system whose scheme is "fsUriScheme" is disabled
* (In the case of the file system implemented through the
* ProActive DataSpaces we leave the cache disabled)
*/
paHadoopJobConfiguration
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_IMPLEMENTATION_PROPERTY_NAME.key),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_FILE_SYSTEM_DEFAULT_IMPLEMENTATION.key));
logger
.debug("The Hadoop Abstract File System implementation is '" +
paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_IMPLEMENTATION_PROPERTY_NAME.key)) +
"'");
paHadoopJobConfiguration
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_DEFAULT_NAME_PROPERTY_NAME.key),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_FILE_SYSTEM_DEFAULT_NAME.key));
logger
.debug("The Hadoop Abstract File System implementation default name is '" +
paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_DEFAULT_NAME_PROPERTY_NAME.key)) +
"'");
paHadoopJobConfiguration
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_DISABLE_CACHE_PROPERTY_NAME.key),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_FILE_SYSTEM_DISABLE_CACHE.key));
logger
.debug("The Hadoop Abstract File System implementation enabled cache is '" +
paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_DISABLE_CACHE_PROPERTY_NAME.key)) +
"'");
String[] inputPathStringList = paHadoopJobConfiguration
.get(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_INPUT_DIRECTORY_PROPERTY_NAME.key))
.split(StringUtils.COMMA_STR);
logger
.debug("The input files of the Hadoop MapREduce job are '" +
paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_INPUT_DIRECTORY_PROPERTY_NAME.key)) +
"'");
/*
* We start the build of the ProActive MapReduce job
*/
TaskFlowJob tmpMapReduceWorkflow = new TaskFlowJob();
tmpMapReduceWorkflow.setCancelJobOnError(paMapReduceJobConfiguration.getJobCancelOnError());
logger
.debug("The value of the cancelJobOnError attribute of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getJobCancelOnError() + "'");
tmpMapReduceWorkflow.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartTaskOnError());
logger.debug("The value of the restartTaskOnError of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getRestartTaskOnError() + "'");
tmpMapReduceWorkflow.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions());
logger.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getMaxNumberOfExecutions() + "'");
// tmpMapReduceWorkflow.setLogFile(paMapReduceJobConfiguration
// .getLogFilePath());
// logger.debug("The value of the logFilePath of the ProActive MapReduce job is '"
// + paMapReduceJobConfiguration.getLogFilePath() + "'");
tmpMapReduceWorkflow.setName(hadoopJob.getJobName());
logger.debug("The value of the name of the ProActive MapReduce job is '" +
hadoopJob.getJobName() + "'");
String hadoopJobPriorityString = paHadoopJobConfiguration.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_JOB_PRIORITY.key));
tmpMapReduceWorkflow.setPriority(getPriority(hadoopJobPriorityString));
logger.debug("The value of the priority of the ProActive MapReduce job is '" +
getPriority(hadoopJobPriorityString) + "'");
tmpMapReduceWorkflow.setProjectName(paMapReduceJobConfiguration.getProjectName());
logger.debug("The value of the projectName of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getProjectName() + "'");
tmpMapReduceWorkflow.setDescription(paMapReduceJobConfiguration.getDescription());
logger.debug("The value of the description of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getDescription() + "'");
tmpMapReduceWorkflow.setInputSpace(paMapReduceJobConfiguration.getInputSpace());
logger.debug("The value of the input space of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getInputSpace() + "'");
tmpMapReduceWorkflow.setOutputSpace(paMapReduceJobConfiguration.getOutputSpace());
logger.debug("The value of the output space of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getOutputSpace() + "'");
// Set the classpath of the job
String[] classpath = paMapReduceJobConfiguration.getClasspath();
if (classpath != null) {
JobEnvironment je = new JobEnvironment();
try {
je.setJobClasspath(classpath);
} catch (IOException ioe) {
ioe.printStackTrace();
}
tmpMapReduceWorkflow.setEnvironment(je);
}
// start of create the splitter task
JavaTask splitterPATask = new JavaTask();
splitterPATask.setName(paMapReduceJobConfiguration
.getTaskName(PAMapReduceFramework.SPLITTER_PA_TASK));
logger.debug("The value of the name of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getTaskName(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask.setCancelJobOnError(paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.SPLITTER_PA_TASK));
logger
.debug("The value of the cancelJobOnError of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.SPLITTER_PA_TASK));
logger.debug("The value of the restartMode of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getRestartMode(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.SPLITTER_PA_TASK));
logger
.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getMaxNumberOfExecutions() + "'");
splitterPATask.setDescription(paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.SPLITTER_PA_TASK));
logger.debug("The value of the description of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getDescription(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask
.setPreciousLogs(PAMapReduceFrameworkProperties
.getPropertyAsBoolean(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
logger
.debug("The value of the precious logs of the ProActive MapReduce SplitterPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) +
"'");
/*
* we must add the input files to the SplitterPATask only when
* the ReadMode of the SplitterPATask is equal to fullLocalRead
*/
if (paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.SPLITTER_PA_TASK).equals(
ReadMode.fullLocalRead)) {
for (int i = 0; i < inputPathStringList.length; i++) {
splitterPATask.addInputFiles(inputPathStringList[i], paMapReduceJobConfiguration
.getInputAccessMode(PAMapReduceFramework.SPLITTER_PA_TASK));
}
}
logger.debug("The value of the readMode of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
logger
.debug("The value of the inputAccessMode of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration
.getInputAccessMode(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask
.setExecutableClassName(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_SPLITTER_PA_TASK_EXECUTABLE_CLASS.key));
logger
.debug("The value of the executablClassName of the ProActive MapReduce SplitterPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_SPLITTER_PA_TASK_EXECUTABLE_CLASS.key) +
"'");
splitterPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_SPLITTER_PA_TASK_INPUT_DATASPACE.key,
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.SPLITTER_PA_TASK));
logger.debug("The value of the input space of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
splitterPATask.addArgument(PAMapReduceFrameworkProperties.HADOOP_JOB_CONFIGURATION.key,
paHadoopJobConfiguration);
InputStream replicateMapperPATaskInputStream = Thread.currentThread().getContextClassLoader()
.getResourceAsStream(PAMapReduceFramework.REPLICATE_MAPPER_PA_TASK_SCRIPT_NAME);
Script replicateMapperScript = null;
try {
replicateMapperScript = new SimpleScript(
readScriptFile(replicateMapperPATaskInputStream),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_SCRIPT_ENGINE.key),
new String[] {
SchedulerVars.JAVAENV_TASK_ID_VARNAME.toString(),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) });
} catch (InvalidScriptException ise) {
ise.printStackTrace();
}
FlowScript replicateMapperFlowScript = null;
try {
replicateMapperFlowScript = FlowScript.createReplicateFlowScript(replicateMapperScript);
} catch (InvalidScriptException ise) {
ise.printStackTrace();
}
splitterPATask.setFlowScript(replicateMapperFlowScript);
splitterPATask.setForkEnvironment(forkEnvironment);
try {
tmpMapReduceWorkflow.addTask(splitterPATask);
} catch (UserException ue) {
ue.printStackTrace();
}
// end of create splitter task
// start of create mapper task
JavaTask mapperPATask = new JavaTask();
mapperPATask.setName(paMapReduceJobConfiguration
.getTaskName(PAMapReduceFramework.MAPPER_PA_TASK));
logger.debug("The value of the name of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getTaskName(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask.setCancelJobOnError(paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.MAPPER_PA_TASK));
logger
.debug("The value of the cancelJobOnError of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.MAPPER_PA_TASK));
logger.debug("The value of the restartMode of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getRestartMode(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.MAPPER_PA_TASK));
logger
.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getMaxNumberOfExecutions() + "'");
mapperPATask.setDescription(paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.MAPPER_PA_TASK));
logger.debug("The value of the description of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getDescription(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask
.setPreciousLogs(PAMapReduceFrameworkProperties
.getPropertyAsBoolean(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
logger
.debug("The value of the precious logs of the ProActive MapReduce MapperPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) +
"'");
mapperPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_READ_MODE.key,
paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.MAPPER_PA_TASK).key);
logger.debug("The value of the readMode of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_INPUT_DATASPACE.key,
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.MAPPER_PA_TASK));
logger.debug("The value of the input space of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
/*
* we must add the input files to the MapperPATask only when the
* ReadMode of the MapperPATask is equal to fullLocalRead
*/
if (paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.MAPPER_PA_TASK).equals(
ReadMode.fullLocalRead)) {
for (int i = 0; i < inputPathStringList.length; i++) {
mapperPATask.addInputFiles(inputPathStringList[i], paMapReduceJobConfiguration
.getInputAccessMode(PAMapReduceFramework.MAPPER_PA_TASK));
}
}
mapperPATask
.setExecutableClassName(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_EXECUTABLE_CLASS.key));
logger
.debug("The value of the executableClassName of the ProActive MapReduce MapperPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_EXECUTABLE_CLASS.key) +
"'");
mapperPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_OUTPUT_DATASPACE.key,
paMapReduceJobConfiguration.getOutputSpace(PAMapReduceFramework.MAPPER_PA_TASK));
logger.debug("The value of the output space of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getOutputSpace(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
/*
* the string to use to select the output file of a mapper will
* be the following: intermediate_$REP.out TODO check that when
* the writeMode is
* "PAMapReduceFramework.WRITE_MODE_REMOTE_WRITE" (this means
* that the output access mode is "none") then we do not need to
* specify the output files of the MapperPATask TODO do the same
* for the ReducerPATask TODO when the readMode is equal to
* PAMapReduceFramework.READ_MODE_REMOTE_READ or
* PAMapReduceFramework.READ_MODE_PARTIAL_LOCAL_READ (so that
* the input access mode is "none") we do not need to add input
* files to the task (SplitterPATask, MapperPATask and
* ReducerPATask)
*/
if (!(paMapReduceJobConfiguration.getWriteMode(PAMapReduceFramework.MAPPER_PA_TASK)
.equals(WriteMode.remoteWrite))) {
outputFileName = PAMapReduceFramework
.getMapperIntermediateFileSelector(PAMapReduceFramework.REPLICATION_INDEX_TAG);
logger
.debug("The value of the intermediateFileSelector of the ProActive MapReduce MapperPATask is '" +
outputFileName + "'");
mapperPATask.addOutputFiles(outputFileName, paMapReduceJobConfiguration
.getOutputAccessMode(PAMapReduceFramework.MAPPER_PA_TASK));
logger
.debug("The value of the outputAccessMode of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration
.getOutputAccessMode(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
/*
* the string to use to select the output index file of a
* mapper will be the following: intermediate_$REP.index
*/
outputFileName = PAMapReduceFramework
.getMapperIntermediateIndexFileSelector(PAMapReduceFramework.REPLICATION_INDEX_TAG);
logger
.debug("The value of the intermediateIndexFileSelector of the ProActive MapReduce MapperPATask is '" +
outputFileName + "'");
mapperPATask.addOutputFiles(outputFileName, paMapReduceJobConfiguration
.getOutputAccessMode(PAMapReduceFramework.MAPPER_PA_TASK));
}
logger.debug("The value of the writeMode of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getWriteMode(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask.addArgument(PAMapReduceFrameworkProperties.HADOOP_JOB_CONFIGURATION.key,
paHadoopJobConfiguration);
mapperPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
mapperPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_PROFILE.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_PROFILE.key));
mapperPATask.addDependence(splitterPATask);
mapperPATask.setForkEnvironment(forkEnvironment);
try {
tmpMapReduceWorkflow.addTask(mapperPATask);
} catch (UserException ue) {
ue.printStackTrace();
}
// end of create mapper task
// start of create the mapper join task
JavaTask mapperJoinPATask = new JavaTask();
mapperJoinPATask.setName(paMapReduceJobConfiguration
.getTaskName(PAMapReduceFramework.MAPPER_JOIN_PA_TASK));
logger.debug("The value of the name of the ProActive MapReduce MapperJoinPATask is '" +
paMapReduceJobConfiguration.getTaskName(PAMapReduceFramework.MAPPER_JOIN_PA_TASK) + "'");
mapperJoinPATask.setDescription(paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.MAPPER_JOIN_PA_TASK));
logger.debug("The value of the description of the ProActive MapReduce MapperJoinPATask is '" +
paMapReduceJobConfiguration.getDescription(PAMapReduceFramework.MAPPER_JOIN_PA_TASK) +
"'");
mapperJoinPATask
.setPreciousLogs(PAMapReduceFrameworkProperties
.getPropertyAsBoolean(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
logger
.debug("The value of the precious logs of the ProActive MapReduce MapperJoinPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) +
"'");
mapperJoinPATask.setCancelJobOnError(paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.MAPPER_JOIN_PA_TASK));
logger
.debug("The value of the cancelJobOnError of the ProActive MapReduce MapperJoinPATask is '" +
paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.MAPPER_JOIN_PA_TASK) + "'");
mapperJoinPATask.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.MAPPER_JOIN_PA_TASK));
logger.debug("The value of the restartMode of the ProActive MapReduce MapperJoinPATask is '" +
paMapReduceJobConfiguration.getRestartMode(PAMapReduceFramework.MAPPER_JOIN_PA_TASK) +
"'");
mapperJoinPATask.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.MAPPER_JOIN_PA_TASK));
logger
.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce MapperJoinPATask is '" +
paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.MAPPER_JOIN_PA_TASK) + "'");
mapperJoinPATask
.setExecutableClassName(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_JOIN_PA_TASK_EXECUTABLE_CLASS.key));
logger
.debug("The value of the executableClassName of the ProActive MapReduce MapperJoinPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_JOIN_PA_TASK_EXECUTABLE_CLASS.key) +
"'");
mapperJoinPATask.addDependence(mapperPATask);
mapperJoinPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
mapperJoinPATask
.addArgument(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_OUTPUT_DIRECTORY_PROPERTY_NAME.key),
paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_OUTPUT_DIRECTORY_PROPERTY_NAME.key)));
InputStream replicateReducerPATaskInputStream = Thread.currentThread()
.getContextClassLoader().getResourceAsStream(
PAMapReduceFramework.REPLICATE_REDUCER_PA_TASK_SCRIPT_NAME);
Script<?> replicateReducerScript = null;
try {
replicateReducerScript = new SimpleScript(
this.readScriptFile(replicateReducerPATaskInputStream),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_SCRIPT_ENGINE.key),
new String[] {
SchedulerVars.JAVAENV_TASK_ID_VARNAME.toString(),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key),
"" +
paHadoopJobConfiguration
.getInt(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_NUMBER_OF_REDUCER_TASKS_PROPERTY_NAME.key),
1) });
} catch (InvalidScriptException ise) {
ise.printStackTrace();
}
FlowScript replicateReducerFlowScript = null;
try {
replicateReducerFlowScript = FlowScript.createReplicateFlowScript(replicateReducerScript);
} catch (InvalidScriptException ise) {
ise.printStackTrace();
}
mapperJoinPATask.setFlowScript(replicateReducerFlowScript);
mapperJoinPATask.setForkEnvironment(forkEnvironment);
try {
tmpMapReduceWorkflow.addTask(mapperJoinPATask);
} catch (UserException ue) {
ue.printStackTrace();
}
// end of create the mapper join task
// start of: create the reducer task
JavaTask reducerPATask = new JavaTask();
reducerPATask.setName(paMapReduceJobConfiguration
.getTaskName(PAMapReduceFramework.REDUCER_PA_TASK));
logger.debug("The value of the name of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getTaskName(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.setCancelJobOnError(paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.REDUCER_PA_TASK));
logger
.debug("The value of the cancelJobOnError of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.REDUCER_PA_TASK));
logger.debug("The value of the restartMode of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getRestartMode(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.REDUCER_PA_TASK));
logger
.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.setDescription(paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.REDUCER_PA_TASK));
logger.debug("The value of the description of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getDescription(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask
.setPreciousLogs(PAMapReduceFrameworkProperties
.getPropertyAsBoolean(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
logger
.debug("The value of the precious logs of the ProActive MapReduce ReducerPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) +
"'");
reducerPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_READ_MODE.key,
paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.REDUCER_PA_TASK).key);
logger.debug("The value of the readMode of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_INPUT_DATASPACE.key,
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.REDUCER_PA_TASK));
logger.debug("The value of the input space of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
/*
* We must test if the "readMode" of the ReducerPATask is
* 'fullLocalRead' then we must transfer on the node the
* ReducerPATask will execute on the MapperPATask output "index"
* files and the MapperPATask output "actual data" files. If the
* "readMode" is "remoteRead" we must transfer on the node the
* ReducerPATask will execute on only the index files. This
* means the index files are ALWAYS transferred on the node the
* ReducerPATask will execute on (so we use
* "InputAccessMode.TransferFromOutputSpace" directly without
* retrieving the input access mode information from the
* configuration).
*/
reducerInputIndexFileSelector = PAMapReduceFramework
.getReducerIntermediateIndexFileSelector();
logger
.debug("The value of the intermediateIndexFileSelector of the ProActive MapReduce ReducerPATask is '" +
reducerInputIndexFileSelector + "'");
reducerPATask.addInputFiles(reducerInputIndexFileSelector,
InputAccessMode.TransferFromOutputSpace);
if (paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.REDUCER_PA_TASK).equals(
ReadMode.fullLocalRead)) {
/*
* the string to use to select the output files of a mapper
* is the following: intermediate_*.out
*/
reducerInputFileSelector = PAMapReduceFramework.getReducerIntermediateFileSelector();
logger
.debug("The value of the intermediateFileSelector of the ProActive MapReduce ReducerPATask is '" +
reducerInputFileSelector + "'");
reducerPATask.addInputFiles(reducerInputFileSelector, paMapReduceJobConfiguration
.getInputAccessMode(PAMapReduceFramework.REDUCER_PA_TASK));
}
reducerPATask
.setExecutableClassName(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_EXECUTABLE_CLASS.key));
reducerPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_OUTPUT_DATASPACE.key,
paMapReduceJobConfiguration.getOutputSpace(PAMapReduceFramework.REDUCER_PA_TASK));
logger.debug("The value of the output space of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getOutputSpace(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
/*
* re-using the Hadoop OutputFormat we must notice the output
* will be written into a directory like the following one:
* "$OUTPUT_DIRECTORY/_temporary/_attempt_<jtIdentifier>_<jobId>_r_<taskId>_<taskAttemptId>/part-r-<taskId>"
* for which a possible selection string will be:
* "$OUTPUT_DIRECTORY/_temporary/_attempt_* /part-r-*"
*/
String outputFilesSelectionString = paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_OUTPUT_DIRECTORY_PROPERTY_NAME.key)) +
File.separator + PAMapReduceFramework.getTemporaryOutputDirectoryRegex();
logger
.debug("The value of the outputFileSelectionString of the ProActive MapReduce ReducerPATask is '" +
outputFilesSelectionString + "'");
/*
* we must add the output files to the ReducerPATask only when
* the WriteMode of the ReducerPATask is equal to localWrite
*/
if (paMapReduceJobConfiguration.getWriteMode(PAMapReduceFramework.REDUCER_PA_TASK).equals(
WriteMode.localWrite)) {
reducerPATask.addOutputFiles(outputFilesSelectionString, paMapReduceJobConfiguration
.getOutputAccessMode(PAMapReduceFramework.REDUCER_PA_TASK));
}
logger
.debug("The value of the outputAccessMode of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration
.getOutputAccessMode(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
logger.debug("The value of the writeMode of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getWriteMode(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.addArgument(PAMapReduceFrameworkProperties.HADOOP_JOB_CONFIGURATION.key,
paHadoopJobConfiguration);
reducerPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
reducerPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_PROFILE.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_PROFILE.key));
reducerPATask.addDependence(mapperJoinPATask);
reducerPATask.setForkEnvironment(forkEnvironment);
try {
tmpMapReduceWorkflow.addTask(reducerPATask);
} catch (UserException ue) {
ue.printStackTrace();
}
// end of create the reduce task
// start of create the reducer join task
JavaTask reducerJoinPATask = new JavaTask();
reducerJoinPATask.setName(paMapReduceJobConfiguration
.getTaskName(PAMapReduceFramework.REDUCER_JOIN_PA_TASK));
logger.debug("The value of the name of the ProActive MapReduce ReducerJoinPATask is '" +
paMapReduceJobConfiguration.getTaskName(PAMapReduceFramework.REDUCER_JOIN_PA_TASK) + "'");
reducerJoinPATask.setDescription(paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.REDUCER_JOIN_PA_TASK));
logger
.debug("The value of the description of the ProActive MapReduce ReducerJoinPATask is '" +
paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.REDUCER_JOIN_PA_TASK) + "'");
reducerJoinPATask
.setPreciousLogs(PAMapReduceFrameworkProperties
.getPropertyAsBoolean(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
logger
.debug("The value of the precious logs of the ProActive MapReduce ReducerJoinPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) +
"'");
reducerJoinPATask.setCancelJobOnError(paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.REDUCER_JOIN_PA_TASK));
logger
.debug("The value of the cancelJobOnError of the ProActive MapReduce ReducerJoinPATask is '" +
paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.REDUCER_JOIN_PA_TASK) + "'");
reducerJoinPATask.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.REDUCER_JOIN_PA_TASK));
logger
.debug("The value of the restartTaskOnError of the ProActive MapReduce ReducerJoinPATask is '" +
paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.REDUCER_JOIN_PA_TASK) + "'");
reducerJoinPATask.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.REDUCER_JOIN_PA_TASK));
logger
.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce ReducerJoinPATask is '" +
paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.REDUCER_JOIN_PA_TASK) +
"'");
reducerJoinPATask
.setExecutableClassName(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_JOIN_PA_TASK_EXECUTABLE_CLASS.key));
logger
.debug("The value of the executableClassName of the ProActive MapReduce ReducerJoinPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_JOIN_PA_TASK_EXECUTABLE_CLASS.key) +
"'");
reducerJoinPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
InputStream fileTransferPostScriptInputStream = Thread.currentThread()
.getContextClassLoader().getResourceAsStream(
PAMapReduceFramework.OUTPUT_FILE_TRANSFER_POST_SCRIPT_NAME);
Script<?> fileTransferScript = null;
try {
/*
* we must notice that when we reuse the Hadoop OutputFormat
* classes the name of the output file in which the
* ReducerPATask will put its output data is implicitly
* defined (the output folder directory will be something
* like "_temporary/_attempt..."). This means if we want to
* put the ReducerPATask output files in the directory the
* user specified and we want to give the output files the
* name the user desires we must execute a post script to
* move files (renaming them at the same time). The name of
* the ReducerPATak output file will be compliant to the
* following format: "<userDefinedPrefix><reducerId>". By
* default, if the user does not specify any prefix for the
* ReducerPATask output file names then the String retrieved
* by the method
* PAMapReduceFramework.getReducerOutputFileNamePrefix()
* will be used (that string until now is "reducer_").
* Look at the ReducerPATask class to get more information
* about the name of output file of the reducer task.
*/
String reducerOutputFileNamePrefix = null;
reducerOutputFileNamePrefix = paMapReduceJobConfiguration
.getReducerOutputFileNamePrefix();
if (reducerOutputFileNamePrefix == null) {
reducerOutputFileNamePrefix = PAMapReduceFramework.getReducerOutputFileNamePrefix();
}
fileTransferScript = new SimpleScript(
this.readScriptFile(fileTransferPostScriptInputStream),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_SCRIPT_ENGINE.key),
new String[] {
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key),
outputFilesSelectionString, reducerOutputFileNamePrefix });
} catch (InvalidScriptException ise) {
ise.printStackTrace();
}
reducerJoinPATask.setPostScript(fileTransferScript);
reducerJoinPATask.addDependence(reducerPATask);
reducerJoinPATask.setForkEnvironment(forkEnvironment);
try {
tmpMapReduceWorkflow.addTask(reducerJoinPATask);
} catch (UserException ue) {
ue.printStackTrace();
}
// end of create the reducer join task
mapReduceWorkflow = tmpMapReduceWorkflow;
/*
* TODO verify if there is a getUser() and getCredentials()
* method we could use to as the username and password (in
* general as the credentials) to establsh a connection to the
* scheduler
*/
/*
* TODO verify if we can use an hadoop method as
* hadoopJobConfiguration.getMaxMapAttempts(), that give us the
* maximum number of attempts that will be made to run a map
* task
*/
/*
* TODO verify if we can use an hadoop method as
* hadoopJobConfiguration.getMaxMapAttempts(), that give us the
* maximum number of attempts that will be made to run a reduce
* task
*/
}
}
}
/**
* Run the Hadoop MapReduce Job
*
* @return true if the Hadoop MapReduce Job is submitted correctly to the
* ProActive Scheduler
*/
public boolean run() {
if (isInitialized()) {
return (submitJob(mapReduceWorkflow));
}
return false;
}
/**
* Check if the ProActive MapReduce Workflow is already initialized
*
* @return true if the ProActive MapReduce Workflow is already initialized,
* false otherwise
*/
protected boolean isInitialized() {
if (mapReduceWorkflow != null) {
return true;
}
return false;
}
/**
* Submit the TaskFlowJob representation of the Hadoop Job to the ProActive
* Scheduler
*
* @param taskFlowJob
* : the ProActive TaksFlowJob representation of the Hadoop Job
* @return boolean true if the job is submitted successfully, false
* otherwise
*/
protected boolean submitJob(TaskFlowJob mapReduceWorkflow) {
SchedulerAuthenticationInterface sai = null;
try {
sai = SchedulerConnection.join(paMapReduceJobConfiguration
.getPropertyAsString(PAMapReduceFrameworkProperties.SCHEDULER_URL.key));
} catch (ConnectionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
Scheduler scheduler = null;
try {
scheduler = sai.login(Credentials.getCredentials());
} catch (LoginException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (AlreadyConnectedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (KeyException e) {
try {
// (2) alternative authentication method
PublicKey pubKey = null;
try {
pubKey = sai.getPublicKey();
} catch (LoginException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
if (pubKey == null) {
pubKey = Credentials.getPublicKey(Credentials.getPubKeyPath());
}
try {
String username = paMapReduceJobConfiguration
.getPropertyAsString(PAMapReduceFrameworkProperties.SCHEDULER_USERNAME.key);
String password = paMapReduceJobConfiguration
.getPropertyAsString(PAMapReduceFrameworkProperties.SCHEDULER_PASSWORD.key);
scheduler = sai.login(Credentials.createCredentials(new CredData(username, password),
pubKey));
} catch (LoginException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (AlreadyConnectedException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
} catch (KeyException ke2) {
// cannot find public key !
}
}
if (scheduler != null) {
try {
jobId = scheduler.submit(mapReduceWorkflow);
} catch (NotConnectedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (PermissionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (SubmissionClosedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (JobCreationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
try {
scheduler.disconnect();
} catch (NotConnectedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (PermissionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if (jobId != null) {
return true;
}
return false;
}
/**
* Read the content of the file containing the script
*
* @param inputStream
* the InputStream to use to read from the file containing the
* script
* @return String the content of the script file
*/
protected String readScriptFile(InputStream inputStream) {
if (inputStream != null) {
final char[] buffer = new char[0x10000];
StringBuilder out = new StringBuilder();
Reader in;
try {
in = new InputStreamReader(inputStream, "UTF-8");
int read;
do {
read = in.read(buffer, 0, buffer.length);
if (read > 0) {
out.append(buffer, 0, read);
}
} while (read >= 0);
} catch (UnsupportedEncodingException e) {
// thrown by in.read(buffer, 0, buffer.length);
e.printStackTrace();
} catch (IOException e) {
// thrown by in.read(buffer, 0, buffer.length);
e.printStackTrace();
}
return out.toString();
}
return null;
}
    /**
     * Build a {@link Serializable} {@link Configuration} object so that we can
     * pass it to the ProActive tasks as an argument.
     *
     * Before wrapping the Hadoop configuration, the input path list and the
     * output path are rewritten (see the changeHadoopInputPathList and
     * changeHadoopOutputPath methods) so that they become relative paths the
     * dataspaces layer can resolve. Then, only when the user explicitly
     * defined an input split size, that size is propagated to the Hadoop
     * "min split size" / "max split size" properties — and only for the
     * properties the user has not already set himself.
     *
     * @param configuration
     *            the Hadoop {@link Configuration} object from which we have to
     *            create the Serializable configuration object
     * @return {@link PAHadoopJobConfiguration} the {@link Serializable}
     *         configuration object, or null when the given configuration is
     *         null
     * @throws PAJobConfigurationException
     */
    protected PAHadoopJobConfiguration getPAHadoopJobConfiguration(Configuration configuration)
            throws PAJobConfigurationException {
        if (configuration != null) {
            // rewrite the Hadoop input/output paths into their relative,
            // dataspace-resolvable form before wrapping the configuration
            changeHadoopInputPathList(configuration);
            changeHadoopOutputPath(configuration);
            PAHadoopJobConfiguration pahjc = new PAHadoopJobConfiguration(configuration);
            /*
             * To force Hadoop to create InputSplit instances whose dimension
             * will be the one the user specified we must set the properties
             * "mapred.min.split.size" and "mapred.max.split.size" to the value
             * of the split size the user defined (in the configuration file or
             * invoking the method
             * PAMapReduceJobConfiguration.setInputSplitSize()). We must notice
             * that in the following code we set the Hadoop properties that
             * represent the minimum and maximum split size equal to the size of
             * the input split the user defined ONLY if the user has not already
             * defined the "mapred.min.split.size" and "mapred.max.split.size"
             * Hadoop property for the Hadoop job. To do that we check if the
             * value of the minimum and maximum of the Hadoop properties
             * "mapred.min.split.size" and "mapred.max.split.size" are equal to
             * the Hadoop defined default values and if that is the case we do
             * not change the values of the properties "mapred.min.split.size"
             * and "mapred.max.split.size". We must also notice that, if the
             * size the user defined for the input split is greater than the
             * size of input file, we must want that the Hadoop FileInputFormat
             * set the size of the input split equal to the size of the input
             * file. To grant that we must not alter the default value of the
             * "mapred.min.split.size" property. The problem is that when we
             * build the ProActive MapReduce job we do not know the
             * DataSpacesFileObject that refers to the input file. This means we
             * cannot check if the size the user defined for the input split is
             * greater than the size of the input file. Hence, to get an input
             * split whose size is equal to the size of the input file we must
             * not alter the default values of "mapred.min.split.size" and
             * "mapred.max.split.size" properties. This means if the user does
             * not specify the size of the input split the
             * "mapred.min.split.size" and "mapred.max.split.size" properties
             * maintain their default value and the Hadoop FileInputFormat
             * creates input splits whose size is equal to the size of the input
             * file. This means the input split size is not a REQUIRED property
             * and that we must check if the user has defined or not the input
             * split size to see when we must alter the default value of the
             * "mapred.min.split.size" and "mapred.max.split.size" properties.
             * In conclusion, we must notice that to obtain one input split
             * whose size is equal to the size of the input file the user must
             * define a size for the input split that is greater than the size
             * of the input file. In that case the FileInputFormat ends to build
             * the input split when the EOF is encountered in the input file. At
             * that point the size of the input split is equal to the size of
             * the input file. But the simple way to obtain an input split whose
             * size is equal to the size of the input file is to tell the user
             * not to define the property that represents the value of the input
             * split size since in some cases the user cannot know the size of
             * the input file in advance (i.e., he cannot define a size for the
             * input split greater than the size of the input file).
             */
            // the framework default of getInputSplitSize() equals Hadoop's
            // maximum split size: any other value means the user explicitly
            // configured an input split size
            if (paMapReduceJobConfiguration.getInputSplitSize() != Long
                    .parseLong(PAMapReduceFramework
                            .getDefault(PAMapReduceFrameworkProperties
                                    .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
                                            .getKey())))) {
                /*
                 * if we are here it means the user defined the size of the
                 * input split since the
                 * PAMapReduceJobcConfiguration.getInputSplitSize() method did
                 * not return the default value (that is equal to the Hadoop
                 * maximum value for the size of the input split)
                 */
                logger
                        .debug("The value to use to set the minimum size for the input split is '" +
                            pahjc
                                    .getLong(
                                            PAMapReduceFrameworkProperties
                                                    .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
                                                            .getKey()),
                                            Long
                                                    .parseLong(PAMapReduceFramework
                                                            .getDefault(PAMapReduceFrameworkProperties
                                                                    .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
                                                                            .getKey())))) + "'");
                logger
                        .debug("The default value of the minimum size of the input split is '" +
                            Long
                                    .parseLong(PAMapReduceFramework
                                            .getDefault(PAMapReduceFrameworkProperties
                                                    .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
                                                            .getKey()))) + "'");
                // overwrite "mapred.min.split.size" only when it still holds
                // its Hadoop default value (i.e. the user did not set it)
                if (pahjc
                        .getLong(
                                PAMapReduceFrameworkProperties
                                        .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
                                                .getKey()),
                                Long
                                        .parseLong(PAMapReduceFramework
                                                .getDefault(PAMapReduceFrameworkProperties
                                                        .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
                                                                .getKey())))) == Long
                        .parseLong(PAMapReduceFramework
                                .getDefault(PAMapReduceFrameworkProperties
                                        .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
                                                .getKey())))) {
                    pahjc
                            .set(
                                    PAMapReduceFrameworkProperties
                                            .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
                                                    .getKey()), "" +
                                        paMapReduceJobConfiguration.getInputSplitSize());
                    logger
                            .debug("The minimum size of the input split in the ProActive MapReduce job is '" +
                                pahjc
                                        .get(PAMapReduceFrameworkProperties
                                                .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
                                                        .getKey())) + "'");
                }
                logger
                        .debug("The value to use to set the maximum size for the input split is '" +
                            pahjc
                                    .getLong(
                                            PAMapReduceFrameworkProperties
                                                    .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
                                                            .getKey()),
                                            Long
                                                    .parseLong(PAMapReduceFramework
                                                            .getDefault(PAMapReduceFrameworkProperties
                                                                    .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
                                                                            .getKey())))) + "'");
                logger
                        .debug("The default value of the maximum size of the input split is '" +
                            Long
                                    .parseLong(PAMapReduceFramework
                                            .getDefault(PAMapReduceFrameworkProperties
                                                    .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
                                                            .getKey()))) + "'");
                // same guard for "mapred.max.split.size": touch it only when
                // it still holds its Hadoop default value
                if (pahjc
                        .getLong(
                                PAMapReduceFrameworkProperties
                                        .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
                                                .getKey()),
                                Long
                                        .parseLong(PAMapReduceFramework
                                                .getDefault(PAMapReduceFrameworkProperties
                                                        .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
                                                                .getKey())))) == Long
                        .parseLong(PAMapReduceFramework
                                .getDefault(PAMapReduceFrameworkProperties
                                        .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
                                                .getKey())))) {
                    pahjc
                            .set(
                                    PAMapReduceFrameworkProperties
                                            .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
                                                    .getKey()), "" +
                                        paMapReduceJobConfiguration.getInputSplitSize());
                    logger
                            .debug("The maximum size of the input split in the ProActive MapReduce job is '" +
                                pahjc
                                        .get(PAMapReduceFrameworkProperties
                                                .getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
                                                        .getKey())) + "'");
                }
            } else {
                logger.debug("The user did not define the size of the input split");
            }
            return pahjc;
        }
        return null;
    }
/**
* Modify each {@link Path} in the list of input path of the Hadoop job
* using the method {@link PAMapReduceJob#changeHadoopPath(String)}.
*
* This method build a new comma separated list of input paths and set it in
* the {@link Configuration} received as parameter, substituting the
* original comma separated list of input paths. Each path in the new comma
* separated list of input paths is modified according to the
* {@link PAMapReduceJob#changeHadoopPath(String)} method.
*
* We must notice that this method contains some code coped and pasted from
* the
* {@link FileInputFormat#getInputPaths(org.apache.hadoop.mapreduce.JobContext)}
* and {@link FileInputFormat#addInputPaths(Job, String)}.
*
* @param configuration
* the configuration to use to retrieve the list of the job input
* {@link Path}
*
* We must notice that this method contains some code copied and
* pasted from the code of the methods getInputPaths(JobContext
* jobContext) addInputPath(Job job, Path path) of the class
* org.apache.hadoop.mapreduce.lib.input.FileInputFormat
*/
protected void changeHadoopInputPathList(Configuration configuration) {
String inputPathStringList = configuration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_INPUT_DIRECTORY_PROPERTY_NAME.key));
if ((inputPathStringList != null) && (!inputPathStringList.trim().equalsIgnoreCase(""))) {
String newInputPathStringList = "";
String[] list = StringUtils.split(inputPathStringList);
for (int i = 0; i < list.length; i++) {
if (i == 0) {
newInputPathStringList += changeHadoopPath(StringUtils.escapeString(list[i]));
} else {
newInputPathStringList += StringUtils.COMMA_STR +
changeHadoopPath(StringUtils.escapeString(list[i]));
}
}
configuration
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_INPUT_DIRECTORY_PROPERTY_NAME.key),
newInputPathStringList);
}
}
/**
* Modify each {@link Path} in the list of output path of the Hadoop job
* according to the method {@link PAMapReduceJob#changeHadoopPath(String)}
*
* @param configuration
* the configuration to use to retrieve the job output
* {@link Path}
*/
protected void changeHadoopOutputPath(Configuration configuration) {
String outputPathString = configuration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_OUTPUT_DIRECTORY_PROPERTY_NAME.key));
if ((outputPathString != null) && (!outputPathString.trim().equalsIgnoreCase(""))) {
outputPathString = changeHadoopPath(StringUtils.escapeString(outputPathString));
configuration
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_OUTPUT_DIRECTORY_PROPERTY_NAME.key),
outputPathString);
}
}
/**
* Modify the string representation of an Hadoop path in the way the new
* path will be relative (i.e., no initial "/" under unix). In such a way
* when we instantiate an Hadoop {@link Path} no scheme will be added and
* the Hadoop classes that need the {@link FileSystem} implementation to
* which that path belongs to will be forced to retrieve that information
* from the configuration. This means that in the case of the ProActive
* MapReduce configuration the returned FileSystem implementation will be
* {@link PADataSpacesFileSystem} (whose configuration properties is
* {@link PAMapReduceFrameworkProperties#WORKFLOW_FILE_SYSTEM_DEFAULT_NAME}
* ). We must look at {@link Path#getFileSystem(Configuration)} (and
* following the invocation chain at
* {@link FileSystem#getDefaultUri(Configuration)}) to get more details. In
* the case of the ProActive MapReduce framework the name of the file system
* is "pads:///" and this tells to the Hadoop FileSystem to instantiate via
* reflection the class referred by the property "fs.pads.impl" that in the
* case of ProActive MapReduce framework is
* "org.ow2.proactive.scheduler.ext.hadoopmapreduce.fs.PADataSpacesFileSystem"
* . This class corresponds to the {@link FileSystem} implementation based
* on DataSpacesFileObject
*
* @param pathString
* the String representation of the Hadoop {@link Path}. The
* string will identify a relative path (i.e., no initial "/"
* under unix)
*
* We have to notice that the code of this method is a copied and
* pasted from the constructor Path(String pathString) of the
* Hadoop class org.apache.hadoop.fs.Path
*/
protected String changeHadoopPath(String pathString) {
String modifiedPath = pathString;
// add a slash in front of paths with Windows drive letters
if (hasWindowsDrive(pathString, false))
pathString = "/" + pathString;
// parse uri components
String scheme = null;
String authority = null;
int start = 0;
// parse uri scheme, if any
int colon = pathString.indexOf(':');
int slash = pathString.indexOf('/');
if ((colon != -1) && ((slash == -1) || (colon < slash))) { // has a
// scheme
scheme = pathString.substring(0, colon);
start = colon + 1;
/*
* We must substitute the existing scheme with our "pads" scheme
* preserving the colon and the slash. Then remembering that Hadoop
* MapReduce resolve relative paths using the Java System Property
* "user.dir" we have to delete the occurrence of the value of that
* string with the path part of the user configured input dataspace.
* E.g., if the Hadoop created path is something like
* "file:/home/theproactiveteam/workspace/proactive_mapreduce_client/current_input"
* and and the value of the "user.dir" Java System property is
* "/home/theproactiveteam/workspace/proactive_mapreduce_client/"
* then the non scheme string part will be:
* ":/home/theproactiveteam/workspace/proactive_mapreduce_client/current_input"
* and the "user.dir" will begin at index 1 in the no scheme string
* part. The string after the "user.dir" string in the no scheme
* part string will be "/current_input" and to obtain the relative
* path we must left out the initial "/" (this is the reason of the
* "substring(1)")
*
* Lastly after all is executed we must obtain "current_input".
*/
String noSchemeStringPart = pathString.substring(colon);
String userDirString = System.getProperty(PAMapReduceFramework.USER_DIR);
int startOfUserDirString = pathString.indexOf(userDirString);
if (startOfUserDirString > 0) {
/*
* startOfUserDirString is greater than 0 because the character
* whose index is zero is the colon (because the scheme part is
* defined, in the Java URI, as the string that comes before the
* colon)
*/
/*
* delete the "user.dir" string from the no scheme string part
*/
String afterUserDirString = pathString.substring(startOfUserDirString +
userDirString.length());
/*
* we left out the initial "/" (or "\") character TODO test if
* with windows it will work because maybe Java under windows
* can use "\\" so that we must do "substring(2)"
*/
modifiedPath = afterUserDirString.substring(1);
} else {
/*
* the "user.dir" string is not contained in the no scheme
* string part so we only we left out the first two characters
* ":/" of the no scheme string
*/
modifiedPath = noSchemeStringPart.substring(2);
}
}
return modifiedPath;
}
/**
* Check if the String representation Hadoop {@link Path} refers to a window
* system or not TODO delete the hard coding
*
* @param pathString
* the String representation of the Hadoop path
* @param slashed
* boolean that indicates if the first character of the string
* representation of the path is a "/" or not
* @return true if the string representation of the Hadoop {@link Path} has
* a windows drive letter false otherwise
*
* We have to notice that this code is copied and pasted from the
* code of the same method of the Hadoop {@link Path} class
*/
protected boolean hasWindowsDrive(String pathString, boolean slashed) {
boolean windows = System.getProperty("os.name").startsWith("Windows");
if (!windows)
return false;
int start = slashed ? 1 : 0;
return pathString.length() >= start + 2 &&
(slashed ? pathString.charAt(0) == '/' : true) &&
pathString.charAt(start + 1) == ':' &&
((pathString.charAt(start) >= 'A' && pathString.charAt(start) <= 'Z') || (pathString
.charAt(start) >= 'a' && pathString.charAt(start) <= 'z'));
}
/**
* Translate the string representation of the priority of the Hadoop Job
* into the equivalent priority of the ProActive Job
*
* @param hadoopJobPriorityString
* the string representation of the HadoopJob
* @return {@link JobPriority} the priority of the ProActive Job
*/
protected JobPriority getPriority(String hadoopJobPriorityString) {
if ((hadoopJobPriorityString == null) || (hadoopJobPriorityString.trim().equalsIgnoreCase(""))) {
return JobPriority.NORMAL;
} else {
if (org.apache.hadoop.mapred.JobPriority.valueOf(hadoopJobPriorityString).equals(
org.apache.hadoop.mapred.JobPriority.VERY_HIGH)) {
return JobPriority.HIGHEST;
} else if (org.apache.hadoop.mapred.JobPriority.valueOf(hadoopJobPriorityString).equals(
org.apache.hadoop.mapred.JobPriority.HIGH)) {
return JobPriority.HIGH;
} else if (org.apache.hadoop.mapred.JobPriority.valueOf(hadoopJobPriorityString).equals(
org.apache.hadoop.mapred.JobPriority.NORMAL)) {
return JobPriority.NORMAL;
} else if (org.apache.hadoop.mapred.JobPriority.valueOf(hadoopJobPriorityString).equals(
org.apache.hadoop.mapred.JobPriority.LOW)) {
return JobPriority.LOW;
} else if (org.apache.hadoop.mapred.JobPriority.valueOf(hadoopJobPriorityString).equals(
org.apache.hadoop.mapred.JobPriority.VERY_LOW)) {
return JobPriority.LOWEST;
} else {
return JobPriority.IDLE;
}
}
}
/**
* Check if the given configuration is a valid configuration
*
* @param pamrjc
* the configuration to check
* @param requiredConfigurationPropertyList
* the list of the properties that is required that they are set
* @return true if the configuration is valid, false otherwise
* @throws PAJobConfigurationException
*/
protected boolean checkConfiguration(PAMapReduceJobConfiguration pamrjc,
List<String> requiredConfigurationPropertyList) throws PAJobConfigurationException {
for (String currentProperty : requiredConfigurationPropertyList) {
if (pamrjc.getPropertyAsString(currentProperty) == null) {
throw new PAJobConfigurationException("Property '" + currentProperty +
"' is required but it is not set!");
}
}
return true;
}
/**
* Initialize the list that represents the required properties that must be
* set to be able to build the ProActive MapReduce taskflow
*
* @param requiredConfigurationProperties
*/
protected List<String> initRequiredConfigurationProperties() {
List<String> requiredConfigurationPropertyList = new ArrayList<String>();
/*
* The property that stores the ProActive Scheduler home is required
* because the ProActive MapReduce API/framework configuration would use
* that information to add the "/addons" directory to the
* ForkEnvironment a task must use
*/
requiredConfigurationPropertyList.add(PAMapReduceFrameworkProperties.SCHEDULER_HOME.getKey());
/*
* The property that stores the value of the input space the ProActive
* MapReduce job must use is needed because otherwise we do not know
* where input files are stored
*/
requiredConfigurationPropertyList.add(PAMapReduceFrameworkProperties.WORKFLOW_INPUT_SPACE.getKey());
/*
* The property that stores the value of the output space the ProActive
* MapReduce job must use is needed because otherwise we do not know
* where output files must be stored
*/
requiredConfigurationPropertyList.add(PAMapReduceFrameworkProperties.WORKFLOW_INPUT_SPACE.getKey());
/*
* The property that stores the "readMode" of the MapperPATask is needed
* because if it is set we can be sure the input space and
* InputAccessMode of the MapperPATask input files are set
*/
requiredConfigurationPropertyList
.add(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_READ_MODE.getKey());
/*
* The property that stores the "readMode" of the ReducerPATask is
* needed because if it is set we can be sure the input space and
* InputAccessMode of the ReducerPATask input files are set
*/
requiredConfigurationPropertyList
.add(PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_READ_MODE.getKey());
/*
* The property that stores the "writeMode" of the MapperPATask is
* needed because if it is set we can be sure the output space and
* OutputAccessMode of the MapperPATask output files are set
*/
requiredConfigurationPropertyList
.add(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_WRITE_MODE.getKey());
/*
* The property that stores the "writeMode" of the ReducerPATask is
* needed because if it is set we can be sure the output space and
* OutputAccessMode of the ReducerPATask output files are set
*/
requiredConfigurationPropertyList
.add(PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_WRITE_MODE.getKey());
return requiredConfigurationPropertyList;
}
/**
* Retrieve the id of this job when it has been submitted to the ProActive
* Scheduler
*
* @return the JobId of the job
*/
public JobId getJobId() {
return jobId;
}
}
package org.ow2.proactive.scheduler.ext.mapreduce;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.security.KeyException;
import java.security.PublicKey;
import java.util.ArrayList;
import java.util.List;
import javax.security.auth.login.LoginException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.StringUtils;
import org.ow2.proactive.authentication.crypto.CredData;
import org.ow2.proactive.authentication.crypto.Credentials;
import org.ow2.proactive.scheduler.common.Scheduler;
import org.ow2.proactive.scheduler.common.SchedulerAuthenticationInterface;
import org.ow2.proactive.scheduler.common.SchedulerConnection;
import org.ow2.proactive.scheduler.common.exception.AlreadyConnectedException;
import org.ow2.proactive.scheduler.common.exception.ConnectionException;
import org.ow2.proactive.scheduler.common.exception.JobCreationException;
import org.ow2.proactive.scheduler.common.exception.NotConnectedException;
import org.ow2.proactive.scheduler.common.exception.PermissionException;
import org.ow2.proactive.scheduler.common.exception.SubmissionClosedException;
import org.ow2.proactive.scheduler.common.exception.UserException;
import org.ow2.proactive.scheduler.common.job.JobEnvironment;
import org.ow2.proactive.scheduler.common.job.JobId;
import org.ow2.proactive.scheduler.common.job.JobPriority;
import org.ow2.proactive.scheduler.common.job.TaskFlowJob;
import org.ow2.proactive.scheduler.common.task.ForkEnvironment;
import org.ow2.proactive.scheduler.common.task.JavaTask;
import org.ow2.proactive.scheduler.common.task.dataspaces.InputAccessMode;
import org.ow2.proactive.scheduler.common.task.flow.FlowScript;
import org.ow2.proactive.scheduler.ext.mapreduce.exception.PAJobConfigurationException;
import org.ow2.proactive.scheduler.ext.mapreduce.fs.PADataSpacesFileSystem;
import org.ow2.proactive.scheduler.ext.mapreduce.logging.DefaultLogger;
import org.ow2.proactive.scheduler.ext.mapreduce.logging.Logger;
import org.ow2.proactive.scheduler.task.launcher.TaskLauncher.SchedulerVars;
import org.ow2.proactive.scripting.InvalidScriptException;
import org.ow2.proactive.scripting.Script;
import org.ow2.proactive.scripting.SimpleScript;
/**
* {@link PAMapReduceJob} creates the ProActive MapReduce Workflow to submit to
* the ProActive Scheduler. To do this it translates the Hadoop Job (created
* using the new Hadoop MapReduce API) into a ProActive MapReduce Workflow,
* using some additional information the user must provide specifying them in
* the PAMapReduceJobConfiguration object.
*
* We have to notice that some code in this class is copied and pasted from the
* Hadoop classes: - see the method hasWindowsDrive - see the method
* changeHadoopPath - see the method changeHadoopPathList
*
* In the ProActive MapReduce framework we choose to not support the Hadoop
* {@link JobConf} class because it belongs to the old Hadoop API and it is deprecated.
*
* Concerning the logs of the ProActive MapReduce job we must notice that they are
* enabled/disabled via the method "Task.setPreciousLogs(boolean preciousLogs)".
* If "preciousLogs" is true, the logs produced by the task are stored in a
* "TaskLogs-[jobid]-[taskname].log" file in localspace, and transferred to
* outputspace at the end of the execution.
*
* @author The ProActive Team
*
*/
public class PAMapReduceJob {
    /** Logger shared by the ProActive MapReduce job translation code. */
    protected static final Logger logger = DefaultLogger.getInstance();
    /** The ProActive taskflow counterpart of the Hadoop job; built by init(). */
    protected TaskFlowJob mapReduceWorkflow = null;
    /** The Hadoop job (new MapReduce API) to translate. */
    protected Job hadoopJob = null;
    /** Additional, ProActive-specific configuration supplied by the user. */
    protected PAMapReduceJobConfiguration paMapReduceJobConfiguration = null;
    /**
     * Store the id of this job when it is submitted to the ProActive Scheduler
     */
    protected JobId jobId = null;
public PAMapReduceJob(Job job, PAMapReduceJobConfiguration pamrjc) throws PAJobConfigurationException {
this.hadoopJob = job;
this.paMapReduceJobConfiguration = pamrjc;
initLogger(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
checkConfiguration(paMapReduceJobConfiguration, initRequiredConfigurationProperties());
init();
}
/**
* Initialize the logger to use during the configuration of the ProActive
* MapReduce job
*
* @param debugLevelString
* the string representation of the boolean value that indicates
* if the debug log level must be enabled or not
*/
protected void initLogger(String debugLevelString) {
logger.setDebugLogLevel(Boolean.parseBoolean(debugLevelString));
}
/**
* Translate the Hadoop MapReduce Job in the ProActive one
*
* @throws JobCreationException
* @throws PAJobConfigurationException
*/
protected void init() throws PAJobConfigurationException {
if (!isInitialized()) {
// execute only if the Hadoop Job and the
// PAMapReduceJobConfiguration are not null
if ((hadoopJob != null) && (paMapReduceJobConfiguration != null)) {
/*
* Define the fork environment the tasks must tasks must use.
* TODO NOTICE 1: we cannot use the
* PASchedulerProperties.SCHEDULER_HOME property to retrieve the
* value of the ProActive Scheduler home because it will be null
* (because the HadoopMapReduceApplication is executed on the
* client side, when the user main class is executed. This means
* if the HadoopMapReduceApplication try to get ProActive
* Scheduler properties, those properties are null). So we force
* the user specify the ProActive Scheduler home folder. The
* user must not specify the folder to use to retrieve jars to
* add as additional classpaths to the ForkEnvironment (in fact
* that folder, "addons/", is directly related to the ProActive
* MapReduce Framework and is defined by the developer only once
* and cannot be changed). It will be better if in some way we
* retrieve the value of the ProActive Scheduler home from the
* Scheduler to which the mapreduce job will be submitted (...
                 * but it seems we cannot do that) TODO NOTICE 2: the additional
* classpaths added to the ForkEnvironment are extended with all
* the files (not only jars) contained in the
* "$SCHEDULER_HOME/addons/" folder. Hidden files are left out.
*
* Lastly, we must notice that by default the max size of the
* jvm heap depends on various factor such as the available
* memory on the host, the architecture of the host (32 bit, 64
* bit, ...) etc... In the case of the Eon cluster, the default
                 * heap size seems to be 1 GB and anyway it is sure it is more
* than 512MB
*/
String schedulerHomeString = paMapReduceJobConfiguration
.getPropertyAsString(PAMapReduceFrameworkProperties.SCHEDULER_HOME.key);
logger.debug("The ProActive Scheduler home is '" + schedulerHomeString + "'");
String schedulerAdditionalClasspathFolder = schedulerHomeString +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.SCHEDULER_ADDITIONAL_CLASSPATH_FOLDER.key);
logger.debug("The path of the additional classpath folder is '" +
schedulerAdditionalClasspathFolder + "'");
ForkEnvironment forkEnvironment = new ForkEnvironment();
/*
* We must explicitly add the list of the jars the ProActive MapReduce API needs
* and only them (not all the files that we may find inside the "$SCHEDULER_HOME/addons/" or
* "$SCHEDULER_HOME/dist/lib/" folders)
*/
String jarNameListString = PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.MAP_REDUCE_JARS.key);
if (jarNameListString != null) {
String[] jarNameList = jarNameListString.replaceAll("\\s+", " ").replaceAll("[,\\s]+",
",").split(",", 0);
if (jarNameList.length > 0) {
for (String jarName : jarNameList) {
forkEnvironment.addAdditionalClasspath(schedulerAdditionalClasspathFolder +
jarName);
logger.debug("Adding the following jar to the classpath '" +
schedulerAdditionalClasspathFolder + jarName + "'");
}
}
}
/*
* Specify the parameter for the forked environment
*/
String[] jvmArgumentArray = paMapReduceJobConfiguration.getJVMArguments();
if (jvmArgumentArray != null) {
for (int i = 0; i < jvmArgumentArray.length; i++) {
forkEnvironment.addJVMArgument(jvmArgumentArray[i]);
logger.debug("Setting JVM argument '" + jvmArgumentArray[i] + "'");
}
}
String reducerInputIndexFileSelector = null;
String reducerInputFileSelector = null;
String outputFileName = null;
Configuration hadoopJobConfiguration = hadoopJob.getConfiguration();
/*
* Since in the ProActive MapReduce framework actual class of
* the configuration instance is a PAHadoopJobConfiguration the
* first stuff we do is to create a PAHadoopJobConfiguration
* instance from the Hadoop Configuration one. Then all the
* getters and setters must be invoked on that instance and not
* on the Hadoop one.
*/
PAHadoopJobConfiguration paHadoopJobConfiguration = getPAHadoopJobConfiguration(hadoopJobConfiguration);
/*
* We must set some properties to be able to use the file system
* implementation based on the ProActive DataSpaces. This will
* overwrite the already existing properties in the Hadoop
* Configuration instance. In particular we must add: - a
* property whose name is "fs.<fsUriScheme>.impl" and whose
* value is the name of the class that implements the file
* system through the ProActive DataSpaces; - a property whose
* name is "fs.default.name" and whose value is the name of the
* file system implemented through the ProActive DataSpaces - a
* property whose name is "fs.<fsUriScheme>.impl.disable.cache"
* and whose value is a boolean that if "true" means the cache
* for the file system whose scheme is "fsUriScheme" is disabled
* (In the case of the file system implemented through the
* ProActive DataSpaces we leave the cache disabled)
*/
paHadoopJobConfiguration
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_IMPLEMENTATION_PROPERTY_NAME.key),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_FILE_SYSTEM_DEFAULT_IMPLEMENTATION.key));
logger
.debug("The Hadoop Abstract File System implementation is '" +
paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_IMPLEMENTATION_PROPERTY_NAME.key)) +
"'");
paHadoopJobConfiguration
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_DEFAULT_NAME_PROPERTY_NAME.key),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_FILE_SYSTEM_DEFAULT_NAME.key));
logger
.debug("The Hadoop Abstract File System implementation default name is '" +
paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_DEFAULT_NAME_PROPERTY_NAME.key)) +
"'");
paHadoopJobConfiguration
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_DISABLE_CACHE_PROPERTY_NAME.key),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_FILE_SYSTEM_DISABLE_CACHE.key));
logger
.debug("The Hadoop Abstract File System implementation enabled cache is '" +
paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_FS_DISABLE_CACHE_PROPERTY_NAME.key)) +
"'");
String[] inputPathStringList = paHadoopJobConfiguration
.get(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_INPUT_DIRECTORY_PROPERTY_NAME.key))
.split(StringUtils.COMMA_STR);
logger
.debug("The input files of the Hadoop MapREduce job are '" +
paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_INPUT_DIRECTORY_PROPERTY_NAME.key)) +
"'");
/*
* We start the build of the ProActive MapReduce job
*/
TaskFlowJob tmpMapReduceWorkflow = new TaskFlowJob();
tmpMapReduceWorkflow.setCancelJobOnError(paMapReduceJobConfiguration.getJobCancelOnError());
logger
.debug("The value of the cancelJobOnError attribute of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getJobCancelOnError() + "'");
tmpMapReduceWorkflow.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartTaskOnError());
logger.debug("The value of the restartTaskOnError of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getRestartTaskOnError() + "'");
tmpMapReduceWorkflow.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions());
logger.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getMaxNumberOfExecutions() + "'");
// tmpMapReduceWorkflow.setLogFile(paMapReduceJobConfiguration
// .getLogFilePath());
// logger.debug("The value of the logFilePath of the ProActive MapReduce job is '"
// + paMapReduceJobConfiguration.getLogFilePath() + "'");
tmpMapReduceWorkflow.setName(hadoopJob.getJobName());
logger.debug("The value of the name of the ProActive MapReduce job is '" +
hadoopJob.getJobName() + "'");
String hadoopJobPriorityString = paHadoopJobConfiguration.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_JOB_PRIORITY.key));
tmpMapReduceWorkflow.setPriority(getPriority(hadoopJobPriorityString));
logger.debug("The value of the priority of the ProActive MapReduce job is '" +
getPriority(hadoopJobPriorityString) + "'");
tmpMapReduceWorkflow.setProjectName(paMapReduceJobConfiguration.getProjectName());
logger.debug("The value of the projectName of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getProjectName() + "'");
tmpMapReduceWorkflow.setDescription(paMapReduceJobConfiguration.getDescription());
logger.debug("The value of the description of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getDescription() + "'");
tmpMapReduceWorkflow.setInputSpace(paMapReduceJobConfiguration.getInputSpace());
logger.debug("The value of the input space of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getInputSpace() + "'");
tmpMapReduceWorkflow.setOutputSpace(paMapReduceJobConfiguration.getOutputSpace());
logger.debug("The value of the output space of the ProActive MapReduce job is '" +
paMapReduceJobConfiguration.getOutputSpace() + "'");
// Set the classpath of the job
String[] classpath = paMapReduceJobConfiguration.getClasspath();
if (classpath != null) {
JobEnvironment je = new JobEnvironment();
try {
je.setJobClasspath(classpath);
} catch (IOException ioe) {
ioe.printStackTrace();
}
tmpMapReduceWorkflow.setEnvironment(je);
}
// start of create the splitter task
JavaTask splitterPATask = new JavaTask();
splitterPATask.setName(paMapReduceJobConfiguration
.getTaskName(PAMapReduceFramework.SPLITTER_PA_TASK));
logger.debug("The value of the name of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getTaskName(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask.setCancelJobOnError(paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.SPLITTER_PA_TASK));
logger
.debug("The value of the cancelJobOnError of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.SPLITTER_PA_TASK));
logger.debug("The value of the restartMode of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getRestartMode(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.SPLITTER_PA_TASK));
logger
.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getMaxNumberOfExecutions() + "'");
splitterPATask.setDescription(paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.SPLITTER_PA_TASK));
logger.debug("The value of the description of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getDescription(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask
.setPreciousLogs(PAMapReduceFrameworkProperties
.getPropertyAsBoolean(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
logger
.debug("The value of the precious logs of the ProActive MapReduce SplitterPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) +
"'");
/*
* we must add the input files to the SplitterPATask only when
* the ReadMode of the SplitterPATask is equal to fullLocalRead
*/
if (paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.SPLITTER_PA_TASK).equals(
ReadMode.fullLocalRead)) {
for (int i = 0; i < inputPathStringList.length; i++) {
splitterPATask.addInputFiles(inputPathStringList[i], paMapReduceJobConfiguration
.getInputAccessMode(PAMapReduceFramework.SPLITTER_PA_TASK));
}
}
logger.debug("The value of the readMode of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
logger
.debug("The value of the inputAccessMode of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration
.getInputAccessMode(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask
.setExecutableClassName(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_SPLITTER_PA_TASK_EXECUTABLE_CLASS.key));
logger
.debug("The value of the executablClassName of the ProActive MapReduce SplitterPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_SPLITTER_PA_TASK_EXECUTABLE_CLASS.key) +
"'");
splitterPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_SPLITTER_PA_TASK_INPUT_DATASPACE.key,
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.SPLITTER_PA_TASK));
logger.debug("The value of the input space of the ProActive MapReduce SplitterPATask is '" +
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.SPLITTER_PA_TASK) + "'");
splitterPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
splitterPATask.addArgument(PAMapReduceFrameworkProperties.HADOOP_JOB_CONFIGURATION.key,
paHadoopJobConfiguration);
InputStream replicateMapperPATaskInputStream = Thread.currentThread().getContextClassLoader()
.getResourceAsStream(PAMapReduceFramework.REPLICATE_MAPPER_PA_TASK_SCRIPT_NAME);
Script replicateMapperScript = null;
try {
replicateMapperScript = new SimpleScript(
readScriptFile(replicateMapperPATaskInputStream),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_SCRIPT_ENGINE.key),
new String[] {
SchedulerVars.JAVAENV_TASK_ID_VARNAME.toString(),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) });
} catch (InvalidScriptException ise) {
ise.printStackTrace();
}
FlowScript replicateMapperFlowScript = null;
try {
replicateMapperFlowScript = FlowScript.createReplicateFlowScript(replicateMapperScript);
} catch (InvalidScriptException ise) {
ise.printStackTrace();
}
splitterPATask.setFlowScript(replicateMapperFlowScript);
splitterPATask.setForkEnvironment(forkEnvironment);
try {
tmpMapReduceWorkflow.addTask(splitterPATask);
} catch (UserException ue) {
ue.printStackTrace();
}
// end of create splitter task
// start of create mapper task
JavaTask mapperPATask = new JavaTask();
mapperPATask.setName(paMapReduceJobConfiguration
.getTaskName(PAMapReduceFramework.MAPPER_PA_TASK));
logger.debug("The value of the name of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getTaskName(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask.setCancelJobOnError(paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.MAPPER_PA_TASK));
logger
.debug("The value of the cancelJobOnError of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.MAPPER_PA_TASK));
logger.debug("The value of the restartMode of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getRestartMode(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.MAPPER_PA_TASK));
logger
.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getMaxNumberOfExecutions() + "'");
mapperPATask.setDescription(paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.MAPPER_PA_TASK));
logger.debug("The value of the description of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getDescription(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask
.setPreciousLogs(PAMapReduceFrameworkProperties
.getPropertyAsBoolean(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
logger
.debug("The value of the precious logs of the ProActive MapReduce MapperPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) +
"'");
mapperPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_READ_MODE.key,
paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.MAPPER_PA_TASK).key);
logger.debug("The value of the readMode of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_INPUT_DATASPACE.key,
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.MAPPER_PA_TASK));
logger.debug("The value of the input space of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
/*
* we must add the input files to the MapperPATask only when the
* ReadMode of the MapperPATask is equal to fullLocalRead
*/
if (paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.MAPPER_PA_TASK).equals(
ReadMode.fullLocalRead)) {
for (int i = 0; i < inputPathStringList.length; i++) {
mapperPATask.addInputFiles(inputPathStringList[i], paMapReduceJobConfiguration
.getInputAccessMode(PAMapReduceFramework.MAPPER_PA_TASK));
}
}
mapperPATask
.setExecutableClassName(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_EXECUTABLE_CLASS.key));
logger
.debug("The value of the executableClassName of the ProActive MapReduce MapperPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_EXECUTABLE_CLASS.key) +
"'");
mapperPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_OUTPUT_DATASPACE.key,
paMapReduceJobConfiguration.getOutputSpace(PAMapReduceFramework.MAPPER_PA_TASK));
logger.debug("The value of the output space of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getOutputSpace(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
/*
* the string to use to select the output file of a mapper will
* be the following: intermediate_$REP.out TODO check that when
* the writeMode is
* "PAMapReduceFramework.WRITE_MODE_REMOTE_WRITE" (this means
* that the output access mode is "none") then we do not need to
* specify the output files of the MapperPATask TODO do the same
* for the ReducerPATask TODO when the readMode is equal to
* PAMapReduceFramework.READ_MODE_REMOTE_READ or
* PAMapReduceFramework.READ_MODE_PARTIAL_LOCAL_READ (so that
* the input access mode is "none") we do not need to add input
* files to the task (SplitterPATask, MapperPATask and
* ReducerPATask)
*/
if (!(paMapReduceJobConfiguration.getWriteMode(PAMapReduceFramework.MAPPER_PA_TASK)
.equals(WriteMode.remoteWrite))) {
outputFileName = PAMapReduceFramework
.getMapperIntermediateFileSelector(PAMapReduceFramework.REPLICATION_INDEX_TAG);
logger
.debug("The value of the intermediateFileSelector of the ProActive MapReduce MapperPATask is '" +
outputFileName + "'");
mapperPATask.addOutputFiles(outputFileName, paMapReduceJobConfiguration
.getOutputAccessMode(PAMapReduceFramework.MAPPER_PA_TASK));
logger
.debug("The value of the outputAccessMode of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration
.getOutputAccessMode(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
/*
* the string to use to select the output index file of a
* mapper will be the following: intermediate_$REP.index
*/
outputFileName = PAMapReduceFramework
.getMapperIntermediateIndexFileSelector(PAMapReduceFramework.REPLICATION_INDEX_TAG);
logger
.debug("The value of the intermediateIndexFileSelector of the ProActive MapReduce MapperPATask is '" +
outputFileName + "'");
mapperPATask.addOutputFiles(outputFileName, paMapReduceJobConfiguration
.getOutputAccessMode(PAMapReduceFramework.MAPPER_PA_TASK));
}
logger.debug("The value of the writeMode of the ProActive MapReduce MapperPATask is '" +
paMapReduceJobConfiguration.getWriteMode(PAMapReduceFramework.MAPPER_PA_TASK) + "'");
mapperPATask.addArgument(PAMapReduceFrameworkProperties.HADOOP_JOB_CONFIGURATION.key,
paHadoopJobConfiguration);
mapperPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
mapperPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_PROFILE.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_PROFILE.key));
mapperPATask.addDependence(splitterPATask);
mapperPATask.setForkEnvironment(forkEnvironment);
try {
tmpMapReduceWorkflow.addTask(mapperPATask);
} catch (UserException ue) {
ue.printStackTrace();
}
// end of create mapper task
// start of create the mapper join task
JavaTask mapperJoinPATask = new JavaTask();
mapperJoinPATask.setName(paMapReduceJobConfiguration
.getTaskName(PAMapReduceFramework.MAPPER_JOIN_PA_TASK));
logger.debug("The value of the name of the ProActive MapReduce MapperJoinPATask is '" +
paMapReduceJobConfiguration.getTaskName(PAMapReduceFramework.MAPPER_JOIN_PA_TASK) + "'");
mapperJoinPATask.setDescription(paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.MAPPER_JOIN_PA_TASK));
logger.debug("The value of the description of the ProActive MapReduce MapperJoinPATask is '" +
paMapReduceJobConfiguration.getDescription(PAMapReduceFramework.MAPPER_JOIN_PA_TASK) +
"'");
mapperJoinPATask
.setPreciousLogs(PAMapReduceFrameworkProperties
.getPropertyAsBoolean(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
logger
.debug("The value of the precious logs of the ProActive MapReduce MapperJoinPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) +
"'");
mapperJoinPATask.setCancelJobOnError(paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.MAPPER_JOIN_PA_TASK));
logger
.debug("The value of the cancelJobOnError of the ProActive MapReduce MapperJoinPATask is '" +
paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.MAPPER_JOIN_PA_TASK) + "'");
mapperJoinPATask.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.MAPPER_JOIN_PA_TASK));
logger.debug("The value of the restartMode of the ProActive MapReduce MapperJoinPATask is '" +
paMapReduceJobConfiguration.getRestartMode(PAMapReduceFramework.MAPPER_JOIN_PA_TASK) +
"'");
mapperJoinPATask.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.MAPPER_JOIN_PA_TASK));
logger
.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce MapperJoinPATask is '" +
paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.MAPPER_JOIN_PA_TASK) + "'");
mapperJoinPATask
.setExecutableClassName(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_JOIN_PA_TASK_EXECUTABLE_CLASS.key));
logger
.debug("The value of the executableClassName of the ProActive MapReduce MapperJoinPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_JOIN_PA_TASK_EXECUTABLE_CLASS.key) +
"'");
mapperJoinPATask.addDependence(mapperPATask);
mapperJoinPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
mapperJoinPATask
.addArgument(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_OUTPUT_DIRECTORY_PROPERTY_NAME.key),
paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_OUTPUT_DIRECTORY_PROPERTY_NAME.key)));
InputStream replicateReducerPATaskInputStream = Thread.currentThread()
.getContextClassLoader().getResourceAsStream(
PAMapReduceFramework.REPLICATE_REDUCER_PA_TASK_SCRIPT_NAME);
Script<?> replicateReducerScript = null;
try {
replicateReducerScript = new SimpleScript(
this.readScriptFile(replicateReducerPATaskInputStream),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_SCRIPT_ENGINE.key),
new String[] {
SchedulerVars.JAVAENV_TASK_ID_VARNAME.toString(),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key),
"" +
paHadoopJobConfiguration
.getInt(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_NUMBER_OF_REDUCER_TASKS_PROPERTY_NAME.key),
1) });
} catch (InvalidScriptException ise) {
ise.printStackTrace();
}
FlowScript replicateReducerFlowScript = null;
try {
replicateReducerFlowScript = FlowScript.createReplicateFlowScript(replicateReducerScript);
} catch (InvalidScriptException ise) {
ise.printStackTrace();
}
mapperJoinPATask.setFlowScript(replicateReducerFlowScript);
mapperJoinPATask.setForkEnvironment(forkEnvironment);
try {
tmpMapReduceWorkflow.addTask(mapperJoinPATask);
} catch (UserException ue) {
ue.printStackTrace();
}
// end of create the mapper join task
// start of: create the reducer task
JavaTask reducerPATask = new JavaTask();
reducerPATask.setName(paMapReduceJobConfiguration
.getTaskName(PAMapReduceFramework.REDUCER_PA_TASK));
logger.debug("The value of the name of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getTaskName(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.setCancelJobOnError(paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.REDUCER_PA_TASK));
logger
.debug("The value of the cancelJobOnError of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.REDUCER_PA_TASK));
logger.debug("The value of the restartMode of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getRestartMode(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.REDUCER_PA_TASK));
logger
.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.setDescription(paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.REDUCER_PA_TASK));
logger.debug("The value of the description of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getDescription(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask
.setPreciousLogs(PAMapReduceFrameworkProperties
.getPropertyAsBoolean(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
logger
.debug("The value of the precious logs of the ProActive MapReduce ReducerPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) +
"'");
reducerPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_READ_MODE.key,
paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.REDUCER_PA_TASK).key);
logger.debug("The value of the readMode of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_INPUT_DATASPACE.key,
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.REDUCER_PA_TASK));
logger.debug("The value of the input space of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getInputSpace(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
/*
* We must test if the "readMode" of the ReducerPATask is
* 'fullLocalRead' then we must transfer on the node the
* ReducerPATask will execute on the MapperPATask output "index"
* files and the MapperPATask output "actual data" files. If the
* "readMode" is "remoteRead" we must transfer on the node the
* ReducerPATask will execute on only the index files. This
* means the index files are ALWAYS transferred on the node the
* ReducerPATask will execute on (so we use
* "InputAccessMode.TransferFromOutputSpace" directly without
* retrieving the input access mode information from the
* configuration).
*/
reducerInputIndexFileSelector = PAMapReduceFramework
.getReducerIntermediateIndexFileSelector();
logger
.debug("The value of the intermediateIndexFileSelector of the ProActive MapReduce ReducerPATask is '" +
reducerInputIndexFileSelector + "'");
reducerPATask.addInputFiles(reducerInputIndexFileSelector,
InputAccessMode.TransferFromOutputSpace);
if (paMapReduceJobConfiguration.getReadMode(PAMapReduceFramework.REDUCER_PA_TASK).equals(
ReadMode.fullLocalRead)) {
/*
* the string to use to select the output files of a mapper
* is the following: intermediate_*.out
*/
reducerInputFileSelector = PAMapReduceFramework.getReducerIntermediateFileSelector();
logger
.debug("The value of the intermediateFileSelector of the ProActive MapReduce ReducerPATask is '" +
reducerInputFileSelector + "'");
reducerPATask.addInputFiles(reducerInputFileSelector, paMapReduceJobConfiguration
.getInputAccessMode(PAMapReduceFramework.REDUCER_PA_TASK));
}
reducerPATask
.setExecutableClassName(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_EXECUTABLE_CLASS.key));
reducerPATask.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_OUTPUT_DATASPACE.key,
paMapReduceJobConfiguration.getOutputSpace(PAMapReduceFramework.REDUCER_PA_TASK));
logger.debug("The value of the output space of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getOutputSpace(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
/*
* re-using the Hadoop OutputFormat we must notice the output
* will be written into a directory like the following one:
* "$OUTPUT_DIRECTORY/_temporary/_attempt_<jtIdentifier>_<jobId>_r_<taskId>_<taskAttemptId>/part-r-<taskId>"
* for which a possible selection string will be:
* "$OUTPUT_DIRECTORY/_temporary/_attempt_* /part-r-*"
*/
String outputFilesSelectionString = paHadoopJobConfiguration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_OUTPUT_DIRECTORY_PROPERTY_NAME.key)) +
File.separator + PAMapReduceFramework.getTemporaryOutputDirectoryRegex();
logger
.debug("The value of the outputFileSelectionString of the ProActive MapReduce ReducerPATask is '" +
outputFilesSelectionString + "'");
/*
* we must add the output files to the ReducerPATask only when
* the WriteMode of the ReducerPATask is equal to localWrite
*/
if (paMapReduceJobConfiguration.getWriteMode(PAMapReduceFramework.REDUCER_PA_TASK).equals(
WriteMode.localWrite)) {
reducerPATask.addOutputFiles(outputFilesSelectionString, paMapReduceJobConfiguration
.getOutputAccessMode(PAMapReduceFramework.REDUCER_PA_TASK));
}
logger
.debug("The value of the outputAccessMode of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration
.getOutputAccessMode(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
logger.debug("The value of the writeMode of the ProActive MapReduce ReducerPATask is '" +
paMapReduceJobConfiguration.getWriteMode(PAMapReduceFramework.REDUCER_PA_TASK) + "'");
reducerPATask.addArgument(PAMapReduceFrameworkProperties.HADOOP_JOB_CONFIGURATION.key,
paHadoopJobConfiguration);
reducerPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
reducerPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_PROFILE.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_PROFILE.key));
reducerPATask.addDependence(mapperJoinPATask);
reducerPATask.setForkEnvironment(forkEnvironment);
try {
tmpMapReduceWorkflow.addTask(reducerPATask);
} catch (UserException ue) {
ue.printStackTrace();
}
// end of create the reduce task
// start of create the reducer join task
JavaTask reducerJoinPATask = new JavaTask();
reducerJoinPATask.setName(paMapReduceJobConfiguration
.getTaskName(PAMapReduceFramework.REDUCER_JOIN_PA_TASK));
logger.debug("The value of the name of the ProActive MapReduce ReducerJoinPATask is '" +
paMapReduceJobConfiguration.getTaskName(PAMapReduceFramework.REDUCER_JOIN_PA_TASK) + "'");
reducerJoinPATask.setDescription(paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.REDUCER_JOIN_PA_TASK));
logger
.debug("The value of the description of the ProActive MapReduce ReducerJoinPATask is '" +
paMapReduceJobConfiguration
.getDescription(PAMapReduceFramework.REDUCER_JOIN_PA_TASK) + "'");
reducerJoinPATask
.setPreciousLogs(PAMapReduceFrameworkProperties
.getPropertyAsBoolean(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
logger
.debug("The value of the precious logs of the ProActive MapReduce ReducerJoinPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key) +
"'");
reducerJoinPATask.setCancelJobOnError(paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.REDUCER_JOIN_PA_TASK));
logger
.debug("The value of the cancelJobOnError of the ProActive MapReduce ReducerJoinPATask is '" +
paMapReduceJobConfiguration
.getCancelJobOnError(PAMapReduceFramework.REDUCER_JOIN_PA_TASK) + "'");
reducerJoinPATask.setRestartTaskOnError(paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.REDUCER_JOIN_PA_TASK));
logger
.debug("The value of the restartTaskOnError of the ProActive MapReduce ReducerJoinPATask is '" +
paMapReduceJobConfiguration
.getRestartMode(PAMapReduceFramework.REDUCER_JOIN_PA_TASK) + "'");
reducerJoinPATask.setMaxNumberOfExecution(paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.REDUCER_JOIN_PA_TASK));
logger
.debug("The value of the maxNumberOfExecutions of the ProActive MapReduce ReducerJoinPATask is '" +
paMapReduceJobConfiguration
.getMaxNumberOfExecutions(PAMapReduceFramework.REDUCER_JOIN_PA_TASK) +
"'");
reducerJoinPATask
.setExecutableClassName(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_JOIN_PA_TASK_EXECUTABLE_CLASS.key));
logger
.debug("The value of the executableClassName of the ProActive MapReduce ReducerJoinPATask is '" +
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_JOIN_PA_TASK_EXECUTABLE_CLASS.key) +
"'");
reducerJoinPATask
.addArgument(
PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key,
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key));
InputStream fileTransferPostScriptInputStream = Thread.currentThread()
.getContextClassLoader().getResourceAsStream(
PAMapReduceFramework.OUTPUT_FILE_TRANSFER_POST_SCRIPT_NAME);
Script<?> fileTransferScript = null;
try {
/*
* we must notice that when we reuse the Hadoop OutputFormat
* classes the name of the output file in which the
* ReducerPATask will put its output data is implicitly
* defined (the output folder directory will be something
* like "_temporary/_attempt..."). This means if we want to
* put the ReducerPATask output files in the directory the
* user specified and we want to give the output files the
* name the user desires we must execute a post script to
* move files (renaming them at the same time). The name of
* the ReducerPATak output file will be compliant to the
* following format: "<userDefinedPrefix><reducerId>". By
* default, if the user does not specify any prefix for the
* ReducerPATask output file names then the String retrieved
* by the method
* PAMapReduceFramework.getReducerOutputFileNamePrefix()
* will be used (that string until now is "reducer_").
* Look at the ReducerPATask class to get more information
* about the name of output file of the reducer task.
*/
String reducerOutputFileNamePrefix = null;
reducerOutputFileNamePrefix = paMapReduceJobConfiguration
.getReducerOutputFileNamePrefix();
if (reducerOutputFileNamePrefix == null) {
reducerOutputFileNamePrefix = PAMapReduceFramework.getReducerOutputFileNamePrefix();
}
fileTransferScript = new SimpleScript(
this.readScriptFile(fileTransferPostScriptInputStream),
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_SCRIPT_ENGINE.key),
new String[] {
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.WORKFLOW_JAVA_TASK_LOGGING_DEBUG.key),
outputFilesSelectionString, reducerOutputFileNamePrefix });
} catch (InvalidScriptException ise) {
ise.printStackTrace();
}
reducerJoinPATask.setPostScript(fileTransferScript);
reducerJoinPATask.addDependence(reducerPATask);
reducerJoinPATask.setForkEnvironment(forkEnvironment);
try {
tmpMapReduceWorkflow.addTask(reducerJoinPATask);
} catch (UserException ue) {
ue.printStackTrace();
}
// end of create the reducer join task
mapReduceWorkflow = tmpMapReduceWorkflow;
                /*
                 * TODO verify if there is a getUser() and getCredentials()
                 * method we could use as the username and password (in
                 * general as the credentials) to establish a connection to the
                 * scheduler
                 */
/*
* TODO verify if we can use an hadoop method as
* hadoopJobConfiguration.getMaxMapAttempts(), that give us the
* maximum number of attempts that will be made to run a map
* task
*/
                /*
                 * TODO verify if we can use a Hadoop method such as
                 * hadoopJobConfiguration.getMaxReduceAttempts(), which gives
                 * us the maximum number of attempts that will be made to run a
                 * reduce task
                 */
}
}
}
/**
* Run the Hadoop MapReduce Job
*
* @return true if the Hadoop MapReduce Job is submitted correctly to the
* ProActive Scheduler
*/
public boolean run() {
if (isInitialized()) {
return (submitJob(mapReduceWorkflow));
}
return false;
}
/**
* Check if the ProActive MapReduce Workflow is already initialized
*
* @return true if the ProActive MapReduce Workflow is already initialized,
* false otherwise
*/
protected boolean isInitialized() {
if (mapReduceWorkflow != null) {
return true;
}
return false;
}
/**
* Submit the TaskFlowJob representation of the Hadoop Job to the ProActive
* Scheduler
*
* @param taskFlowJob
* : the ProActive TaksFlowJob representation of the Hadoop Job
* @return boolean true if the job is submitted successfully, false
* otherwise
*/
protected boolean submitJob(TaskFlowJob mapReduceWorkflow) {
SchedulerAuthenticationInterface sai = null;
try {
sai = SchedulerConnection.join(paMapReduceJobConfiguration
.getPropertyAsString(PAMapReduceFrameworkProperties.SCHEDULER_URL.key));
} catch (ConnectionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
Scheduler scheduler = null;
try {
scheduler = sai.login(Credentials.getCredentials());
} catch (LoginException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (AlreadyConnectedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (KeyException e) {
try {
// (2) alternative authentication method
PublicKey pubKey = null;
try {
pubKey = sai.getPublicKey();
} catch (LoginException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
if (pubKey == null) {
pubKey = Credentials.getPublicKey(Credentials.getPubKeyPath());
}
try {
String username = paMapReduceJobConfiguration
.getPropertyAsString(PAMapReduceFrameworkProperties.SCHEDULER_USERNAME.key);
String password = paMapReduceJobConfiguration
.getPropertyAsString(PAMapReduceFrameworkProperties.SCHEDULER_PASSWORD.key);
scheduler = sai.login(Credentials.createCredentials(new CredData(username, password),
pubKey));
} catch (LoginException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
} catch (AlreadyConnectedException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
} catch (KeyException ke2) {
// cannot find public key !
}
}
if (scheduler != null) {
try {
jobId = scheduler.submit(mapReduceWorkflow);
} catch (NotConnectedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (PermissionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (SubmissionClosedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (JobCreationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
try {
scheduler.disconnect();
} catch (NotConnectedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (PermissionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if (jobId != null) {
return true;
}
return false;
}
/**
* Read the content of the file containing the script
*
* @param inputStream
* the InputStream to use to read from the file containing the
* script
* @return String the content of the script file
*/
protected String readScriptFile(InputStream inputStream) {
if (inputStream != null) {
final char[] buffer = new char[0x10000];
StringBuilder out = new StringBuilder();
Reader in;
try {
in = new InputStreamReader(inputStream, "UTF-8");
int read;
do {
read = in.read(buffer, 0, buffer.length);
if (read > 0) {
out.append(buffer, 0, read);
}
} while (read >= 0);
} catch (UnsupportedEncodingException e) {
// thrown by in.read(buffer, 0, buffer.length);
e.printStackTrace();
} catch (IOException e) {
// thrown by in.read(buffer, 0, buffer.length);
e.printStackTrace();
}
return out.toString();
}
return null;
}
/**
* Build a {@link Serializable} {@link Configuration} object so that we can
* pass it to the ProActive tasks as an argument
*
* @param configuration
* the Hadoop {@link Configuration} object from which we have to
* create the Serializable configuration object
* @return {@link PAHadoopJobConfiguration} the {@link Serializable}
* configuration object
* @throws PAJobConfigurationException
*/
protected PAHadoopJobConfiguration getPAHadoopJobConfiguration(Configuration configuration)
throws PAJobConfigurationException {
if (configuration != null) {
changeHadoopInputPathList(configuration);
changeHadoopOutputPath(configuration);
PAHadoopJobConfiguration pahjc = new PAHadoopJobConfiguration(configuration);
/*
* To force Hadoop to create InputSplit instances whose dimension
* will be the one the user specified we must set the properties
* "mapred.min.split.size" and "mapred.max.split.size" to the value
* of the split size the user defined (in the configuration file or
* invoking the method
* PAMapReduceJobConfiguration.setInputSplitSize()). We must notice
* that in the following code we set the Hadoop properties that
* represent the minimum and maximum split size equal to the size of
* the input split the user defined ONLY if the user has not already
* defined the "mapred.min.split.size" and "mapred.max.split.size"
* Hadoop property for the Hadoop job. To do that we check if the
* value of the minimum and maximum of the Hadoop properties
* "mapred.min.split.size" and "mapred.max.split.size" are equal to
* the Hadoop defined default values and if that is the case we do
* not change the values of the properties "mapred.min.split.size"
* and "mapred.max.split.size". We must also notice that, if the
* size the user defined for the input split is greater than the
* size of input file, we must want that the Hadoop FileInputFormat
* set the size of the input split equal to the size of the input
* file. To grant that we must not alter the default value of the
* "mapred.min.split.size" property. The problem is that when we
* build the ProActive MapReduce job we do not know the
* DataSpacesFileObject that refers to the input file. This means we
* cannot check if the size the user defined for the input split is
* greater than the size of the input file. Hence, to get an input
* split whose size is equal to the size of the input file we must
* not alter the default values of "mapred.min.split.size" and
* "mapred.max.split.size" properties. This means if the user does
* not specify the size of the input split the
* "mapred.min.split.size" and "mapred.max.split.size" properties
* maintain their default value and the Hadoop FileInputFormat
* creates input splits whose size is equal to the size of the input
* file. This means the input split size is not a REQUIRED property
* and that we must check if the user has defined or not the input
* split size to see when we must alter the default value of the
* "mapred.min.split.size" and "mapred.max.split.size" properties.
* In conclusion, we must notice that to obtain one input split
* whose size is equal to the size of the input file the user must
* define a size for the input split that is greater than the size
* of the input file. In that case the FileInputFormat ends to build
* the input split when the EOF is encountered in the input file. At
* that point the size of the input split is equal to the size of
* the input file. But the simple way to obtain an input split whose
* size is equal to the size of the input file is to tell the user
* not to define the property that represents the value of the input
* split size since in some cases the user cannot know the size of
* the input file in advance (i.e., he cannot define a size for the
* input split greater than the size of the input file).
*/
if (paMapReduceJobConfiguration.getInputSplitSize() != Long
.parseLong(PAMapReduceFramework
.getDefault(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
.getKey())))) {
/*
* if we are here it means the user defined the size of the
* input split since the
* PAMapReduceJobcConfiguration.getInputSplitSize() method did
* not return the default value (that is equal to the Hadoop
* maximum value for the size of the input split)
*/
logger
.debug("The value to use to set the minimum size for the input split is '" +
pahjc
.getLong(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
.getKey()),
Long
.parseLong(PAMapReduceFramework
.getDefault(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
.getKey())))) + "'");
logger
.debug("The default value of the minimum size of the input split is '" +
Long
.parseLong(PAMapReduceFramework
.getDefault(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
.getKey()))) + "'");
if (pahjc
.getLong(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
.getKey()),
Long
.parseLong(PAMapReduceFramework
.getDefault(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
.getKey())))) == Long
.parseLong(PAMapReduceFramework
.getDefault(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
.getKey())))) {
pahjc
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
.getKey()), "" +
paMapReduceJobConfiguration.getInputSplitSize());
logger
.debug("The minimum size of the input split in the ProActive MapReduce job is '" +
pahjc
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MIN_SPLIT_SIZE_PROPERTY_NAME
.getKey())) + "'");
}
logger
.debug("The value to use to set the maximum size for the input split is '" +
pahjc
.getLong(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
.getKey()),
Long
.parseLong(PAMapReduceFramework
.getDefault(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
.getKey())))) + "'");
logger
.debug("The default value of the maximum size of the input split is '" +
Long
.parseLong(PAMapReduceFramework
.getDefault(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
.getKey()))) + "'");
if (pahjc
.getLong(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
.getKey()),
Long
.parseLong(PAMapReduceFramework
.getDefault(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
.getKey())))) == Long
.parseLong(PAMapReduceFramework
.getDefault(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
.getKey())))) {
pahjc
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
.getKey()), "" +
paMapReduceJobConfiguration.getInputSplitSize());
logger
.debug("The maximum size of the input split in the ProActive MapReduce job is '" +
pahjc
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_MAX_SPLIT_SIZE_PROPERTY_NAME
.getKey())) + "'");
}
} else {
logger.debug("The user did not define the size of the input split");
}
return pahjc;
}
return null;
}
/**
* Modify each {@link Path} in the list of input path of the Hadoop job
* using the method {@link PAMapReduceJob#changeHadoopPath(String)}.
*
* This method build a new comma separated list of input paths and set it in
* the {@link Configuration} received as parameter, substituting the
* original comma separated list of input paths. Each path in the new comma
* separated list of input paths is modified according to the
* {@link PAMapReduceJob#changeHadoopPath(String)} method.
*
* We must notice that this method contains some code coped and pasted from
* the
* {@link FileInputFormat#getInputPaths(org.apache.hadoop.mapreduce.JobContext)}
* and {@link FileInputFormat#addInputPaths(Job, String)}.
*
* @param configuration
* the configuration to use to retrieve the list of the job input
* {@link Path}
*
* We must notice that this method contains some code copied and
* pasted from the code of the methods getInputPaths(JobContext
* jobContext) addInputPath(Job job, Path path) of the class
* org.apache.hadoop.mapreduce.lib.input.FileInputFormat
*/
protected void changeHadoopInputPathList(Configuration configuration) {
String inputPathStringList = configuration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_INPUT_DIRECTORY_PROPERTY_NAME.key));
if ((inputPathStringList != null) && (!inputPathStringList.trim().equalsIgnoreCase(""))) {
String newInputPathStringList = "";
String[] list = StringUtils.split(inputPathStringList);
for (int i = 0; i < list.length; i++) {
if (i == 0) {
newInputPathStringList += changeHadoopPath(StringUtils.escapeString(list[i]));
} else {
newInputPathStringList += StringUtils.COMMA_STR +
changeHadoopPath(StringUtils.escapeString(list[i]));
}
}
configuration
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_INPUT_DIRECTORY_PROPERTY_NAME.key),
newInputPathStringList);
}
}
/**
* Modify each {@link Path} in the list of output path of the Hadoop job
* according to the method {@link PAMapReduceJob#changeHadoopPath(String)}
*
* @param configuration
* the configuration to use to retrieve the job output
* {@link Path}
*/
protected void changeHadoopOutputPath(Configuration configuration) {
String outputPathString = configuration
.get(PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_OUTPUT_DIRECTORY_PROPERTY_NAME.key));
if ((outputPathString != null) && (!outputPathString.trim().equalsIgnoreCase(""))) {
outputPathString = changeHadoopPath(StringUtils.escapeString(outputPathString));
configuration
.set(
PAMapReduceFrameworkProperties
.getPropertyAsString(PAMapReduceFrameworkProperties.HADOOP_OUTPUT_DIRECTORY_PROPERTY_NAME.key),
outputPathString);
}
}
/**
* Modify the string representation of an Hadoop path in the way the new
* path will be relative (i.e., no initial "/" under unix). In such a way
* when we instantiate an Hadoop {@link Path} no scheme will be added and
* the Hadoop classes that need the {@link FileSystem} implementation to
* which that path belongs to will be forced to retrieve that information
* from the configuration. This means that in the case of the ProActive
* MapReduce configuration the returned FileSystem implementation will be
* {@link PADataSpacesFileSystem} (whose configuration properties is
* {@link PAMapReduceFrameworkProperties#WORKFLOW_FILE_SYSTEM_DEFAULT_NAME}
* ). We must look at {@link Path#getFileSystem(Configuration)} (and
* following the invocation chain at
* {@link FileSystem#getDefaultUri(Configuration)}) to get more details. In
* the case of the ProActive MapReduce framework the name of the file system
* is "pads:///" and this tells to the Hadoop FileSystem to instantiate via
* reflection the class referred by the property "fs.pads.impl" that in the
* case of ProActive MapReduce framework is
* "org.ow2.proactive.scheduler.ext.hadoopmapreduce.fs.PADataSpacesFileSystem"
* . This class corresponds to the {@link FileSystem} implementation based
* on DataSpacesFileObject
*
* @param pathString
* the String representation of the Hadoop {@link Path}. The
* string will identify a relative path (i.e., no initial "/"
* under unix)
*
     *            We have to notice that the code of this method is copied and
     *            pasted from the constructor Path(String pathString) of the
     *            Hadoop class org.apache.hadoop.fs.Path
*/
protected String changeHadoopPath(String pathString) {
String modifiedPath = pathString;
// add a slash in front of paths with Windows drive letters
if (hasWindowsDrive(pathString, false))
pathString = "/" + pathString;
// parse uri components
String scheme = null;
String authority = null;
int start = 0;
// parse uri scheme, if any
int colon = pathString.indexOf(':');
int slash = pathString.indexOf('/');
if ((colon != -1) && ((slash == -1) || (colon < slash))) { // has a
// scheme
scheme = pathString.substring(0, colon);
start = colon + 1;
/*
* We must substitute the existing scheme with our "pads" scheme
* preserving the colon and the slash. Then remembering that Hadoop
* MapReduce resolve relative paths using the Java System Property
* "user.dir" we have to delete the occurrence of the value of that
* string with the path part of the user configured input dataspace.
* E.g., if the Hadoop created path is something like
* "file:/home/theproactiveteam/workspace/proactive_mapreduce_client/current_input"
* and and the value of the "user.dir" Java System property is
* "/home/theproactiveteam/workspace/proactive_mapreduce_client/"
* then the non scheme string part will be:
* ":/home/theproactiveteam/workspace/proactive_mapreduce_client/current_input"
* and the "user.dir" will begin at index 1 in the no scheme string
* part. The string after the "user.dir" string in the no scheme
* part string will be "/current_input" and to obtain the relative
* path we must left out the initial "/" (this is the reason of the
* "substring(1)")
*
* Lastly after all is executed we must obtain "current_input".
*/
String noSchemeStringPart = pathString.substring(colon);
String userDirString = System.getProperty(PAMapReduceFramework.USER_DIR);
int startOfUserDirString = pathString.indexOf(userDirString);
if (startOfUserDirString > 0) {
/*
* startOfUserDirString is greater than 0 because the character
* whose index is zero is the colon (because the scheme part is
* defined, in the Java URI, as the string that comes before the
* colon)
*/
/*
* delete the "user.dir" string from the no scheme string part
*/
String afterUserDirString = pathString.substring(startOfUserDirString +
userDirString.length());
/*
* we left out the initial "/" (or "\") character TODO test if
* with windows it will work because maybe Java under windows
* can use "\\" so that we must do "substring(2)"
*/
modifiedPath = afterUserDirString.substring(1);
} else {
/*
* the "user.dir" string is not contained in the no scheme
* string part so we only we left out the first two characters
* ":/" of the no scheme string
*/
modifiedPath = noSchemeStringPart.substring(2);
}
}
return modifiedPath;
}
/**
 * Tells whether the String representation of an Hadoop {@link Path} starts
 * with a Windows drive letter (e.g. "C:" or "/C:" when slashed). Always
 * false on non-Windows platforms. TODO delete the hard coding.
 *
 * The logic is copied from the method of the same name in the Hadoop
 * {@link Path} class.
 *
 * @param pathString
 *            the String representation of the Hadoop path
 * @param slashed
 *            whether the drive letter is expected after a leading "/"
 * @return true if the path carries a Windows drive letter, false otherwise
 */
protected boolean hasWindowsDrive(String pathString, boolean slashed) {
    // drive letters are only meaningful on Windows hosts
    if (!System.getProperty("os.name").startsWith("Windows")) {
        return false;
    }
    int driveIndex = slashed ? 1 : 0;
    // need at least "<letter>:" (plus the leading "/" when slashed)
    if (pathString.length() < driveIndex + 2) {
        return false;
    }
    if (slashed && pathString.charAt(0) != '/') {
        return false;
    }
    if (pathString.charAt(driveIndex + 1) != ':') {
        return false;
    }
    char drive = pathString.charAt(driveIndex);
    return (drive >= 'A' && drive <= 'Z') || (drive >= 'a' && drive <= 'z');
}
/**
 * Translate the string representation of the priority of the Hadoop Job into
 * the equivalent priority of the ProActive Job. A null or blank string maps
 * to {@link JobPriority#NORMAL}.
 *
 * @param hadoopJobPriorityString
 *            the string representation of the Hadoop job priority; must name
 *            a constant of {@link org.apache.hadoop.mapred.JobPriority} when
 *            non-blank, otherwise valueOf throws IllegalArgumentException
 * @return {@link JobPriority} the priority of the ProActive Job
 */
protected JobPriority getPriority(String hadoopJobPriorityString) {
    if ((hadoopJobPriorityString == null) || (hadoopJobPriorityString.trim().equalsIgnoreCase(""))) {
        return JobPriority.NORMAL;
    }
    // resolve the Hadoop enum once instead of once per comparison
    switch (org.apache.hadoop.mapred.JobPriority.valueOf(hadoopJobPriorityString)) {
        case VERY_HIGH:
            return JobPriority.HIGHEST;
        case HIGH:
            return JobPriority.HIGH;
        case NORMAL:
            return JobPriority.NORMAL;
        case LOW:
            return JobPriority.LOW;
        case VERY_LOW:
            return JobPriority.LOWEST;
        default:
            // unreachable for the current Hadoop enum; kept for safety
            return JobPriority.IDLE;
    }
}
/**
 * Check that the given configuration defines every required property.
 *
 * @param pamrjc
 *            the configuration to check
 * @param requiredConfigurationPropertyList
 *            the names of the properties that must be set
 * @return true when every required property is set
 * @throws PAJobConfigurationException
 *             when a required property is missing
 */
protected boolean checkConfiguration(PAMapReduceJobConfiguration pamrjc,
        List<String> requiredConfigurationPropertyList) throws PAJobConfigurationException {
    for (String propertyName : requiredConfigurationPropertyList) {
        String value = pamrjc.getPropertyAsString(propertyName);
        if (value == null) {
            throw new PAJobConfigurationException("Property '" + propertyName +
                "' is required but it is not set!");
        }
    }
    return true;
}
/**
 * Initialize the list of the properties that must be set to be able to build
 * the ProActive MapReduce taskflow.
 *
 * @return the names of the required configuration properties
 */
protected List<String> initRequiredConfigurationProperties() {
    List<String> requiredConfigurationPropertyList = new ArrayList<String>();
    /*
     * The property that stores the ProActive Scheduler home is required
     * because the ProActive MapReduce API/framework configuration would use
     * that information to add the "/addons" directory to the
     * ForkEnvironment a task must use
     */
    requiredConfigurationPropertyList.add(PAMapReduceFrameworkProperties.SCHEDULER_HOME.getKey());
    /*
     * The property that stores the value of the input space the ProActive
     * MapReduce job must use is needed because otherwise we do not know
     * where input files are stored
     */
    requiredConfigurationPropertyList.add(PAMapReduceFrameworkProperties.WORKFLOW_INPUT_SPACE.getKey());
    /*
     * The property that stores the value of the output space the ProActive
     * MapReduce job must use is needed because otherwise we do not know
     * where output files must be stored.
     * NOTE(review): this previously re-added WORKFLOW_INPUT_SPACE, so the
     * output-space requirement was never actually enforced — fixed to
     * WORKFLOW_OUTPUT_SPACE; confirm the constant name against
     * PAMapReduceFrameworkProperties.
     */
    requiredConfigurationPropertyList.add(PAMapReduceFrameworkProperties.WORKFLOW_OUTPUT_SPACE.getKey());
    /*
     * The property that stores the "readMode" of the MapperPATask is needed
     * because if it is set we can be sure the input space and
     * InputAccessMode of the MapperPATask input files are set
     */
    requiredConfigurationPropertyList
        .add(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_READ_MODE.getKey());
    /*
     * The property that stores the "readMode" of the ReducerPATask is
     * needed because if it is set we can be sure the input space and
     * InputAccessMode of the ReducerPATask input files are set
     */
    requiredConfigurationPropertyList
        .add(PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_READ_MODE.getKey());
    /*
     * The property that stores the "writeMode" of the MapperPATask is
     * needed because if it is set we can be sure the output space and
     * OutputAccessMode of the MapperPATask output files are set
     */
    requiredConfigurationPropertyList
        .add(PAMapReduceFrameworkProperties.WORKFLOW_MAPPER_PA_TASK_WRITE_MODE.getKey());
    /*
     * The property that stores the "writeMode" of the ReducerPATask is
     * needed because if it is set we can be sure the output space and
     * OutputAccessMode of the ReducerPATask output files are set
     */
    requiredConfigurationPropertyList
        .add(PAMapReduceFrameworkProperties.WORKFLOW_REDUCER_PA_TASK_WRITE_MODE.getKey());
    return requiredConfigurationPropertyList;
}
/**
 * Retrieve the id this job received when it was submitted to the ProActive
 * Scheduler.
 *
 * @return the {@link JobId} of the job
 */
public JobId getJobId() {
    return this.jobId;
}
}
| SCHEDULING-1307 : Workaround : add ProActive_Scheduler-core.jar to forkEnvironment classpath using envScript
git-svn-id: 27916816d6cfa57849e9a885196bf7392b80e1ac@20359 28e8926c-6b08-0410-baaa-805c5e19b8d6
| src/scheduler/src/org/ow2/proactive/scheduler/ext/mapreduce/PAMapReduceJob.java | SCHEDULING-1307 : Workaround : add ProActive_Scheduler-core.jar to forkEnvironment classpath using envScript |
|
Java | agpl-3.0 | 021838cdb837544027eceaba5fd4ea415e264c02 | 0 | opensourceBIM/BIMserver,opensourceBIM/BIMserver,opensourceBIM/BIMserver | package org.bimserver.servlets;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Random;
import javax.activation.DataHandler;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.bimserver.BimServer;
import org.bimserver.BimserverDatabaseException;
import org.bimserver.bimbots.BimBotsException;
import org.bimserver.bimbots.BimBotsOutput;
import org.bimserver.bimbots.BimBotsServiceInterface;
import org.bimserver.bimbots.BimServerBimBotsInput;
import org.bimserver.database.BimDatabase;
import org.bimserver.database.DatabaseSession;
import org.bimserver.database.OldQuery;
import org.bimserver.database.OldQuery.Deep;
import org.bimserver.emf.IfcModelInterface;
import org.bimserver.emf.PackageMetaData;
import org.bimserver.emf.Schema;
import org.bimserver.ifc.BasicIfcModel;
import org.bimserver.interfaces.objects.SDeserializerPluginConfiguration;
import org.bimserver.interfaces.objects.SExtendedData;
import org.bimserver.interfaces.objects.SExtendedDataSchema;
import org.bimserver.interfaces.objects.SFile;
import org.bimserver.interfaces.objects.SProject;
import org.bimserver.models.log.AccessMethod;
import org.bimserver.models.store.InternalServicePluginConfiguration;
import org.bimserver.models.store.ObjectState;
import org.bimserver.models.store.PluginDescriptor;
import org.bimserver.models.store.Revision;
import org.bimserver.models.store.StorePackage;
import org.bimserver.models.store.User;
import org.bimserver.models.store.UserSettings;
import org.bimserver.plugins.PluginConfiguration;
import org.bimserver.plugins.SchemaName;
import org.bimserver.plugins.deserializers.DeserializeException;
import org.bimserver.plugins.deserializers.Deserializer;
import org.bimserver.plugins.deserializers.DeserializerPlugin;
import org.bimserver.plugins.services.ServicePlugin;
import org.bimserver.shared.exceptions.PluginException;
import org.bimserver.shared.exceptions.ServerException;
import org.bimserver.shared.exceptions.UserException;
import org.bimserver.shared.interfaces.ServiceInterface;
import org.bimserver.utils.InputStreamDataSource;
import org.bimserver.webservices.authorization.AuthenticationException;
import org.bimserver.webservices.authorization.Authorization;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class ServiceRunnerServlet extends SubServlet {
private static final Logger LOGGER = LoggerFactory.getLogger(ServiceRunnerServlet.class);
	/**
	 * Creates the servlet that exposes BIMbot services over HTTP.
	 *
	 * @param bimServer the server instance used to resolve plugins, users and settings
	 * @param servletContext the servlet context this sub-servlet runs under
	 */
	public ServiceRunnerServlet(BimServer bimServer, ServletContext servletContext) {
		super(bimServer, servletContext);
	}
@Override
public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if (request.getRequestURI().endsWith("/servicelist")) {
processServiceList(request, response);
return;
}
String token = null;
if (request.getHeader("Authorization") != null) {
String a = request.getHeader("Authorization");
if (a.startsWith("Bearer")) {
token = a.substring(7);
}
}
if (token == null) {
token = request.getHeader("Token");
}
LOGGER.info("Token: " + token);
String serviceName = request.getHeader("ServiceName");
if (serviceName == null) {
serviceName = request.getRequestURI();
if (serviceName.startsWith("/services/")) {
serviceName = serviceName.substring(10);
}
}
LOGGER.info("ServiceName: " + serviceName);
long serviceOid = Long.parseLong(serviceName);
String inputType = request.getHeader("Input-Type");
LOGGER.info("Input-Type: " + inputType);
try (DatabaseSession session = getBimServer().getDatabase().createSession()) {
Authorization authorization = Authorization.fromToken(getBimServer().getEncryptionKey(), token);
User user = session.get(authorization.getUoid(), OldQuery.getDefault());
if (user == null) {
LOGGER.error("Service \"" + serviceName + "\" not found for this user");
throw new UserException("No user found with uoid " + authorization.getUoid());
}
if (user.getState() == ObjectState.DELETED) {
LOGGER.error("User has been deleted");
throw new UserException("User has been deleted");
}
InternalServicePluginConfiguration foundService = null;
UserSettings userSettings = user.getUserSettings();
for (InternalServicePluginConfiguration internalServicePluginConfiguration : userSettings.getServices()) {
if (internalServicePluginConfiguration.getOid() == serviceOid) {
foundService = internalServicePluginConfiguration;
break;
}
}
if (foundService == null) {
LOGGER.info("Service \"" + serviceName + "\" not found for this user");
throw new ServletException("Service \"" + serviceName + "\" not found for this user");
}
PluginDescriptor pluginDescriptor = foundService.getPluginDescriptor();
ServicePlugin servicePlugin = getBimServer().getPluginManager().getServicePlugin(pluginDescriptor.getPluginClassName(), true);
if (servicePlugin instanceof BimBotsServiceInterface) {
LOGGER.info("Found service " + servicePlugin);
BimBotsServiceInterface bimBotsServiceInterface = (BimBotsServiceInterface)servicePlugin;
try {
if (getBimServer().getServerSettingsCache().getServerSettings().isStoreServiceRuns()) {
LOGGER.info("Storing intermediate results");
// When we store service runs, we can just use the streaming deserializer to stream directly to the database, after that we'll trigger the actual service
// Create or find project and link user and service to project
// Checkin stream into project
// Trigger service
ServiceInterface serviceInterface = getBimServer().getServiceFactory().get(authorization, AccessMethod.INTERNAL).get(ServiceInterface.class);
SProject project = serviceInterface.addProject("tmp-" + new Random().nextInt(), "ifc2x3tc1");
SDeserializerPluginConfiguration deserializer = serviceInterface.getSuggestedDeserializerForExtension("ifc", project.getOid());
if (deserializer == null) {
throw new BimBotsException("No deserializer found");
}
serviceInterface.checkin(project.getOid(), "Auto checkin", deserializer.getOid(), -1L, "s", new DataHandler(new InputStreamDataSource(request.getInputStream())), false, true);
project = serviceInterface.getProjectByPoid(project.getOid());
PackageMetaData packageMetaData = getBimServer().getMetaDataManager().getPackageMetaData(project.getSchema());
IfcModelInterface model = new BasicIfcModel(packageMetaData, null);
try {
Revision revision = session.get(project.getLastRevisionId(), OldQuery.getDefault());
session.getMap(model, new OldQuery(packageMetaData, project.getId(), revision.getId(), revision.getOid(), null, Deep.NO));
} catch (BimserverDatabaseException e) {
e.printStackTrace();
}
BimServerBimBotsInput input = new BimServerBimBotsInput(getBimServer(), authorization.getUoid(), null, null, model);
BimBotsOutput output = bimBotsServiceInterface.runBimBot(input, getBimServer().getSConverter().convertToSObject(foundService.getSettings()));
SExtendedData extendedData = new SExtendedData();
SFile file = new SFile();
file.setData(output.getData());
file.setFilename(output.getContentDisposition());
file.setMime(output.getContentType());
file.setSize(output.getData().length);
Long fileId = serviceInterface.uploadFile(file);
extendedData.setFileId(fileId);
extendedData.setTitle(output.getTitle());
SExtendedDataSchema extendedDataSchema = null;
try {
extendedDataSchema = serviceInterface.getExtendedDataSchemaByName(output.getSchemaName());
} catch (UserException e) {
extendedDataSchema = new SExtendedDataSchema();
extendedDataSchema.setContentType(output.getContentType());
extendedDataSchema.setName(output.getSchemaName());
serviceInterface.addExtendedDataSchema(extendedDataSchema);
}
extendedData.setSchemaId(extendedDataSchema.getOid());
serviceInterface.addExtendedDataToRevision(project.getLastRevisionId(), extendedData);
response.setHeader("Output-Type", output.getSchemaName());
response.setHeader("Data-Title", output.getTitle());
response.setHeader("Data-Identifier", "" + project.getOid());
response.setHeader("Content-Type", output.getContentType());
response.setHeader("Content-Disposition", output.getContentDisposition());
response.getOutputStream().write(output.getData());
} else {
// When we don't store the service runs, there is no other way than to just use the old deserializer and run the service from the EMF model
LOGGER.info("NOT Storing intermediate results");
DeserializerPlugin deserializerPlugin = getBimServer().getPluginManager().getFirstDeserializer("ifc", Schema.IFC2X3TC1, true);
if (deserializerPlugin == null) {
throw new BimBotsException("No deserializer plugin found");
}
byte[] data = IOUtils.toByteArray(request.getInputStream());
SchemaName schema = SchemaName.valueOf(inputType);
Deserializer deserializer = deserializerPlugin.createDeserializer(new PluginConfiguration());
PackageMetaData packageMetaData = getBimServer().getMetaDataManager().getPackageMetaData("ifc2x3tc1");
deserializer.init(packageMetaData);
IfcModelInterface model = deserializer.read(new ByteArrayInputStream(data), schema.name(), data.length, null);
BimServerBimBotsInput input = new BimServerBimBotsInput(getBimServer(), authorization.getUoid(), schema, data, model);
BimBotsOutput output = bimBotsServiceInterface.runBimBot(input, getBimServer().getSConverter().convertToSObject(foundService.getSettings()));
response.setHeader("Output-Type", output.getSchemaName());
response.setHeader("Data-Title", output.getTitle());
response.setHeader("Content-Type", output.getContentType());
response.setHeader("Content-Disposition", output.getContentDisposition());
response.getOutputStream().write(output.getData());
}
} catch (BimBotsException e) {
LOGGER.error("", e);
} catch (DeserializeException e) {
LOGGER.error("", e);
} catch (PluginException e) {
LOGGER.error("", e);
} catch (ServerException e) {
LOGGER.error("", e);
}
} else {
throw new ServletException("Service \"" + serviceName + "\" does not implement the BimBotsServiceInterface");
}
} catch (AuthenticationException e) {
LOGGER.error("", e);
} catch (BimserverDatabaseException e) {
LOGGER.error("", e);
} catch (UserException e) {
LOGGER.error("", e);
}
}
private void processServiceList(HttpServletRequest request, HttpServletResponse response) {
BimDatabase database = getBimServer().getDatabase();
ObjectMapper mapper = new ObjectMapper();
ObjectNode result = mapper.createObjectNode();
ArrayNode array = mapper.createArrayNode();
result.set("services", array);
try (DatabaseSession session = database.createSession()) {
for (PluginDescriptor pluginDescriptor : session.getAllOfType(StorePackage.eINSTANCE.getPluginDescriptor(), PluginDescriptor.class, OldQuery.getDefault())) {
if (pluginDescriptor.getPluginInterfaceClassName().equals(ServicePlugin.class.getName())) {
ServicePlugin servicePlugin = getBimServer().getPluginManager().getServicePlugin(pluginDescriptor.getPluginClassName(), true);
if (servicePlugin instanceof BimBotsServiceInterface) {
try {
BimBotsServiceInterface bimBotsServiceInterface = (BimBotsServiceInterface)servicePlugin;
ObjectNode descriptorJson = mapper.createObjectNode();
descriptorJson.put("id", pluginDescriptor.getOid());
descriptorJson.put("name", pluginDescriptor.getName());
descriptorJson.put("description", pluginDescriptor.getDescription());
descriptorJson.put("provider", getBimServer().getServerSettingsCache().getServerSettings().getName());
descriptorJson.put("providerIcon", getBimServer().getServerSettingsCache().getServerSettings().getIcon());
ArrayNode inputs = mapper.createArrayNode();
ArrayNode outputs = mapper.createArrayNode();
for (String schemaName : bimBotsServiceInterface.getAvailableInputs()) {
inputs.add(schemaName);
}
for (String schemaName : bimBotsServiceInterface.getAvailableOutputs()) {
outputs.add(schemaName);
}
descriptorJson.set("inputs", inputs);
descriptorJson.set("outputs", outputs);
ObjectNode oauth = mapper.createObjectNode();
oauth.put("authorizationUrl", getBimServer().getServerSettingsCache().getServerSettings().getSiteAddress() + "/oauth/authorize");
oauth.put("registerUrl", getBimServer().getServerSettingsCache().getServerSettings().getSiteAddress() + "/oauth/register");
oauth.put("tokenUrl", getBimServer().getServerSettingsCache().getServerSettings().getSiteAddress() + "/oauth/access");
descriptorJson.set("oauth", oauth);
descriptorJson.put("resourceUrl", getBimServer().getServerSettingsCache().getServerSettings().getSiteAddress() + "/services");
array.add(descriptorJson);
} catch (Exception e) {
LOGGER.error("", e);
}
}
}
}
response.setContentType("application/json");
response.getOutputStream().write(mapper.writeValueAsBytes(result));
} catch (BimserverDatabaseException e) {
LOGGER.error("", e);
} catch (JsonProcessingException e) {
LOGGER.error("", e);
} catch (IOException e) {
LOGGER.error("", e);
}
}
} | BimServer/src/org/bimserver/servlets/ServiceRunnerServlet.java | package org.bimserver.servlets;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Random;
import javax.activation.DataHandler;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.bimserver.BimServer;
import org.bimserver.BimserverDatabaseException;
import org.bimserver.bimbots.BimBotsException;
import org.bimserver.bimbots.BimBotsOutput;
import org.bimserver.bimbots.BimBotsServiceInterface;
import org.bimserver.bimbots.BimServerBimBotsInput;
import org.bimserver.database.BimDatabase;
import org.bimserver.database.DatabaseSession;
import org.bimserver.database.OldQuery;
import org.bimserver.database.OldQuery.Deep;
import org.bimserver.emf.IfcModelInterface;
import org.bimserver.emf.PackageMetaData;
import org.bimserver.emf.Schema;
import org.bimserver.ifc.BasicIfcModel;
import org.bimserver.interfaces.objects.SDeserializerPluginConfiguration;
import org.bimserver.interfaces.objects.SExtendedData;
import org.bimserver.interfaces.objects.SExtendedDataSchema;
import org.bimserver.interfaces.objects.SFile;
import org.bimserver.interfaces.objects.SProject;
import org.bimserver.models.log.AccessMethod;
import org.bimserver.models.store.InternalServicePluginConfiguration;
import org.bimserver.models.store.ObjectState;
import org.bimserver.models.store.PluginDescriptor;
import org.bimserver.models.store.Revision;
import org.bimserver.models.store.StorePackage;
import org.bimserver.models.store.User;
import org.bimserver.models.store.UserSettings;
import org.bimserver.plugins.PluginConfiguration;
import org.bimserver.plugins.SchemaName;
import org.bimserver.plugins.deserializers.DeserializeException;
import org.bimserver.plugins.deserializers.Deserializer;
import org.bimserver.plugins.deserializers.DeserializerPlugin;
import org.bimserver.plugins.services.ServicePlugin;
import org.bimserver.shared.exceptions.PluginException;
import org.bimserver.shared.exceptions.ServerException;
import org.bimserver.shared.exceptions.UserException;
import org.bimserver.shared.interfaces.ServiceInterface;
import org.bimserver.utils.InputStreamDataSource;
import org.bimserver.webservices.authorization.AuthenticationException;
import org.bimserver.webservices.authorization.Authorization;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class ServiceRunnerServlet extends SubServlet {
private static final Logger LOGGER = LoggerFactory.getLogger(ServiceRunnerServlet.class);
public ServiceRunnerServlet(BimServer bimServer, ServletContext servletContext) {
super(bimServer, servletContext);
}
@Override
public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if (request.getRequestURI().endsWith("/servicelist")) {
processServiceList(request, response);
return;
}
String token = null;
if (request.getHeader("Authorization") != null) {
String a = request.getHeader("Authorization");
if (a.startsWith("Bearer")) {
token = a.substring(7);
}
}
if (token == null) {
token = request.getHeader("Token");
}
LOGGER.info("Token: " + token);
String serviceName = request.getHeader("ServiceName");
if (serviceName == null) {
serviceName = request.getRequestURI();
if (serviceName.startsWith("/services/")) {
serviceName = serviceName.substring(10);
}
}
LOGGER.info("ServiceName: " + serviceName);
long serviceOid = Long.parseLong(serviceName);
String inputType = request.getHeader("Input-Type");
LOGGER.info("Input-Type: " + inputType);
try (DatabaseSession session = getBimServer().getDatabase().createSession()) {
Authorization authorization = Authorization.fromToken(getBimServer().getEncryptionKey(), token);
User user = session.get(authorization.getUoid(), OldQuery.getDefault());
if (user == null) {
LOGGER.error("Service \"" + serviceName + "\" not found for this user");
throw new UserException("No user found with uoid " + authorization.getUoid());
}
if (user.getState() == ObjectState.DELETED) {
LOGGER.error("User has been deleted");
throw new UserException("User has been deleted");
}
InternalServicePluginConfiguration foundService = null;
UserSettings userSettings = user.getUserSettings();
for (InternalServicePluginConfiguration internalServicePluginConfiguration : userSettings.getServices()) {
if (internalServicePluginConfiguration.getOid() == serviceOid) {
foundService = internalServicePluginConfiguration;
break;
}
}
if (foundService == null) {
LOGGER.info("Service \"" + serviceName + "\" not found for this user");
throw new ServletException("Service \"" + serviceName + "\" not found for this user");
}
PluginDescriptor pluginDescriptor = foundService.getPluginDescriptor();
ServicePlugin servicePlugin = getBimServer().getPluginManager().getServicePlugin(pluginDescriptor.getPluginClassName(), true);
if (servicePlugin instanceof BimBotsServiceInterface) {
LOGGER.info("Found service " + servicePlugin);
BimBotsServiceInterface bimBotsServiceInterface = (BimBotsServiceInterface)servicePlugin;
try {
if (getBimServer().getServerSettingsCache().getServerSettings().isStoreServiceRuns()) {
LOGGER.info("Storing intermediate results");
// When we store service runs, we can just use the streaming deserializer to stream directly to the database, after that we'll trigger the actual service
// Create or find project and link user and service to project
// Checkin stream into project
// Trigger service
ServiceInterface serviceInterface = getBimServer().getServiceFactory().get(authorization, AccessMethod.INTERNAL).get(ServiceInterface.class);
SProject project = serviceInterface.addProject("tmp-" + new Random().nextInt(), "ifc2x3tc1");
SDeserializerPluginConfiguration deserializer = serviceInterface.getSuggestedDeserializerForExtension("ifc", project.getOid());
if (deserializer == null) {
throw new BimBotsException("No deserializer found");
}
serviceInterface.checkin(project.getOid(), "Auto checkin", deserializer.getOid(), -1L, "s", new DataHandler(new InputStreamDataSource(request.getInputStream())), false, true);
project = serviceInterface.getProjectByPoid(project.getOid());
PackageMetaData packageMetaData = getBimServer().getMetaDataManager().getPackageMetaData(project.getSchema());
IfcModelInterface model = new BasicIfcModel(packageMetaData, null);
try {
Revision revision = session.get(project.getLastRevisionId(), OldQuery.getDefault());
session.getMap(model, new OldQuery(packageMetaData, project.getId(), revision.getId(), revision.getOid(), null, Deep.NO));
} catch (BimserverDatabaseException e) {
e.printStackTrace();
}
BimServerBimBotsInput input = new BimServerBimBotsInput(getBimServer(), authorization.getUoid(), null, null, model);
BimBotsOutput output = bimBotsServiceInterface.runBimBot(input, getBimServer().getSConverter().convertToSObject(foundService.getSettings()));
SExtendedData extendedData = new SExtendedData();
SFile file = new SFile();
file.setData(output.getData());
file.setFilename(output.getContentDisposition());
file.setMime(output.getContentType());
file.setSize(output.getData().length);
Long fileId = serviceInterface.uploadFile(file);
extendedData.setFileId(fileId);
extendedData.setTitle(output.getTitle());
SExtendedDataSchema extendedDataSchema = null;
try {
extendedDataSchema = serviceInterface.getExtendedDataSchemaByName(output.getSchemaName());
} catch (UserException e) {
extendedDataSchema = new SExtendedDataSchema();
extendedDataSchema.setContentType(output.getContentType());
extendedDataSchema.setName(output.getSchemaName());
serviceInterface.addExtendedDataSchema(extendedDataSchema);
}
extendedData.setSchemaId(extendedDataSchema.getOid());
serviceInterface.addExtendedDataToRevision(project.getLastRevisionId(), extendedData);
response.setHeader("Output-Type", output.getSchemaName());
response.setHeader("Data-Title", output.getTitle());
response.setHeader("Content-Type", output.getContentType());
response.setHeader("Content-Disposition", output.getContentDisposition());
response.getOutputStream().write(output.getData());
} else {
// When we don't store the service runs, there is no other way than to just use the old deserializer and run the service from the EMF model
LOGGER.info("NOT Storing intermediate results");
DeserializerPlugin deserializerPlugin = getBimServer().getPluginManager().getFirstDeserializer("ifc", Schema.IFC2X3TC1, true);
if (deserializerPlugin == null) {
throw new BimBotsException("No deserializer plugin found");
}
byte[] data = IOUtils.toByteArray(request.getInputStream());
SchemaName schema = SchemaName.valueOf(inputType);
Deserializer deserializer = deserializerPlugin.createDeserializer(new PluginConfiguration());
PackageMetaData packageMetaData = getBimServer().getMetaDataManager().getPackageMetaData("ifc2x3tc1");
deserializer.init(packageMetaData);
IfcModelInterface model = deserializer.read(new ByteArrayInputStream(data), schema.name(), data.length, null);
BimServerBimBotsInput input = new BimServerBimBotsInput(getBimServer(), authorization.getUoid(), schema, data, model);
BimBotsOutput output = bimBotsServiceInterface.runBimBot(input, getBimServer().getSConverter().convertToSObject(foundService.getSettings()));
response.setHeader("Output-Type", output.getSchemaName());
response.setHeader("Data-Title", output.getTitle());
response.setHeader("Content-Type", output.getContentType());
response.setHeader("Content-Disposition", output.getContentDisposition());
response.getOutputStream().write(output.getData());
}
} catch (BimBotsException e) {
LOGGER.error("", e);
} catch (DeserializeException e) {
LOGGER.error("", e);
} catch (PluginException e) {
LOGGER.error("", e);
} catch (ServerException e) {
LOGGER.error("", e);
}
} else {
throw new ServletException("Service \"" + serviceName + "\" does not implement the BimBotsServiceInterface");
}
} catch (AuthenticationException e) {
LOGGER.error("", e);
} catch (BimserverDatabaseException e) {
LOGGER.error("", e);
} catch (UserException e) {
LOGGER.error("", e);
}
}
/**
 * Handles the service-listing endpoint: writes a JSON document describing
 * every installed ServicePlugin that implements BimBotsServiceInterface.
 *
 * The response body has the shape {"services": [...]} and is written with
 * content type application/json. Failures are logged; no error status is set
 * on the response, so clients may receive an empty or partial body on error.
 */
private void processServiceList(HttpServletRequest request, HttpServletResponse response) {
	BimDatabase database = getBimServer().getDatabase();
	ObjectMapper mapper = new ObjectMapper();
	ObjectNode result = mapper.createObjectNode();
	ArrayNode array = mapper.createArrayNode();
	result.set("services", array);
	try (DatabaseSession session = database.createSession()) {
		for (PluginDescriptor pluginDescriptor : session.getAllOfType(StorePackage.eINSTANCE.getPluginDescriptor(), PluginDescriptor.class, OldQuery.getDefault())) {
			if (pluginDescriptor.getPluginInterfaceClassName().equals(ServicePlugin.class.getName())) {
				ServicePlugin servicePlugin = getBimServer().getPluginManager().getServicePlugin(pluginDescriptor.getPluginClassName(), true);
				if (servicePlugin instanceof BimBotsServiceInterface) {
					// One misbehaving plugin must not prevent listing the others.
					try {
						BimBotsServiceInterface bimBotsServiceInterface = (BimBotsServiceInterface) servicePlugin;
						ObjectNode descriptorJson = mapper.createObjectNode();
						descriptorJson.put("id", pluginDescriptor.getOid());
						descriptorJson.put("name", pluginDescriptor.getName());
						descriptorJson.put("description", pluginDescriptor.getDescription());
						descriptorJson.put("provider", getBimServer().getServerSettingsCache().getServerSettings().getName());
						descriptorJson.put("providerIcon", getBimServer().getServerSettingsCache().getServerSettings().getIcon());
						ArrayNode inputs = mapper.createArrayNode();
						ArrayNode outputs = mapper.createArrayNode();
						for (String schemaName : bimBotsServiceInterface.getAvailableInputs()) {
							inputs.add(schemaName);
						}
						for (String schemaName : bimBotsServiceInterface.getAvailableOutputs()) {
							outputs.add(schemaName);
						}
						descriptorJson.set("inputs", inputs);
						descriptorJson.set("outputs", outputs);
						// Hoisted: the same settings value was fetched once per URL before.
						String siteAddress = getBimServer().getServerSettingsCache().getServerSettings().getSiteAddress();
						ObjectNode oauth = mapper.createObjectNode();
						oauth.put("authorizationUrl", siteAddress + "/oauth/authorize");
						oauth.put("registerUrl", siteAddress + "/oauth/register");
						oauth.put("tokenUrl", siteAddress + "/oauth/access");
						descriptorJson.set("oauth", oauth);
						descriptorJson.put("resourceUrl", siteAddress + "/services");
						array.add(descriptorJson);
					} catch (Exception e) {
						LOGGER.error("", e);
					}
				}
			}
		}
		response.setContentType("application/json");
		response.getOutputStream().write(mapper.writeValueAsBytes(result));
	} catch (BimserverDatabaseException | IOException e) {
		// JsonProcessingException extends IOException, so the previous three
		// identical catch blocks collapse into this single multi-catch.
		LOGGER.error("", e);
	}
}
} | https://github.com/opensourceBIM/BIMserver/issues/720 | BimServer/src/org/bimserver/servlets/ServiceRunnerServlet.java | https://github.com/opensourceBIM/BIMserver/issues/720 |
|
Java | lgpl-2.1 | fa336e80816b1f7ce4c6c274c42c020cb6b5125c | 0 | SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer | /*
* Copyright (C) 2020 tobid.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*/
package net.sf.jaer.eventprocessing.filter;
import com.jogamp.opengl.GL2;
import com.jogamp.opengl.GLAutoDrawable;
import com.jogamp.opengl.util.gl2.GLUT;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import net.sf.jaer.Description;
import net.sf.jaer.DevelopmentStatus;
import net.sf.jaer.chip.AEChip;
import net.sf.jaer.event.BasicEvent;
import net.sf.jaer.event.EventPacket;
import net.sf.jaer.eventprocessing.EventFilter;
import net.sf.jaer.eventprocessing.FilterChain;
import net.sf.jaer.graphics.FrameAnnotater;
/**
 * Filter for testing noise filters.
 *
 * Injects synthetic noise events into each incoming packet, runs the enclosed
 * denoising filter chain on the mixed packet, and scores the chain's output
 * against the original (assumed clean) events with a confusion matrix
 * (TP/TN/FP/FN). Summary statistics are printed per packet and drawn on the
 * annotation overlay.
 *
 * @author tobid/shasah
 */
@Description("Tests noise filters by injecting known noise and measuring how much signal and noise is filtered")
@DevelopmentStatus(DevelopmentStatus.Status.InDevelopment)
public class NoiseTesterFilter extends AbstractNoiseFilter implements FrameAnnotater {

    /** Enclosed chain of denoising filters under test. */
    FilterChain chain;

    // Noise rates exposed as filter properties (Hz per pixel).
    private float shotNoiseRateHz = getFloat("shotNoiseRateHz", .1f);
    private float leakNoiseRateHz = getFloat("leakNoiseRateHz", .1f);

    // Largest valid pixel coordinates (chip size minus one), set in initFilter().
    private int sx;
    private int sy;

    private int startEventTime = -1; // ts of the first event in this packet
    private int endEventTime = -1; // ts of the last event in this packet
    private int lastEventTime = -1; // ts of the last event in last packet
    private BasicEvent lastE;

    // Per-packet classification statistics, recomputed in filterPacket().
    private float TPR = 0;       // true positive rate (sensitivity)
    private float precision = 0; // TP / (TP + FP)
    private float TNR = 0;       // true negative rate (specificity)
    private float accuracy = 0;  // (TP + TN) / total
    // Harmonic mean of TPR and precision (F1-like); 1 when both are 1.
    float balanceRelation = 0;

    /**
     * Builds the filter and its enclosed denoising chain
     * (BackgroundActivityFilter followed by SequenceBasedFilter).
     *
     * @param chip the chip whose events this filter processes
     */
    public NoiseTesterFilter(AEChip chip) {
        super(chip);
        chain = new FilterChain(chip);
        chain.add(new BackgroundActivityFilter(chip));
        chain.add(new SequenceBasedFilter(chip));
        setEnclosedFilterChain(chain);
        setPropertyTooltip("shotNoiseRateHz", "rate per pixel of shot noise events");
        setPropertyTooltip("leakNoiseRateHz", "rate per pixel of leak noise events");
    }

    /**
     * Draws the current TPR/TNR/balance statistics onto the display overlay.
     */
    @Override
    public void annotate(GLAutoDrawable drawable) {
        if (!showFilteringStatistics) {
            return;
        }
        GL2 gl = drawable.getGL().getGL2();
        gl.glPushMatrix();
        final GLUT glut = new GLUT();
        gl.glColor3f(.2f, .2f, .8f); // must set color before raster position (raster position is like glVertex)
        gl.glRasterPos3f(0, 10, 0);
        String s = String.format("TPR=%%%6.1f, TNR=%%%6.1f, BR=%%%6.1f", 100 * TPR, 100 * TNR, 100 * balanceRelation);
        glut.glutBitmapString(GLUT.BITMAP_HELVETICA_18, s);
        gl.glPopMatrix();
    }

    /**
     * Adds noise to a copy of the incoming packet, filters the mixed packet
     * through the enclosed chain, and derives TP/TN/FP/FN by set algebra over
     * the original, mixed and filtered event lists. Returns the filtered
     * (mixed) packet.
     *
     * NOTE(review): the set operations assume BasicEvent implements
     * equals/hashCode by value — confirm.
     */
    @Override
    public EventPacket<?> filterPacket(EventPacket<?> in) {
        totalEventCount = 0;
        filteredOutEventCount = 0;
        startEventTime = in.getFirstTimestamp();
        endEventTime = in.getLastTimestamp();

        int TP = 0; // filter keeps real events as real events
        int TN = 0; // filter removes noise events as noise events
        int FP = 0; // filter keeps noise events as real events
        int FN = 0; // filter removes real events as noise events

        List<BasicEvent> inList = new ArrayList<BasicEvent>(in.getSize());
        for (BasicEvent e : in) {
            inList.add(e);
            lastE = e; // remember the most recent event for noise timestamping
        }

        // Inject noise into a copy of the packet.
        EventPacket<BasicEvent> newIn = addNoise(in, shotNoiseRateHz, leakNoiseRateHz);
        List<BasicEvent> newInList = new ArrayList<BasicEvent>(newIn.getSize());
        for (BasicEvent e : newIn) {
            newInList.add(e);
        }

        EventPacket<BasicEvent> out = getEnclosedFilterChain().filterPacket(newIn);
        List<BasicEvent> outList = new ArrayList<BasicEvent>(out.getSize());
        for (BasicEvent e : out) {
            outList.add(e);
        }

        // Signal events that survived filtering = TP.
        Set<BasicEvent> truePositives = new HashSet<BasicEvent>(outList);
        truePositives.retainAll(inList);
        TP = truePositives.size();

        // Signal events that were removed = FN (input is TP + FN).
        Set<BasicEvent> falseNegatives = new HashSet<BasicEvent>(inList);
        falseNegatives.removeAll(truePositives);
        FN = falseNegatives.size();

        // Injected noise = mixed packet minus clean input = TN + FP.
        Set<BasicEvent> noise = new HashSet<BasicEvent>(newInList);
        noise.removeAll(inList);

        // Noise that leaked through the filter = FP.
        Set<BasicEvent> falsePositives = new HashSet<BasicEvent>(noise);
        falsePositives.retainAll(outList);
        FP = falsePositives.size();

        // Noise correctly removed = TN.
        Set<BasicEvent> trueNegatives = new HashSet<BasicEvent>(noise);
        trueNegatives.removeAll(falsePositives);
        TN = trueNegatives.size();

        // Every ratio guards its denominator against zero.
        TPR = TP + FN == 0 ? 0 : (float) (TP * 1.0 / (TP + FN));
        precision = TP + FP == 0 ? 0 : (float) (TP * 1.0 / (TP + FP));
        TNR = TN + FP == 0 ? 0 : (float) (TN * 1.0 / (TN + FP));
        int total = TP + TN + FP + FN;
        accuracy = total == 0 ? 0 : (float) ((TP + TN) * 1.0 / total); // guard added: was 0/0 == NaN for an empty packet
        balanceRelation = TPR + precision == 0 ? 0 : (float) (2 * TPR * precision / (TPR + precision));

        System.out.printf("every event is: %d %d %d TP: %d TN: %d FP: %d FN: %d %%%3.1f %%%3.1f %%%3.1f\n", inList.size(), newInList.size(), outList.size(), TP, TN, FP, FN, 100 * TPR, 100 * TNR, 100 * balanceRelation);

        lastEventTime = endEventTime;
        return out;
    }

    @Override
    public void resetFilter() {
        // no per-run state to reset yet
    }

    @Override
    public void initFilter() {
        lastE = new BasicEvent();
        sx = chip.getSizeX() - 1;
        sy = chip.getSizeY() - 1;
    }

    /**
     * @return the shotNoiseRateHz
     */
    public float getShotNoiseRateHz() {
        return shotNoiseRateHz;
    }

    /**
     * @param shotNoiseRateHz the shotNoiseRateHz to set
     */
    public void setShotNoiseRateHz(float shotNoiseRateHz) {
        this.shotNoiseRateHz = shotNoiseRateHz;
        putFloat("shotNoiseRateHz", shotNoiseRateHz);
    }

    /**
     * @return the leakNoiseRateHz
     */
    public float getLeakNoiseRateHz() {
        return leakNoiseRateHz;
    }

    /**
     * @param leakNoiseRateHz the leakNoiseRateHz to set
     */
    public void setLeakNoiseRateHz(float leakNoiseRateHz) {
        this.leakNoiseRateHz = leakNoiseRateHz;
        putFloat("leakNoiseRateHz", leakNoiseRateHz);
    }

    /**
     * Returns a copy of the input packet with synthetic noise events appended:
     * 10 events at uniformly random pixel positions, timestamped shortly after
     * the last real event seen so far.
     *
     * NOTE(review): the rate parameters are currently unused — the noise burst
     * is fixed. TODO honor shotNoiseRateHz/leakNoiseRateHz.
     */
    private EventPacket<BasicEvent> addNoise(EventPacket<? extends BasicEvent> in, float shotNoiseRateHz, float leakNoiseRateHz) {
        EventPacket<BasicEvent> newIn = new EventPacket<BasicEvent>();
        newIn.appendCopy((EventPacket<BasicEvent>) in); // unchecked but safe: elements are BasicEvents
        final int lastts = lastE.timestamp;
        final int min = 0;
        // Noise timestamps follow the last real event at offsets 2, 32, ..., 272 us.
        for (int i = 2; i < 300; i += 30) {
            BasicEvent noiseE = new BasicEvent();
            noiseE.x = (short) (min + (int) (Math.random() * ((sx - min) + 1)));
            noiseE.y = (short) (min + (int) (Math.random() * ((sy - min) + 1)));
            noiseE.timestamp = lastts + i;
            newIn.appendCopy(noiseE);
        }
        return newIn;
    }
}
| src/net/sf/jaer/eventprocessing/filter/NoiseTesterFilter.java | /*
* Copyright (C) 2020 tobid.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*/
package net.sf.jaer.eventprocessing.filter;
import com.jogamp.opengl.GL2;
import com.jogamp.opengl.GLAutoDrawable;
import com.jogamp.opengl.util.gl2.GLUT;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import net.sf.jaer.Description;
import net.sf.jaer.DevelopmentStatus;
import net.sf.jaer.chip.AEChip;
import net.sf.jaer.event.BasicEvent;
import net.sf.jaer.event.EventPacket;
import net.sf.jaer.eventprocessing.EventFilter;
import net.sf.jaer.eventprocessing.FilterChain;
import net.sf.jaer.graphics.FrameAnnotater;
/**
 * Filter for testing noise filters.
 *
 * Injects synthetic noise events into each packet, runs the enclosed
 * denoising filter chain, and scores the chain's output against the original
 * events using TP/TN/FP/FN counts.
 *
 * @author tobid/shasah
 */
@Description("Tests noise filters by injecting known noise and measuring how much signal and noise is filtered")
@DevelopmentStatus(DevelopmentStatus.Status.InDevelopment)
public class NoiseTesterFilter extends AbstractNoiseFilter implements FrameAnnotater {

    // Enclosed chain of denoising filters under test.
    FilterChain chain;

    // Noise rates exposed as filter properties (Hz per pixel).
    private float shotNoiseRateHz = getFloat("shotNoiseRateHz", .1f);
    private float leakNoiseRateHz = getFloat("leakNoiseRateHz", .1f);

    private int startEventTime = -1; // ts of the first event in this packet
    private int endEventTime = -1; // ts of the last event in this packet
    private int lastEventTime = -1; // ts of the last event in last packet

    // Per-packet classification statistics, recomputed in filterPacket().
    private float TPR = 0;
    private float precision = 0;
    private float TNR = 0;
    private float accuracy = 0;
    // NOTE(review): this initializer runs once at construction with
    // TPR == precision == 0, so it evaluates 0f/0f == NaN, not 0.
    float balanceRelation = 2 * TPR * precision / (TPR + precision); // wish to norm to 1. if both TPR and precision is 1. the value is 1

    /**
     * Builds the filter and its enclosed denoising chain
     * (BackgroundActivityFilter followed by SequenceBasedFilter).
     *
     * @param chip the chip whose events this filter processes
     */
    public NoiseTesterFilter(AEChip chip) {
        super(chip);
        chain = new FilterChain(chip);
        chain.add(new BackgroundActivityFilter(chip));
        chain.add(new SequenceBasedFilter(chip));
        setEnclosedFilterChain(chain);
        setPropertyTooltip("shotNoiseRateHz", "rate per pixel of shot noise events");
        setPropertyTooltip("leakNoiseRateHz", "rate per pixel of leak noise events");
    }

    /**
     * Draws the current TPR/TNR statistics onto the display overlay.
     */
    @Override
    public void annotate(GLAutoDrawable drawable) {
        if (!showFilteringStatistics) {
            return;
        }
        GL2 gl = drawable.getGL().getGL2();
        gl.glPushMatrix();
        final GLUT glut = new GLUT();
        gl.glColor3f(.2f, .2f, .8f); // must set color before raster position (raster position is like glVertex)
        gl.glRasterPos3f(0, 10, 0);
        // final float filteredOutPercent = 100 * (float) filteredOutEventCount / totalEventCount;
        // String s = null;
        String s = String.format("TPR=%%%6.1f, TNR=%%%6.1f", 100*TPR,100*TNR);
        glut.glutBitmapString(GLUT.BITMAP_HELVETICA_18, s);
        gl.glPopMatrix();
    }

    /**
     * Adds noise to the packet, filters the mixed packet through the enclosed
     * chain, and derives TP/TN/FP/FN from set algebra over the original,
     * mixed and filtered event lists.
     *
     * NOTE(review): the set operations assume BasicEvent implements
     * equals/hashCode by value — confirm.
     */
    @Override
    public EventPacket<?> filterPacket(EventPacket<?> in) {
        totalEventCount = 0;
        filteredOutEventCount = 0;
        startEventTime = in.getFirstTimestamp();
        endEventTime = in.getLastTimestamp();
        // record the first timestamp and last timestamp of the packet
        // add noise into the packet in and get a new packet?
        EventPacket<BasicEvent> newIn = addNoise(in, shotNoiseRateHz, leakNoiseRateHz);
        EventPacket<BasicEvent> out = getEnclosedFilterChain().filterPacket(newIn);
        int TP = 0; // filter take real events as real events. the number of events
        int TN = 0; // filter take noise events as noise events
        int FP = 0; // filter take noise events as real events
        int FN = 0; // filter take real events as noise events
        ArrayList inList = new ArrayList<BasicEvent>(in.getSize());
        ArrayList newInList = new ArrayList<BasicEvent>(newIn.getSize());
        ArrayList outList = new ArrayList<BasicEvent>(out.getSize());
        for (BasicEvent e : in) {
            inList.add(e);
        }
        for (BasicEvent e : newIn) {
            newInList.add(e);
        }
        for (BasicEvent e : out) {
            outList.add(e);
        }
        // compare out with newIn and in to get TP, TN, FP, FN. consider using set intersecion and union
        Set<BasicEvent> result = new HashSet<BasicEvent>((Collection<? extends BasicEvent>) outList);
        // java.lang.ClassCastException: net.sf.jaer.event.ApsDvsEventPacket cannot be cast to java.util.Collection
        // at net.sf.jaer.eventprocessing.filter.NoiseTesterFilter.filterPacket(NoiseTesterFilter.java:83)
        result.retainAll((Collection<?>) inList); // Intersection,
        // in is the clean real events, so the intersection will result the collection of TP
        TP = result.size();
        Set<BasicEvent> result2 = new HashSet<BasicEvent>((Collection<? extends BasicEvent>) inList);
        result2.removeAll(result);
        // subtraction, the intersection result is the TP, in is TP + FN
        // so in - result = #FN
        FN = result2.size();
        Set<BasicEvent> noise;
        noise = new HashSet<BasicEvent>((Collection<? extends BasicEvent>) newInList);
        noise.removeAll((Collection<?>) inList);
        // noise is TN + FP
        Set<BasicEvent> noise1 = new HashSet<BasicEvent>(noise);
        noise1.retainAll((Collection<?>) outList); // intersection
        // noise but occur in the filters output, this is False Positive FP
        FP = noise1.size();
        Set<BasicEvent> noise2 = new HashSet<BasicEvent>(noise);
        noise2.removeAll(noise1); // subtraction
        // TN + FP - FP = TN.
        TN = noise2.size();
        // NOTE(review): TP, FN etc. are ints, so the divisions below are
        // INTEGER divisions — each ratio truncates to 0 or 1 before the
        // float assignment. This first line also throws ArithmeticException
        // (divide by zero) when TP + FN == 0.
        TPR = TP / (TP + FN);
        precision = TP + FP == 0 ? 0 : TP / (TP + FP);
        TNR = TN + FP == 0 ? 0 : TN / (TN + FP);
        // NOTE(review): integer division as well, and divide-by-zero when all
        // four counts are 0.
        accuracy = (TP + TN) / (TP + TN + FP + FN);
        // NOTE(review): NaN when TPR + precision == 0.
        balanceRelation = 2 * TPR * precision / (TPR + precision); // wish to norm to 1. if both TPR and precision is 1. the value is 1
        // in=getEnclosedFilterChain().filterPacket(in);
        return out;
    }

    @Override
    public void resetFilter() {
        // no per-run state to reset yet
    }

    @Override
    public void initFilter() {
        // no initialization needed yet
    }

    /**
     * @return the shotNoiseRateHz
     */
    public float getShotNoiseRateHz() {
        return shotNoiseRateHz;
    }

    /**
     * @param shotNoiseRateHz the shotNoiseRateHz to set
     */
    public void setShotNoiseRateHz(float shotNoiseRateHz) {
        this.shotNoiseRateHz = shotNoiseRateHz;
        putFloat("shotNoiseRateHz", shotNoiseRateHz);
    }

    /**
     * @return the leakNoiseRateHz
     */
    public float getLeakNoiseRateHz() {
        return leakNoiseRateHz;
    }

    /**
     * @param leakNoiseRateHz the leakNoiseRateHz to set
     */
    public void setLeakNoiseRateHz(float leakNoiseRateHz) {
        this.leakNoiseRateHz = leakNoiseRateHz;
        putFloat("leakNoiseRateHz", leakNoiseRateHz);
    }

    /**
     * Appends synthetic noise events to a copy of the input packet, derived by
     * repeatedly dividing the coordinates of the packet's last event.
     *
     * NOTE(review): the rate parameters are unused. newInList.get(count - 1)
     * throws IndexOutOfBoundsException when the packet is empty, and noiseE is
     * the same object mutated cumulatively between appendCopy() calls —
     * presumably appendCopy copies the event, but verify.
     */
    private EventPacket addNoise(EventPacket<? extends BasicEvent> in, float shotNoiseRateHz, float leakNoiseRateHz) {
        // throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
        EventPacket<BasicEvent> newIn = new EventPacket<BasicEvent>();
        newIn.appendCopy((EventPacket<BasicEvent>) in);
        ArrayList newInList = new ArrayList<BasicEvent>(newIn.getSize());
        int count = 0;
        for (BasicEvent e : in) {
            newInList.add(e);
            count += 1;
        }
        BasicEvent noiseE = (BasicEvent) newInList.get(count - 1);
        for(int i = 2; i < 10; i ++){
            noiseE.x = (short) (noiseE.x / i);
            newIn.appendCopy(noiseE);
            noiseE.y = (short) (noiseE.y / i);
            newIn.appendCopy(noiseE);
        }
        return newIn;
    }
}
| it can work now for the dot moving clean dataset | src/net/sf/jaer/eventprocessing/filter/NoiseTesterFilter.java | it can work now for the dot moving clean dataset |
|
Java | lgpl-2.1 | 5ea055187c87eabc75f6888f8d275e9062fb0037 | 0 | ShiftMediaProject/libbluray,koying/libbluray,UIKit0/libbluray,ShiftMediaProject/libbluray,zxlooong/libbluray,EdwardNewK/libbluray,Azzuro/libbluray,tourettes/libbluray,vlc-mirror/libbluray,UIKit0/libbluray,tourettes/libbluray,zxlooong/libbluray,vlc-mirror/libbluray,ShiftMediaProject/libbluray,ace20022/libbluray,koying/libbluray,ShiftMediaProject/libbluray,ace20022/libbluray,Azzuro/libbluray,mwgoldsmith/bluray,tourettes/libbluray,mwgoldsmith/bluray,Distrotech/libbluray,Distrotech/libbluray,koying/libbluray,Distrotech/libbluray,Distrotech/libbluray,EdwardNewK/libbluray,UIKit0/libbluray,zxlooong/libbluray,koying/libbluray,mwgoldsmith/bluray,mwgoldsmith/bluray,pingflood/libbluray,tourettes/libbluray,Azzuro/libbluray,ace20022/libbluray,ace20022/libbluray,vlc-mirror/libbluray,UIKit0/libbluray,Azzuro/libbluray,EdwardNewK/libbluray,vlc-mirror/libbluray,EdwardNewK/libbluray,pingflood/libbluray,pingflood/libbluray | /*
* This file is part of libbluray
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.videolan.media.content.playlist;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Rectangle;
import org.dvb.media.VideoPresentationControl;
import org.havi.ui.HScreen;
import org.havi.ui.HScreenRectangle;
import org.havi.ui.HVideoConfiguration;
import org.videolan.StreamInfo;
/**
 * Video presentation control for playlist playback: converts between pixel
 * and screen-normalized coordinates, and exposes the active/clip video areas.
 */
public abstract class VideoControl extends StreamControl implements VideoPresentationControl {

    /**
     * @param player owning player
     * @param plane  video plane this control operates on
     */
    protected VideoControl(Handler player, int plane) {
        super(player);
        this.plane = plane;
    }

    /**
     * Converts a pixel rectangle into coordinates normalized against the
     * given dimension (fractions in the range 0..1). Returns an all-zero
     * rectangle when the dimension is degenerate.
     */
    protected HScreenRectangle getNormalizedRectangle(Dimension dimension, Rectangle rectangle) {
        if ((dimension.width == 0) || (dimension.height == 0))
            return new HScreenRectangle(0, 0, 0, 0);

        // Fix: all operands are ints, so "rectangle.x / dimension.width"
        // performed integer division and truncated every fraction to 0.
        // Promote to float before dividing.
        float x = (float) rectangle.x / dimension.width;
        float y = (float) rectangle.y / dimension.height;
        float w = (float) rectangle.width / dimension.width;
        float h = (float) rectangle.height / dimension.height;

        return new HScreenRectangle(x, y, w, h);
    }

    /** Converts a normalized rectangle back to pixel coordinates. */
    protected Rectangle getRectangle(Dimension dimension, HScreenRectangle rectangle) {
        int x = (int) (rectangle.x * dimension.width);
        int y = (int) (rectangle.y * dimension.height);
        int w = (int) (rectangle.width * dimension.width);
        int h = (int) (rectangle.height * dimension.height);
        return new Rectangle(x, y, w, h);
    }

    /** Clamps a normalized coordinate into [0, 1]. */
    protected float getPositionOnScreen(float pos) {
        if (pos < 0.0f)
            return 0.0f;
        if (pos > 1.0f)
            return 1.0f;
        return pos;
    }

    /** Clamps a normalized rectangle so it lies fully on screen. */
    protected HScreenRectangle getRectangleOnScreen(HScreenRectangle rectangle) {
        float x1 = getPositionOnScreen(rectangle.x);
        float y1 = getPositionOnScreen(rectangle.y);
        float x2 = getPositionOnScreen(rectangle.x + rectangle.width);
        float y2 = getPositionOnScreen(rectangle.y + rectangle.height);
        return new HScreenRectangle(x1, y1, x2 - x1, y2 - y1);
    }

    /** @return pixel resolution of the default video device configuration */
    protected Dimension getScreenSize() {
        HVideoConfiguration hvc = HScreen.getDefaultHScreen().getDefaultHVideoDevice().getCurrentConfiguration();
        return hvc.getPixelResolution();
    }

    protected void setVideoArea(HScreenRectangle rectangle) {
        dstArea = rectangle;
        // TODO: propagate the new destination area to the presentation layer
    }

    /** @return native size of the current video stream, or 0x0 if none */
    public Dimension getInputVideoSize() {
        StreamInfo stream = getCurrentStream();
        if (stream == null)
            return new Dimension(0, 0);
        return stream.getVideoSize();
    }

    /** @return on-screen size of the destination video area, in pixels */
    public Dimension getVideoSize() {
        Rectangle dr = getRectangle(getScreenSize(), dstArea);
        return new Dimension(dr.width, dr.height);
    }

    public HScreenRectangle getActiveVideoArea() {
        // defensive copy so callers cannot mutate dstArea
        return new HScreenRectangle(dstArea.x, dstArea.y, dstArea.width, dstArea.height);
    }

    public HScreenRectangle getActiveVideoAreaOnScreen() {
        return getRectangleOnScreen(dstArea);
    }

    public HScreenRectangle getTotalVideoArea() {
        return getActiveVideoArea();
    }

    public HScreenRectangle getTotalVideoAreaOnScreen() {
        return getActiveVideoAreaOnScreen();
    }

    public boolean supportsClipping() {
        return true;
    }

    public Rectangle getClipRegion() {
        return getRectangle(getVideoSize(), srcArea);
    }

    /**
     * Records the requested clip region (normalized and clamped on screen)
     * and returns the region actually set. The clipping itself is not yet
     * applied to the presentation layer.
     */
    public Rectangle setClipRegion(Rectangle clipRect) {
        Dimension vd = getInputVideoSize();
        if ((vd.width == 0) || (vd.height == 0))
            return new Rectangle(0, 0);
        srcArea = getRectangleOnScreen(getNormalizedRectangle(vd, clipRect));
        // TODO: apply the clip region to the underlying video plane
        org.videolan.Logger.unimplemented("VideoControl", "setClipRegion");
        return getRectangle(vd, srcArea);
    }

    public float[] supportsArbitraryHorizontalScaling() {
        return new float[] { 0.001f, 4.0f };
    }

    public float[] supportsArbitraryVerticalScaling() {
        return new float[] { 0.001f, 4.0f };
    }

    public float[] getHorizontalScalingFactors() {
        throw new Error("Not implemented"); // TODO implement
    }

    public float[] getVerticalScalingFactors() {
        throw new Error("Not implemented"); // TODO implement
    }

    public byte getPositioningCapability() {
        return POS_CAP_FULL;
    }

    public Component getControlComponent() {
        throw new Error("Not implemented"); // TODO implement
    }

    private int plane = 0;
    // source (clip) and destination areas in normalized screen coordinates
    private HScreenRectangle srcArea = new HScreenRectangle(0.0f, 0.0f, 1.0f, 1.0f);
    private HScreenRectangle dstArea = new HScreenRectangle(0.0f, 0.0f, 1.0f, 1.0f);
}
| src/libbluray/bdj/java/org/videolan/media/content/playlist/VideoControl.java | /*
* This file is part of libbluray
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.videolan.media.content.playlist;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Rectangle;
import org.dvb.media.VideoPresentationControl;
import org.havi.ui.HScreen;
import org.havi.ui.HScreenRectangle;
import org.havi.ui.HVideoConfiguration;
import org.videolan.StreamInfo;
/**
 * Video presentation control for playlist playback: converts between pixel
 * and screen-normalized coordinates, and exposes the active/clip video areas.
 */
public abstract class VideoControl extends StreamControl implements VideoPresentationControl {

    /**
     * @param player owning player
     * @param plane  video plane this control operates on
     */
    protected VideoControl(Handler player, int plane) {
        super(player);
        this.plane = plane;
    }

    /**
     * Converts a pixel rectangle to coordinates normalized against the given
     * dimension.
     *
     * NOTE(review): all operands are ints, so each division below is INTEGER
     * division — the fractions truncate to 0 for any rectangle smaller than
     * the dimension. Cast to float before dividing.
     */
    protected HScreenRectangle getNormalizedRectangle(Dimension dimension, Rectangle rectangle) {
        if ((dimension.width == 0) || (dimension.height == 0))
            return new HScreenRectangle(0, 0, 0, 0);
        float x = rectangle.x / dimension.width;
        float y = rectangle.y / dimension.height;
        float w = rectangle.width / dimension.width;
        float h = rectangle.height / dimension.height;
        return new HScreenRectangle(x, y, w, h);
    }

    /** Converts a normalized rectangle back to pixel coordinates. */
    protected Rectangle getRectangle(Dimension dimension, HScreenRectangle rectangle) {
        int x = (int)(rectangle.x * dimension.width);
        int y = (int)(rectangle.y * dimension.height);
        int w = (int)(rectangle.width * dimension.width);
        int h = (int)(rectangle.height * dimension.height);
        return new Rectangle(x, y, w, h);
    }

    /** Clamps a normalized coordinate into [0, 1]. */
    protected float getPositionOnScreen(float pos) {
        if (pos < 0.0f)
            return 0.0f;
        if (pos > 1.0f)
            return 1.0f;
        return pos;
    }

    /** Clamps a normalized rectangle so it lies fully on screen. */
    protected HScreenRectangle getRectangleOnScreen(HScreenRectangle rectangle) {
        float x1 = getPositionOnScreen(rectangle.x);
        float y1 = getPositionOnScreen(rectangle.y);
        float x2 = getPositionOnScreen(rectangle.x + rectangle.width);
        float y2 = getPositionOnScreen(rectangle.y + rectangle.height);
        return new HScreenRectangle(x1, y1, x2 - x1, y2 - y1);
    }

    /** @return pixel resolution of the default video device configuration */
    protected Dimension getScreenSize() {
        HVideoConfiguration hvc = HScreen.getDefaultHScreen().getDefaultHVideoDevice().getCurrentConfiguration();
        return hvc.getPixelResolution();
    }

    protected void setVideoArea(HScreenRectangle rectangle) {
        dstArea = rectangle;
        // TODO
    }

    /** @return native size of the current video stream, or 0x0 if none */
    public Dimension getInputVideoSize() {
        StreamInfo stream = getCurrentStream();
        if (stream == null)
            return new Dimension(0, 0);
        return stream.getVideoSize();
    }

    public Dimension getVideoSize() {
        Rectangle dr = getRectangle(getScreenSize(), dstArea);
        return new Dimension(dr.width, dr.height);
    }

    public HScreenRectangle getActiveVideoArea() {
        // defensive copy so callers cannot mutate dstArea
        return new HScreenRectangle(dstArea.x, dstArea.y, dstArea.width, dstArea.height);
    }

    public HScreenRectangle getActiveVideoAreaOnScreen() {
        return getRectangleOnScreen(dstArea);
    }

    public HScreenRectangle getTotalVideoArea() {
        return getActiveVideoArea();
    }

    public HScreenRectangle getTotalVideoAreaOnScreen() {
        return getActiveVideoAreaOnScreen();
    }

    public boolean supportsClipping() {
        return true;
    }

    public Rectangle getClipRegion() {
        return getRectangle(getVideoSize(), srcArea);
    }

    /**
     * NOTE(review): updates srcArea and then always throws Error — the
     * commented-out return is unreachable, so this method can never succeed.
     */
    public Rectangle setClipRegion(Rectangle clipRect) {
        Dimension vd = getInputVideoSize();
        if ((vd.width == 0) || (vd.height == 0))
            return new Rectangle(0, 0);
        srcArea = getRectangleOnScreen(getNormalizedRectangle(vd, clipRect));
        //TODO
        throw new Error("Not implemented"); // TODO implement
        //return getRectangle(vd, srcArea);
    }

    public float[] supportsArbitraryHorizontalScaling() {
        return new float[] { 0.001f, 4.0f };
    }

    public float[] supportsArbitraryVerticalScaling() {
        return new float[] { 0.001f, 4.0f };
    }

    public float[] getHorizontalScalingFactors() {
        throw new Error("Not implemented"); // TODO implement
    }

    public float[] getVerticalScalingFactors() {
        throw new Error("Not implemented"); // TODO implement
    }

    public byte getPositioningCapability() {
        return POS_CAP_FULL;
    }

    public Component getControlComponent() {
        throw new Error("Not implemented"); // TODO implement
    }

    private int plane = 0;
    // source (clip) and destination areas in normalized screen coordinates
    private HScreenRectangle srcArea = new HScreenRectangle(0.0f, 0.0f, 1.0f, 1.0f);
    private HScreenRectangle dstArea = new HScreenRectangle(0.0f, 0.0f, 1.0f, 1.0f);
}
| Fixed handling of unimplemented feature in VideoControl.setClipRegion()
| src/libbluray/bdj/java/org/videolan/media/content/playlist/VideoControl.java | Fixed handling of unimplemented feature in VideoControl.setClipRegion() |
|
Java | lgpl-2.1 | 6dcf9fa17c37e18f91824311c9544c76e7bf4f48 | 0 | CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine | /*
* Copyright (C) 2002-2004 David Pavlis <[email protected]>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.jetel.interpreter;
import java.math.BigDecimal;
import java.util.Calendar;
import java.util.Date;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jetel.data.DataField;
import org.jetel.data.DataRecord;
import org.jetel.data.primitive.CloverDouble;
import org.jetel.data.primitive.CloverInteger;
import org.jetel.data.primitive.CloverLong;
import org.jetel.data.primitive.DecimalFactory;
import org.jetel.data.primitive.HugeDecimal;
import org.jetel.data.primitive.Numeric;
import org.jetel.exception.BadDataFormatException;
import org.jetel.interpreter.node.*;
import org.jetel.metadata.DataFieldMetadata;
import org.jetel.util.Compare;
import org.jetel.util.StringUtils;
/**
* Executor of FilterExpression parse tree.
*
* @author dpavlis
* @since 16.9.2004
*
* Executor of FilterExpression parse tree
*/
public class TransformLangExecutor implements TransformLangParserVisitor,
TransformLangParserConstants{
// Break-propagation modes recorded while unwinding loops/functions.
public static final int BREAK_BREAK=1;
public static final int BREAK_CONTINUE=2;
public static final int BREAK_RETURN=3;

// Runtime evaluation stack holding intermediate values and variables.
protected Stack stack;
// True while a break/continue/return is propagating up the parse tree.
protected boolean breakFlag;
// Which BREAK_* mode is propagating when breakFlag is set.
protected int breakType;
// Parameters that transformation code may reference.
protected Properties globalParameters;
// Source records from which referenced input fields are resolved.
protected DataRecord[] inputRecords;
// Target records receiving assignments to output fields.
protected DataRecord[] outputRecords;
protected Node emptyNode; // used as replacement for empty statements

static Log logger = LogFactory.getLog(TransformLangExecutor.class);
/**
 * Creates an executor with an empty evaluation stack.
 *
 * @param globalParameters global parameters that transformation code may
 *        reference; may be null
 */
public TransformLangExecutor(Properties globalParameters) {
    this.globalParameters = globalParameters;
    this.stack = new Stack();
    this.breakFlag = false;
    // stand-in node substituted wherever the parse tree has an empty statement
    this.emptyNode = new SimpleNode(Integer.MAX_VALUE);
}
/** Creates an executor with no global parameters. */
public TransformLangExecutor() {
this(null);
}
/**
 * Set input data records for processing.<br>
 * Referenced input data fields will be resolved from
 * these data records.
 *
 * @param inputRecords array of input data records carrying values (indexed by record number)
 */
public void setInputRecords(DataRecord[] inputRecords){
this.inputRecords=inputRecords;
}
/**
 * Set output data records for processing.<br>
 * Referenced output data fields will be resolved from
 * these data records - assignment (in code) to output data field
 * will result in assignment to one of these data records.
 *
 * @param outputRecords array of output data records for setting values (indexed by record number)
 */
public void setOutputRecords(DataRecord[] outputRecords){
this.outputRecords=outputRecords;
}
/**
 * Set global parameters which may be referenced from within the
 * transformation source code.
 *
 * @param parameters properties looked up by global-parameter literals (may be null)
 */
public void setGlobalParameters(Properties parameters){
this.globalParameters=parameters;
}
/**
 * Method which returns result of executing parse tree.<br>
 * Basically, it returns whatever object was left on top of executor's
 * stack.
 *
 * @return object popped from the top of the evaluation stack (may be null)
 */
public Object getResult() {
return stack.pop();
}
/**
 * Returns the current value of the global variable stored at the given slot.
 *
 * @param varSlot slot index assigned to the variable at parse time
 * @return the variable's value
 */
public Object getGlobalVariable(int varSlot){
return stack.getGlobalVar(varSlot);
}
/**
 * Stores a value into the global variable at the given slot.
 *
 * @param varSlot slot index assigned to the variable at parse time
 * @param value   new value for the variable
 */
public void setGlobalVariable(int varSlot,Object value){
stack.storeGlobalVar(varSlot,value);
}
/* *********************************************************** */
/* implementation of visit methods for each class of AST node */
/* *********************************************************** */
/* it seems to be necessary to define a visit() method for SimpleNode */
/** Fallback visit for generic nodes; intentionally a no-op that returns the payload unchanged. */
public Object visit(SimpleNode node, Object data) {
// throw new TransformLangExecutorRuntimeException(node,
// "Error: Call to visit for SimpleNode");
return data;
}
/** Executes every top-level statement of the parse tree in document order. */
public Object visit(CLVFStart node, Object data) {
    final int count = node.jjtGetNumChildren();
    for (int child = 0; child < count; child++) {
        node.jjtGetChild(child).jjtAccept(this, data);
    }
    return data; // visitor contract: hand the payload back unchanged
}
/** Evaluates all children of a stand-alone expression tree in order. */
public Object visit(CLVFStartExpression node, Object data) {
    final int count = node.jjtGetNumChildren();
    for (int child = 0; child < count; child++) {
        node.jjtGetChild(child).jjtAccept(this, data);
    }
    return data; // visitor contract: hand the payload back unchanged
}
/**
 * Short-circuit logical OR: the right operand is evaluated only when the
 * left one is FALSE. Both operands must evaluate to Boolean; the result
 * (Stack.TRUE_VAL/FALSE_VAL) is pushed on the stack.
 */
public Object visit(CLVFOr node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a=stack.pop();
if (! (a instanceof Boolean)){
Object params[]=new Object[]{a};
throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
}else{
if (((Boolean)a).booleanValue()){
// short-circuit: left operand TRUE -> push TRUE, skip right operand
stack.push(Stack.TRUE_VAL);
return data;
}
}
// left operand was FALSE -> result is the value of the right operand
node.jjtGetChild(1).jjtAccept(this, data);
a=stack.pop();
if (! (a instanceof Boolean)){
Object params[]=new Object[]{a};
throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
}
if (((Boolean) a).booleanValue()) {
stack.push(Stack.TRUE_VAL);
} else {
stack.push(Stack.FALSE_VAL);
}
return data;
}
/**
 * Short-circuit logical AND: the right operand is evaluated only when the
 * left one is TRUE. Both operands must evaluate to Boolean; the result
 * (Stack.TRUE_VAL/FALSE_VAL) is pushed on the stack.
 */
public Object visit(CLVFAnd node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a=stack.pop();
if (! (a instanceof Boolean)){
Object params[]=new Object[]{a};
throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
}else{
if (!((Boolean)a).booleanValue()){
// short-circuit: left operand FALSE -> push FALSE, skip right operand
stack.push(Stack.FALSE_VAL);
return data;
}
}
// left operand was TRUE -> result is the value of the right operand
node.jjtGetChild(1).jjtAccept(this, data);
a=stack.pop();
if (! (a instanceof Boolean)){
Object params[]=new Object[]{a};
throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
}
if (!((Boolean)a).booleanValue()) {
stack.push(Stack.FALSE_VAL);
return data;
} else {
stack.push(Stack.TRUE_VAL);
return data;
}
}
/**
 * Evaluates a comparison (==, <, >, <=, >=, !=, regex ~=) of the node's two
 * child expressions and pushes Stack.TRUE_VAL / Stack.FALSE_VAL.
 *
 * Fix: LESS_THAN and GREATER_THAN previously tested {@code cmpResult == -1}
 * and {@code == 1}, but compareTo()/Compare.compare() only guarantee the
 * SIGN of the result, not a magnitude of exactly 1 — they now test the sign.
 * Also corrected the misspelled "cmparison" in the boolean-operator message.
 */
public Object visit(CLVFComparison node, Object data) {
    int cmpResult = 2; // sentinel: never reaches the switch on a successful comparison
    boolean lValue = false;
    // special handling for regular-expression matching
    if (node.cmpType == REGEX_EQUAL) {
        node.jjtGetChild(0).jjtAccept(this, data);
        Object field1 = stack.pop();
        node.jjtGetChild(1).jjtAccept(this, data);
        Object field2 = stack.pop();
        if (field1 instanceof CharSequence && field2 instanceof Matcher) {
            // reuse the literal's Matcher against the new input sequence
            Matcher regex = (Matcher) field2;
            regex.reset((CharSequence) field1);
            lValue = regex.matches();
        } else {
            Object[] arguments = { field1, field2 };
            throw new TransformLangExecutorRuntimeException(node, arguments,
                    "regex equal - wrong type of literal(s)");
        }
    // other types of comparison
    } else {
        node.jjtGetChild(0).jjtAccept(this, data);
        Object a = stack.pop();
        node.jjtGetChild(1).jjtAccept(this, data);
        Object b = stack.pop();
        try {
            if (a instanceof Numeric && b instanceof Numeric) {
                cmpResult = ((Numeric) a).compareTo((Numeric) b);
            } else if (a instanceof Date && b instanceof Date) {
                cmpResult = ((Date) a).compareTo((Date) b);
            } else if (a instanceof CharSequence
                    && b instanceof CharSequence) {
                cmpResult = Compare.compare((CharSequence) a,
                        (CharSequence) b);
            } else if (a instanceof Boolean && b instanceof Boolean) {
                // booleans support only (in)equality tests
                if (node.cmpType == EQUAL || node.cmpType == NON_EQUAL) {
                    cmpResult = ((Boolean) a).equals(b) ? 0 : -1;
                } else {
                    Object arguments[] = { a, b };
                    throw new TransformLangExecutorRuntimeException(node, arguments,
                            "compare - unsupported comparison operator [" + tokenImage[node.cmpType] + "] for literals/expressions");
                }
            } else {
                Object arguments[] = { a, b };
                throw new TransformLangExecutorRuntimeException(node, arguments,
                        "compare - incompatible literals/expressions");
            }
        } catch (ClassCastException ex) {
            Object arguments[] = { a, b };
            throw new TransformLangExecutorRuntimeException(node, arguments,
                    "compare - incompatible literals/expressions");
        }
        switch (node.cmpType) {
        case EQUAL:
            if (cmpResult == 0) {
                lValue = true;
            }
            break;// equal
        case LESS_THAN:
            // sign test, not ==-1: comparators may return any negative value
            if (cmpResult < 0) {
                lValue = true;
            }
            break;// less than
        case GREATER_THAN:
            // sign test, not ==1: comparators may return any positive value
            if (cmpResult > 0) {
                lValue = true;
            }
            break;// greater than
        case LESS_THAN_EQUAL:
            if (cmpResult <= 0) {
                lValue = true;
            }
            break;// less than equal
        case GREATER_THAN_EQUAL:
            if (cmpResult >= 0) {
                lValue = true;
            }
            break;// greater than equal
        case NON_EQUAL:
            if (cmpResult != 0) {
                lValue = true;
            }
            break;
        default:
            // this should never happen !!!
            logger.fatal("Internal error: Unsupported comparison operator !");
            throw new RuntimeException("Internal error - Unsupported comparison operator !");
        }
    }
    stack.push(lValue ? Stack.TRUE_VAL : Stack.FALSE_VAL);
    return data;
}
/**
 * Binary '+': numeric addition, date + N days, or string concatenation
 * (when the left operand is a CharSequence). Result is pushed on the stack.
 * NULL operands are rejected.
 */
public Object visit(CLVFAddNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object b = stack.pop();
if (a == null || b == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"add - NULL value not allowed");
}
// if (!(b instanceof Numeric || b instanceof CharSequence)) {
// throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
// "add - wrong type of literal");
// }
try {
if (a instanceof Numeric && b instanceof Numeric) {
// work on a duplicate so the left operand is not mutated
Numeric result = ((Numeric) a).duplicateNumeric();
result.add((Numeric) b);
stack.push(result);
} else if (a instanceof Date && b instanceof Numeric) {
// date + N shifts the date by N days
Calendar result = Calendar.getInstance();
result.setTime((Date) a);
result.add(Calendar.DATE, ((Numeric) b).getInt());
stack.push(result.getTime());
} else if (a instanceof CharSequence) {
// string concatenation; non-CharSequence right operand is stringified
CharSequence a1 = (CharSequence) a;
StringBuffer buf=new StringBuffer(a1.length()*2);
StringUtils.strBuffAppend(buf,a1);
if (b instanceof CharSequence) {
StringUtils.strBuffAppend(buf,(CharSequence)b);
} else {
buf.append(b);
}
stack.push(buf);
} else {
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"add - wrong type of literal(s)");
}
} catch (ClassCastException ex) {
Object arguments[] = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"add - wrong type of literal(s)");
}
return data;
}
/**
 * Binary '-': numeric subtraction or date - N days.
 * The right operand must be Numeric; NULL operands are rejected.
 */
public Object visit(CLVFSubNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object b = stack.pop();
if (a == null || b == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"sub - NULL value not allowed");
}
if (!(b instanceof Numeric)) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
"sub - wrong type of literal");
}
if (a instanceof Numeric) {
// work on a duplicate so the left operand is not mutated
Numeric result = ((Numeric) a).duplicateNumeric();
result.sub((Numeric) b);
stack.push(result);
} else if (a instanceof Date) {
// date - N shifts the date back by N days
Calendar result = Calendar.getInstance();
result.setTime((Date) a);
result.add(Calendar.DATE, ((Numeric) b).getInt() * -1);
stack.push(result.getTime());
} else {
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"sub - wrong type of literal(s)");
}
return data;
}
/**
 * Binary '*': numeric multiplication of the two child expressions.
 * Both operands must be Numeric; NULL operands are rejected.
 */
public Object visit(CLVFMulNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object b = stack.pop();
if (a == null || b == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"mul - NULL value not allowed");
}
if (!(b instanceof Numeric)) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
"mul - wrong type of literal");
}
if (a instanceof Numeric) {
// work on a duplicate so the left operand is not mutated
Numeric result = ((Numeric) a).duplicateNumeric();
result.mul((Numeric) b);
stack.push(result);
} else {
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"mul - wrong type of literal(s)");
}
return data;
}
/**
 * Binary '/': numeric division. Division errors (e.g. by zero) surface as
 * a runtime exception carrying both operands.
 */
public Object visit(CLVFDivNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object b = stack.pop();
if (a == null || b == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"div - NULL value not allowed");
}
if (!(b instanceof Numeric)) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
"div - wrong type of literal");
}
if (a instanceof Numeric) {
// work on a duplicate so the left operand is not mutated
Numeric result = ((Numeric) a).duplicateNumeric();
try{
result.div((Numeric) b);
}catch(ArithmeticException ex){
// typically division by zero; re-wrap with node context
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,"div - aritmetic exception - "+ex.getMessage());
}
stack.push(result);
} else {
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"div - wrong type of literal(s)");
}
return data;
}
/**
 * Binary '%': numeric modulo of the two child expressions.
 * Both operands must be Numeric; NULL operands are rejected.
 */
public Object visit(CLVFModNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object b = stack.pop();
if (a == null || b == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"mod - NULL value not allowed");
}
if (!(b instanceof Numeric)) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
"mod - wrong type of literal");
}
if (a instanceof Numeric) {
// work on a duplicate so the left operand is not mutated
Numeric result = ((Numeric) a).duplicateNumeric();
result.mod((Numeric) b);
stack.push(result);
} else {
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"mod - wrong type of literal(s)");
}
return data;
}
/** Logical NOT: pops a Boolean and pushes its negation; any other type is an error. */
public Object visit(CLVFNegation node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    final Object operand = stack.pop();
    if (!(operand instanceof Boolean)) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { operand },
                "logical condition does not evaluate to BOOLEAN value");
    }
    stack.push(((Boolean) operand).booleanValue() ? Stack.FALSE_VAL : Stack.TRUE_VAL);
    return data;
}
/**
 * substring(str, from, length): pushes the subsequence of str starting at
 * index 'from' (0-based) spanning 'length' characters.
 * Children: 0 = string, 1 = from, 2 = length. NULL arguments are rejected.
 */
public Object visit(CLVFSubStrNode node, Object data) {
int length, from;
node.jjtGetChild(0).jjtAccept(this, data);
Object str = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object fromO = stack.pop();
node.jjtGetChild(2).jjtAccept(this, data);
Object lengthO = stack.pop();
if (lengthO == null || fromO == null || str == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { lengthO,
fromO, str }, "substring - NULL value not allowed");
}
try {
length = ((Numeric) lengthO).getInt();
from = ((Numeric) fromO).getInt();
} catch (Exception ex) {
Object arguments[] = { lengthO, fromO, str };
throw new TransformLangExecutorRuntimeException(node,arguments, "substring - "
+ ex.getMessage());
}
if (str instanceof CharSequence) {
// NOTE(review): out-of-range from/length propagate as IndexOutOfBoundsException
// from subSequence — confirm whether that is the intended error surface
stack.push(((CharSequence) str).subSequence(from, from + length));
} else {
Object[] arguments = { lengthO, fromO, str };
throw new TransformLangExecutorRuntimeException(node,arguments,
"substring - wrong type of literal(s)");
}
return data;
}
/** uppercase(str): pushes the argument converted to upper case, character by character. */
public Object visit(CLVFUppercaseNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    final Object operand = stack.pop();
    if (!(operand instanceof CharSequence)) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { operand },
                "uppercase - wrong type of literal");
    }
    final CharSequence seq = (CharSequence) operand;
    final int len = seq.length();
    // reuse the node's buffer to avoid per-call allocation
    node.strBuf.setLength(0);
    node.strBuf.ensureCapacity(len);
    for (int i = 0; i < len; i++) {
        node.strBuf.append(Character.toUpperCase(seq.charAt(i)));
    }
    stack.push(node.strBuf);
    return data;
}
/** lowercase(str): pushes the argument converted to lower case, character by character. */
public Object visit(CLVFLowercaseNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    final Object operand = stack.pop();
    if (!(operand instanceof CharSequence)) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { operand },
                "lowercase - wrong type of literal");
    }
    final CharSequence seq = (CharSequence) operand;
    final int len = seq.length();
    // reuse the node's buffer to avoid per-call allocation
    node.strBuf.setLength(0);
    node.strBuf.ensureCapacity(len);
    for (int i = 0; i < len; i++) {
        node.strBuf.append(Character.toLowerCase(seq.charAt(i)));
    }
    stack.push(node.strBuf);
    return data;
}
/**
 * trim(str): strips leading and trailing spaces and tabs (only ' ' and '\t',
 * not all whitespace) and pushes the trimmed subsequence, or "" when the
 * input consists solely of those characters.
 */
public Object visit(CLVFTrimNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
int start, end;
if (a instanceof CharSequence) {
CharSequence seq = (CharSequence) a;
int length = seq.length();
// advance start past leading spaces/tabs
for (start = 0; start < length; start++) {
if (seq.charAt(start) != ' ' && seq.charAt(start) != '\t') {
break;
}
}
// retreat end past trailing spaces/tabs
for (end = length - 1; end >= 0; end--) {
if (seq.charAt(end) != ' ' && seq.charAt(end) != '\t') {
break;
}
}
if (start > end)
stack.push("");
else
stack.push(seq.subSequence(start, end + 1));
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"trim - wrong type of literal");
}
return data;
}
/**
 * length(str): pushes the character count of the string argument as a
 * CloverInteger.
 *
 * Fix: corrected the misspelled error message ("lenght" -> "length").
 */
public Object visit(CLVFLengthNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object a = stack.pop();
    if (a instanceof CharSequence) {
        stack.push(new CloverInteger(((CharSequence) a).length()));
    } else {
        Object[] arguments = { a };
        throw new TransformLangExecutorRuntimeException(node, arguments,
                "length - wrong type of literal");
    }
    return data;
}
/** today(): pushes the time held by the executor stack's shared calendar. */
public Object visit(CLVFTodayNode node, Object data) {
stack.push(stack.calendar.getTime() );
return data;
}
/** isnull(expr): pushes TRUE when the child expression evaluates to null, FALSE otherwise. */
public Object visit(CLVFIsNullNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    stack.push(stack.pop() == null ? Stack.TRUE_VAL : Stack.FALSE_VAL);
    return data;
}
/** nvl(expr, fallback): pushes expr when non-null, otherwise evaluates and pushes fallback. */
public Object visit(CLVFNVLNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    final Object value = stack.pop();
    if (value != null) {
        stack.push(value);
    } else {
        // evaluate the fallback expression; its result is left on the stack
        node.jjtGetChild(1).jjtAccept(this, data);
    }
    return data;
}
/** Pushes the constant value attached to the literal node at parse time. */
public Object visit(CLVFLiteral node, Object data) {
stack.push(node.value);
return data;
}
/**
 * Resolves an input-field literal against the current input records and
 * pushes its value. Numeric fields are pushed as duplicates so later
 * arithmetic cannot mutate the underlying record data.
 */
public Object visit(CLVFInputFieldLiteral node, Object data) {
DataField field=inputRecords[node.recordNo].getField(node.fieldNo);
if (field instanceof Numeric){
// duplicate to protect the record's field from in-place mutation
stack.push(((Numeric)field).duplicateNumeric());
}else{
stack.push(field.getValue());
}
// old stack.push(inputRecords[node.recordNo].getField(node.fieldNo).getValue());
// we return reference to DataField so we can
// perform extra checking in special cases
// NOTE(review): unlike other visits this returns node.field instead of data — confirm intended
return node.field;
}
/** Output-field literal: intentionally pushes nothing; assignment is handled elsewhere. */
public Object visit(CLVFOutputFieldLiteral node, Object data) {
//stack.push(inputRecords[node.recordNo].getField(node.fieldNo));
// we return reference to DataField so we can
// perform extra checking in special cases
return data;
}
/** Pushes the named global parameter's value, or null when no parameters are set / key is absent. */
public Object visit(CLVFGlobalParameterLiteral node, Object data) {
stack.push(globalParameters!=null ? globalParameters.getProperty(node.name) : null);
return data;
}
/** Pushes the pre-compiled Matcher held by the regex literal node. */
public Object visit(CLVFRegexLiteral node, Object data) {
stack.push(node.matcher);
return data;
}
/**
 * concat(...): evaluates every child expression and appends its string form
 * to one buffer, which is pushed as the result. NULL arguments are rejected.
 */
public Object visit(CLVFConcatNode node, Object data) {
Object a;
StringBuffer strBuf = new StringBuffer(40);
int numChildren = node.jjtGetNumChildren();
for (int i = 0; i < numChildren; i++) {
node.jjtGetChild(i).jjtAccept(this, data);
a = stack.pop();
if (a instanceof CharSequence) {
StringUtils.strBuffAppend(strBuf,(CharSequence) a);
} else {
if (a != null) {
// non-string arguments are stringified via toString()
strBuf.append(a);
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"concat - wrong type of literal(s)");
}
}
}
stack.push(strBuf);
return data;
}
/**
 * dateadd(date, amount): shifts the date by 'amount' units of
 * node.calendarField (unit chosen at parse time) and pushes the new Date.
 */
public Object visit(CLVFDateAddNode node, Object data) {
int shiftAmount;
node.jjtGetChild(0).jjtAccept(this, data);
Object date = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object amount = stack.pop();
try {
shiftAmount = ((Numeric) amount).getInt();
} catch (Exception ex) {
Object arguments[] = { amount };
throw new TransformLangExecutorRuntimeException(node,arguments, "dateadd - "
+ ex.getMessage());
}
if (date instanceof Date) {
// node.calendar is reused across calls to avoid reallocation
node.calendar.setTime((Date) date);
node.calendar.add(node.calendarField, shiftAmount);
stack.push(node.calendar.getTime());
} else {
Object arguments[] = { date };
throw new TransformLangExecutorRuntimeException(node,arguments,
"dateadd - no Date expression");
}
return data;
}
/**
 * date2num(date): extracts the calendar field selected at parse time
 * (node.calendarField) from the date and pushes it as a CloverInteger.
 */
public Object visit(CLVFDate2NumNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object date = stack.pop();
if (date instanceof Date) {
node.calendar.setTime((Date) date);
stack.push(new CloverInteger(node.calendar.get(node.calendarField)));
} else {
Object arguments[] = { date };
throw new TransformLangExecutorRuntimeException(node,arguments,
"date2num - no Date expression");
}
return data;
}
/**
 * datediff(date1, date2): pushes (date1 - date2) expressed in the unit
 * selected at parse time (node.calendarField) as a CloverInteger.
 *
 * Fix: the second-based cases previously narrowed the long difference to
 * int BEFORE dividing ("(int) diffSec / 60"), overflowing for spans beyond
 * ~68 years; the division is now done in long arithmetic and narrowed last.
 */
public Object visit(CLVFDateDiffNode node, Object data) {
    Object date1, date2;
    node.jjtGetChild(0).jjtAccept(this, data);
    date1 = stack.pop();
    node.jjtGetChild(1).jjtAccept(this, data);
    date2 = stack.pop();
    if (date1 instanceof Date && date2 instanceof Date) {
        long diffSec = (((Date) date1).getTime() - ((Date) date2).getTime()) / 1000;
        int diff = 0;
        switch (node.calendarField) {
        case Calendar.SECOND:
            // we have the difference in seconds
            diff = (int) diffSec;
            break;
        case Calendar.MINUTE:
            // how many minutes - divide in long, then narrow
            diff = (int) (diffSec / 60L);
            break;
        case Calendar.HOUR_OF_DAY:
            diff = (int) (diffSec / 3600L);
            break;
        case Calendar.DAY_OF_MONTH:
            // how many days is the difference
            diff = (int) (diffSec / 86400L);
            break;
        case Calendar.WEEK_OF_YEAR:
            // how many weeks
            diff = (int) (diffSec / 604800L);
            break;
        case Calendar.MONTH:
            // month difference counted on calendar fields, not elapsed time
            node.start.setTime((Date) date1);
            node.end.setTime((Date) date2);
            diff = (node.start.get(Calendar.MONTH) + node.start
                    .get(Calendar.YEAR) * 12)
                    - (node.end.get(Calendar.MONTH) + node.end
                            .get(Calendar.YEAR) * 12);
            break;
        case Calendar.YEAR:
            node.start.setTime((Date) date1);
            node.end.setTime((Date) date2);
            diff = node.start.get(node.calendarField)
                    - node.end.get(node.calendarField);
            break;
        default:
            Object arguments[] = { new Integer(node.calendarField) };
            throw new TransformLangExecutorRuntimeException(node, arguments,
                    "datediff - wrong difference unit");
        }
        stack.push(new CloverInteger(diff));
    } else {
        Object arguments[] = { date1, date2 };
        throw new TransformLangExecutorRuntimeException(node, arguments,
                "datediff - no Date expression");
    }
    return data;
}
/** Unary '-': pushes the numeric operand multiplied by -1; non-numeric operands are an error. */
public Object visit(CLVFMinusNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    final Object operand = stack.pop();
    if (!(operand instanceof Numeric)) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { operand },
                "minus - not a number");
    }
    // negate a copy so the original value on the stack is left untouched
    final Numeric negated = ((Numeric) operand).duplicateNumeric();
    negated.mul(Stack.NUM_MINUS_ONE);
    stack.push(negated);
    return data;
}
/**
 * replace(str, regex, with): pushes str with every regex match replaced.
 * The compiled Pattern/Matcher are cached on the node and recompiled only
 * when the regex argument changes between calls.
 */
public Object visit(CLVFReplaceNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object str = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object regexO = stack.pop();
node.jjtGetChild(2).jjtAccept(this, data);
Object withO = stack.pop();
if (withO == null || regexO == null || str == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { withO,
regexO, str }, "NULL value not allowed");
}
if (str instanceof CharSequence && withO instanceof CharSequence
&& regexO instanceof CharSequence) {
// recompile only when the regex differs from the cached one
if (node.pattern == null || !node.stored.equals(regexO)) {
node.pattern = Pattern.compile(((CharSequence) regexO)
.toString());
node.matcher = node.pattern.matcher((CharSequence) str);
node.stored = regexO;
} else {
node.matcher.reset((CharSequence) str);
}
stack.push(node.matcher.replaceAll(((CharSequence) withO)
.toString()));
} else {
Object[] arguments = { withO, regexO, str };
throw new TransformLangExecutorRuntimeException(node,arguments,
"replace - wrong type of literal(s)");
}
return data;
}
/**
 * num2str(num): pushes the string form of a number. Radix 10 uses the
 * value's own toString(); other radices are supported for int/long, and
 * radix 16 additionally for double (hex float form).
 */
public Object visit(CLVFNum2StrNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof Numeric) {
if (node.radix == 10) {
stack.push(((Numeric) a).toString());
} else {
if (a instanceof CloverInteger) {
stack.push(Integer.toString(((CloverInteger) a).getInt(),
node.radix));
} else if (a instanceof CloverLong) {
stack.push(Long.toString(((CloverLong) a).getLong(),
node.radix));
} else if (a instanceof CloverDouble && node.radix == 16) {
stack.push(Double.toHexString(((CloverDouble) a)
.getDouble()));
} else {
// decimals and doubles (other than hex) have no non-decimal rendering
Object[] arguments = { a, new Integer(node.radix) };
throw new TransformLangExecutorRuntimeException(node,
arguments,
"num2str - can't convert number to string using specified radix");
}
}
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node, arguments,
"num2str - wrong type of literal");
}
return data;
}
/**
 * str2num(str): parses the string into the numeric type selected at parse
 * time (node.numType) using node.radix. Decimals accept only radix 10;
 * doubles accept radix 10 and 16.
 */
public Object visit(CLVFStr2NumNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof CharSequence) {
try {
Object value = null;
switch (node.numType) {
case INT_VAR:
value = new CloverInteger(Integer.parseInt(
((CharSequence) a).toString(), node.radix));
break;
case LONG_VAR:
value = new CloverLong(Long.parseLong(((CharSequence) a)
.toString(), node.radix));
break;
case DECIMAL_VAR:
if (node.radix == 10) {
value = DecimalFactory.getDecimal(((CharSequence) a)
.toString());
} else {
Object[] arguments = { a, new Integer(node.radix) };
throw new TransformLangExecutorRuntimeException(node,
arguments,
"str2num - can't convert string to decimal number using specified radix");
}
break;
default:
// get double/number type
switch (node.radix) {
case 10:
case 16:
value = new CloverDouble(Double
.parseDouble(((CharSequence) a).toString()));
break;
default:
Object[] arguments = { a, new Integer(node.radix) };
throw new TransformLangExecutorRuntimeException(node,
arguments,
"str2num - can't convert string to number/double number using specified radix");
}
}
stack.push(value);
} catch (NumberFormatException ex) {
// unparsable text -> report with the offending value
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,
arguments, "str2num - can't convert \"" + a + "\"");
}
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node, arguments,
"str2num - wrong type of literal");
}
return data;
}
/** date2str(date): pushes the date formatted with the node's configured date format. */
public Object visit(CLVFDate2StrNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    final Object value = stack.pop();
    if (!(value instanceof Date)) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { value },
                "date2str - wrong type of literal");
    }
    stack.push(node.dateFormat.format((Date) value));
    return data;
}
/**
 * str2date(str): parses the string with the node's configured date format
 * and pushes the resulting Date; unparsable input raises a runtime error.
 */
public Object visit(CLVFStr2DateNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof CharSequence) {
try {
stack.push(node.dateFormat.parse(((CharSequence)a).toString()));
} catch (java.text.ParseException ex) {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"str2date - can't convert \"" + a + "\"");
}
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"str2date - wrong type of literal");
}
return data;
}
/**
 * iif(cond, thenExpr, elseExpr): evaluates exactly one branch depending on
 * the Boolean condition; the chosen branch's value ends up on the stack.
 */
public Object visit(CLVFIffNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object condition = stack.pop();
if (condition instanceof Boolean) {
if (((Boolean) condition).booleanValue()) {
node.jjtGetChild(1).jjtAccept(this, data);
} else {
node.jjtGetChild(2).jjtAccept(this, data);
}
// pop-then-push of the branch result; effectively leaves it in place
stack.push(stack.pop());
} else {
Object[] arguments = { condition };
throw new TransformLangExecutorRuntimeException(node,arguments,
"iif - condition does not evaluate to BOOLEAN value");
}
return data;
}
/** print_err(expr): prints the expression's value (or "<null>") to standard error. */
public Object visit(CLVFPrintErrNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    final Object message = stack.pop();
    System.err.println(message == null ? "<null>" : message);
    // stack.push(Stack.TRUE_VAL);
    return data;
}
/** Debug aid: dumps the evaluation stack (top first) to standard error. */
public Object visit(CLVFPrintStackNode node, Object data) {
for (int i=stack.top;i>=0;i--){
System.err.println("["+i+"] : "+stack.stack[i]);
}
return data;
}
/***************************************************************************
* Transformation Language executor starts here.
**************************************************************************/
/**
 * for-loop. Children: 0 = initialisation, 1 = condition, 2 = increment,
 * 3 = body (optional; replaced by emptyNode when absent). A break or
 * return raised inside the body terminates the loop; continue only skips
 * the break-out.
 */
public Object visit(CLVFForStatement node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data); // set up of the loop
boolean condition = false;
Node loopCondition = node.jjtGetChild(1);
Node increment = node.jjtGetChild(2);
Node body;
try{
body=node.jjtGetChild(3);
}catch(ArrayIndexOutOfBoundsException ex){
// loop without a body - substitute the shared no-op node
body=emptyNode;
}
try {
loopCondition.jjtAccept(this, data); // evaluate the condition
condition = ((Boolean) stack.pop()).booleanValue();
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
}catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
}
// loop execution
while (condition) {
body.jjtAccept(this, data);
stack.pop(); // in case there is anything on top of stack
// check for break or continue statements
if (breakFlag){
breakFlag=false;
if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) {
return data;
}
}
increment.jjtAccept(this, data);
stack.pop(); // in case there is anything on top of stack
// evaluate the condition
loopCondition.jjtAccept(this, data);
try {
condition = ((Boolean) stack.pop()).booleanValue();
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
}
}
return data;
}
/**
 * while-loop. Children: 0 = condition, 1 = body (optional; replaced by
 * emptyNode when absent). break/return terminate the loop; continue only
 * resets the flag and re-tests the condition.
 */
public Object visit(CLVFWhileStatement node, Object data) {
boolean condition = false;
Node loopCondition = node.jjtGetChild(0);
Node body;
try{
body=node.jjtGetChild(1);
}catch(ArrayIndexOutOfBoundsException ex){
// loop without a body - substitute the shared no-op node
body=emptyNode;
}
try {
loopCondition.jjtAccept(this, data); // evaluate the condition
condition = ((Boolean) stack.pop()).booleanValue();
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
}catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
}
// loop execution
while (condition) {
body.jjtAccept(this, data);
stack.pop(); // in case there is anything on top of stack
// check for break or continue statements
if (breakFlag){
breakFlag=false;
if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) return data;
}
// evaluate the condition
loopCondition.jjtAccept(this, data);
try {
condition = ((Boolean) stack.pop()).booleanValue();
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
}
}
return data;
}
/**
 * if/else statement. Children: 0 = condition, 1 = then-branch,
 * 2 = optional else-branch. Exactly one branch is executed.
 */
public Object visit(CLVFIfStatement node, Object data) {
boolean condition = false;
try {
node.jjtGetChild(0).jjtAccept(this, data); // evaluate the
// condition
condition = ((Boolean) stack.pop()).booleanValue();
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"condition does not evaluate to BOOLEAN value");
} catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
}
// first if
if (condition) {
node.jjtGetChild(1).jjtAccept(this, data);
//TODO: stack.pop(); -problem with return statement!!! // in case there is anything on top of stack
} else { // if else part exists
if (node.jjtGetNumChildren() > 2) {
node.jjtGetChild(2).jjtAccept(this, data);
//TODO: stack.pop(); // in case there is anything on top of stack
}
}
return data;
}
/**
 * do-while loop. Children: 0 = body, 1 = condition. The body executes at
 * least once; break/return terminate the loop, continue re-tests the
 * condition.
 */
public Object visit(CLVFDoStatement node, Object data) {
boolean condition = false;
Node loopCondition = node.jjtGetChild(1);
Node body = node.jjtGetChild(0);
// loop execution
do {
body.jjtAccept(this, data);
stack.pop(); // in case there is anything on top of stack
// check for break or continue statements
if (breakFlag){
breakFlag=false;
if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) return data;
}
// evaluate the condition
loopCondition.jjtAccept(this, data);
try {
condition = ((Boolean) stack.pop()).booleanValue();
}catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
}catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
}
} while (condition);
return data;
}
/**
 * switch statement. Child 0 is the switch expression; subsequent children
 * are case clauses, with an optional default clause last. Each case sees
 * the switch value re-pushed on the stack; without a break, matching falls
 * through to following cases. The default clause runs only when no case
 * matched.
 */
public Object visit(CLVFSwitchStatement node, Object data) {
// get value of switch && push/leave it on stack
boolean match=false;
node.jjtGetChild(0).jjtAccept(this, data);
Object switchVal=stack.pop();
int numChildren = node.jjtGetNumChildren();
int numCases = node.hasDefaultClause ? numChildren-1 : numChildren;
// loop over remaining case statements
for (int i = 1; i < numCases; i++) {
// each case expression pops the switch value for its comparison
stack.push(switchVal);
if (node.jjtGetChild(i).jjtAccept(this, data)==Stack.TRUE_VAL){
match=true;
}
if (breakFlag) {
// BREAK leaves the switch; RETURN propagates upward with the flag set
if (breakType == BREAK_BREAK) {
breakFlag = false;
}
break;
}
}
// test whether execute default branch
if (node.hasDefaultClause && !match){
node.jjtGetChild(numChildren-1).jjtAccept(this, data);
}
return data;
}
/**
 * One case clause of a switch. Pops the switch value, compares it with the
 * case literal (child 0) and, on a match, executes the clause body
 * (child 1). Returns Stack.TRUE_VAL when matched, Stack.FALSE_VAL otherwise.
 */
public Object visit(CLVFCaseExpression node, Object data) {
// test if literal (as child 0) is equal to data on stack
// if so, execute block (child 1)
boolean match = false;
Object switchVal = stack.pop();
node.jjtGetChild(0).jjtAccept(this, data);
Object value = stack.pop();
try {
if (switchVal instanceof Numeric) {
match = (((Numeric) value).compareTo((Numeric) switchVal) == 0);
} else if (switchVal instanceof CharSequence) {
match = (Compare.compare((CharSequence) switchVal,
(CharSequence) value) == 0);
} else if (switchVal instanceof Date) {
match = (((Date) switchVal).compareTo((Date) value) == 0);
} else if (switchVal instanceof Boolean) {
match = ((Boolean) switchVal).equals((Boolean) value);
}
} catch (ClassCastException ex) {
Object[] args=new Object[] {switchVal,value};
throw new TransformLangExecutorRuntimeException(node,args,"incompatible literals in case clause");
}catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid case value");
}
if (match){
node.jjtGetChild(1).jjtAccept(this, data);
return Stack.TRUE_VAL;
}else{
return Stack.FALSE_VAL;
}
}
/**
 * Implements the "++" operator. For a variable operand the variable is
 * incremented in place (numeric +1, date +1 day) and a copy of the new value
 * is pushed; for any other numeric expression an incremented copy of the
 * expression's value is pushed.
 *
 * @param node "++" AST node (single child: the operand)
 * @param data visitor context, returned unchanged
 * @return {@code data}
 * @throws TransformLangExecutorRuntimeException when the operand is neither
 *         numeric nor (for variables) a date
 */
public Object visit(CLVFPlusPlusNode node, Object data) {
    Node childNode = node.jjtGetChild(0);
    if (childNode instanceof CLVFVariableLiteral) {
        CLVFVariableLiteral varNode=(CLVFVariableLiteral) childNode;
        Object var=stack.getVar(varNode.localVar, varNode.varSlot);
        if (var instanceof Numeric){
            // mutate the stored variable, push a duplicate so later stack
            // operations cannot alias the variable's own value object
            ((Numeric)var).add(Stack.NUM_ONE);
            stack.push(((Numeric)var).duplicateNumeric());
        }else if (var instanceof Date){
            // date "++" means advancing by one calendar day
            stack.calendar.setTime((Date)var);
            stack.calendar.add(Calendar.DATE, 1);
            stack.push(stack.calendar.getTime());
        }else{
            throw new TransformLangExecutorRuntimeException(node,"variable is not of numeric or date type");
        }
    } else {
        childNode.jjtAccept(this, data);
        try {
            Numeric num = ((Numeric) stack.pop()).duplicateNumeric();
            num.add(Stack.NUM_ONE);
            stack.push(num);
        } catch (ClassCastException ex) {
            throw new TransformLangExecutorRuntimeException(node,"expression is not of numeric type");
        }catch (NullPointerException ex){
            throw new TransformLangExecutorRuntimeException(node,"missing or invalid numeric expression");
        }
    }
    return data;
}
/**
 * Implements the "--" operator. For a variable operand the variable is
 * decremented in place (numeric -1, date -1 day) and a copy of the new value
 * is pushed; for any other numeric expression a decremented copy of the
 * expression's value is pushed.
 *
 * @param node "--" AST node (single child: the operand)
 * @param data visitor context, returned unchanged
 * @return {@code data}
 * @throws TransformLangExecutorRuntimeException when the operand is neither
 *         numeric nor (for variables) a date
 */
public Object visit(CLVFMinusMinusNode node, Object data) {
    Node childNode = node.jjtGetChild(0);
    if (childNode instanceof CLVFVariableLiteral) {
        CLVFVariableLiteral varNode = (CLVFVariableLiteral) childNode;
        Object var = stack.getVar(varNode.localVar, varNode.varSlot);
        if (var instanceof Numeric) {
            ((Numeric) var).sub(Stack.NUM_ONE);
            stack.push(((Numeric) var).duplicateNumeric());
        } else if (var instanceof Date) {
            stack.calendar.setTime((Date) var);
            // BUGFIX: "--" on a date must go one day BACK; the original code
            // added +1 day (copy-paste from the "++" implementation)
            stack.calendar.add(Calendar.DATE, -1);
            stack.push(stack.calendar.getTime());
        } else {
            throw new TransformLangExecutorRuntimeException(node,"variable is not of numeric or date type");
        }
    } else {
        childNode.jjtAccept(this, data);
        try {
            Numeric num = ((Numeric) stack.pop()).duplicateNumeric();
            // BUGFIX: "--" must subtract one; the original called add()
            num.sub(Stack.NUM_ONE);
            stack.push(num);
        } catch (ClassCastException ex) {
            throw new TransformLangExecutorRuntimeException(node,"expression is not of numeric type");
        } catch (NullPointerException ex) {
            throw new TransformLangExecutorRuntimeException(node,"missing or invalid numeric expression");
        }
    }
    return data;
}
/**
 * Executes every statement of a block in order, discarding whatever each
 * statement may have left on top of the stack. Execution stops early when a
 * break/continue/return was signalled by a nested statement; the flag itself
 * is left for an enclosing construct to consume.
 *
 * @param node block AST node
 * @param data visitor context, returned unchanged
 * @return {@code data}
 */
public Object visit(CLVFBlock node, Object data) {
    final int statementCount = node.jjtGetNumChildren();
    for (int idx = 0; idx < statementCount; idx++) {
        node.jjtGetChild(idx).jjtAccept(this, data);
        // drop any leftover value the statement pushed
        stack.pop();
        // abort the rest of the block on break/continue/return
        if (breakFlag) {
            break;
        }
    }
    return data;
}
/*
* Loop & block & function control nodes
*/
/**
 * Signals that a {@code break} statement was reached; the nearest enclosing
 * loop/switch consumes the flag and stops iterating.
 */
public Object visit(CLVFBreakStatement node, Object data) {
    breakFlag = true; // we encountered break statement;
    breakType=BREAK_BREAK;
    return data;
}
/**
 * Signals that a {@code continue} statement was reached; the nearest enclosing
 * loop consumes the flag and starts its next iteration.
 */
public Object visit(CLVFContinueStatement node, Object data) {
    breakFlag = true; // we encountered continue statement;
    breakType= BREAK_CONTINUE;
    return data;
}
/**
 * Signals a {@code return} statement. The optional return expression (child 0)
 * is evaluated first so its value ends up on the stack for the function-call
 * machinery to pick up.
 */
public Object visit(CLVFReturnStatement node, Object data) {
    if (node.jjtHasChildren()){
        node.jjtGetChild(0).jjtAccept(this, data);
    }
    breakFlag = true;
    breakType = BREAK_RETURN;
    return data;
}
/**
 * Breakpoint support — not implemented yet; visiting a breakpoint is a no-op.
 */
public Object visit(CLVFBreakpointNode node, Object data) {
    // TODO
    return data;
}
/*
* Variable declarations
*/
/**
 * Declares a global or local variable and stores its default initial value
 * into the slot resolved at parse time ({@code node.localVar}/{@code varSlot}).
 * Numeric types start at 0, strings empty, dates "now", booleans false.
 *
 * @param node variable declaration AST node
 * @param data visitor context, returned unchanged
 * @return {@code data}
 * @throws TransformLangExecutorRuntimeException for an unknown variable type
 */
public Object visit(CLVFVarDeclaration node, Object data) {
    // test for duplicite declaration - should have been done before
    /*if (stack.symtab.containsKey(node.name)) {
        throw new TransformLangExecutorRuntimeException(node,
                "variable already declared - \"" + node.name + "\"");
    }*/
    Object value;
    // create global/local variable with a type-appropriate default value
    switch (node.type) {
    case INT_VAR:
        value= new CloverInteger(0);
        break;
    case LONG_VAR:
        value= new CloverLong(0);
        break;
    case DOUBLE_VAR:
        value= new CloverDouble(0);
        break;
    case DECIMAL_VAR:
        // decimal honours the declared length/precision when given
        if (node.length>0){
            if (node.precision>0){
                value = DecimalFactory.getDecimal(node.length,node.precision);
            }else{
                value = DecimalFactory.getDecimal(node.length,0);
            }
        }else{
            value= DecimalFactory.getDecimal();
        }
        break;
    case STRING_VAR:
        value= new StringBuffer();
        break;
    case DATE_VAR:
        value=new Date();
        break;
    case BOOLEAN_VAR:
        value= Stack.FALSE_VAL;
        break;
    default:
        throw new TransformLangExecutorRuntimeException(node,
                "variable declaration - "
                        + "unknown variable type for variable \""
                        + node.name + "\"");
    }
    stack.storeVar(node.localVar, node.varSlot, value);
    return data;
}
/**
 * Pushes the current value of a referenced variable onto the stack. A null
 * value is legal and is pushed as-is — consumers decide how to treat it.
 *
 * @param node variable literal AST node (slot resolved at parse time)
 * @param data visitor context, returned unchanged
 * @return {@code data}
 */
public Object visit(CLVFVariableLiteral node, Object data) {
    // variable can be null - no validity check is performed here
    stack.push(stack.getVar(node.localVar, node.varSlot));
    return data;
}
/**
 * Assigns the value of the right-hand expression (child 1) to the variable
 * referenced by child 0. Mutable holder types (Numeric, StringBuffer, Date)
 * are updated in place; Boolean is immutable, so the stored reference is
 * replaced instead.
 *
 * @param node assignment AST node
 * @param data visitor context, returned unchanged
 * @return {@code data}
 * @throws TransformLangExecutorRuntimeException on type mismatch, number
 *         format problems, or an unknown target variable
 */
public Object visit(CLVFAssignment node, Object data) {
    CLVFVariableLiteral childNode=(CLVFVariableLiteral) node.jjtGetChild(0);
    Object variable = stack.getVar(childNode.localVar,childNode.varSlot);
    node.jjtGetChild(1).jjtAccept(this, data);
    Object value = stack.pop();
    try {
        if (variable instanceof Numeric) {
            ((Numeric) variable).setValue((Numeric) value);
        } else if (variable instanceof StringBuffer) {
            // reuse the existing buffer: clear it, then append the new content
            StringBuffer var = (StringBuffer) variable;
            var.setLength(0);
            StringUtils.strBuffAppend(var,(CharSequence) value);
        } else if (variable instanceof Boolean) {
            stack.storeVar(childNode.localVar,childNode.varSlot, (Boolean)value); // boolean is not updatable - we replace the reference
            // stack.put(varName,((Boolean)value).booleanValue() ?
            // Stack.TRUE_VAL : Stack.FALSE_VAL);
        } else if (variable instanceof Date) {
            ((Date) variable).setTime(((Date) value).getTime());
        } else {
            // a null/unknown slot value means the variable was never declared
            throw new TransformLangExecutorRuntimeException(node,
                    "unknown variable \"" + childNode.varName + "\"");
        }
    } catch (ClassCastException ex) {
        throw new TransformLangExecutorRuntimeException(node,
                "invalid assignment of \"" + value + "\" to variable \""
                        + childNode.varName + "\" - incompatible data types");
    } catch (NumberFormatException ex){
        throw new TransformLangExecutorRuntimeException(node,
                "invalid assignment of number \"" + value + "\" to variable \"" + childNode.varName + "\" : "+ex.getMessage());
    } catch (TransformLangExecutorRuntimeException ex){
        throw ex;
    } catch (Exception ex){
        throw new TransformLangExecutorRuntimeException(node,
                "invalid assignment of \"" + value + "\" to variable \"" + childNode.varName + "\" : "+ex.getMessage());
    }
    return data;
}
/**
 * Maps a value onto an output record field. The node may carry several
 * alternative source expressions ({@code node.arity}); they are tried in
 * declaration order and the first one whose value can be assigned to the
 * target field wins. Only when the LAST alternative fails is the failure
 * reported.
 *
 * @param node mapping AST node
 * @param data visitor context, returned unchanged
 * @return {@code data}
 * @throws TransformLangExecutorRuntimeException when no alternative could be
 *         assigned to the target field
 */
public Object visit(CLVFMapping node, Object data) {
    DataField field = outputRecords[node.recordNo].getField(node.fieldNo);
    int arity = node.arity; // how many alternative source expressions we have
    Object value = null;
    try {
        // we try till success or no more options
        for (int i = 0; i < arity; i++) {
            node.jjtGetChild(i).jjtAccept(this, data);
            value = stack.pop();
            try {
                // TODO: small hack - Numeric fields take the typed setter
                if (field instanceof Numeric) {
                    ((Numeric) field).setValue((Numeric) value);
                } else {
                    field.setValue(value);
                }
            } catch (BadDataFormatException ex) {
                // BUGFIX: the last alternative has index arity-1; the original
                // test (i == arity) could never be true inside this loop, so a
                // failure of the final alternative was silently swallowed
                if (i == arity - 1)
                    throw ex;
                else
                    continue;
            } catch (Exception ex) {
                if (i == arity - 1)
                    throw ex;
                else
                    continue;
            }
            break; // success during assignment, finish looping
        }
    } catch (BadDataFormatException ex) {
        if (!outputRecords[node.recordNo].getField(node.fieldNo).getMetadata().isNullable()) {
            throw new TransformLangExecutorRuntimeException(node,"can't assign NULL to \"" + node.fieldName + "\"");
        } else {
            throw new TransformLangExecutorRuntimeException(node,"data format exception when mapping \"" + node.fieldName + "\" - assigning \""
                    + value + "\"");
        }
    } catch (TransformLangExecutorRuntimeException ex) {
        throw ex;
    } catch (Exception ex) {
        String msg = ex.getMessage();
        throw new TransformLangExecutorRuntimeException(node,
                (msg != null ? msg : "") +
                " when mapping \"" + node.fieldName + "\" (" + DataFieldMetadata.type2Str(field.getType())
                + ") - assigning \"" + value + "\" (" + (value != null ? value.getClass() : "unknown class") + ")");
    }
    return data;
}
/*
* Declaration & calling of Functions here
*/
/**
 * Calls a user-declared function. Evaluates the actual arguments (children)
 * onto the stack, opens a new call frame, moves the arguments into the
 * function's local-variable slots, runs the body until completion or a
 * {@code return}, then closes the frame. A return value, if any, is left on
 * the caller's stack.
 *
 * @param node function-call AST node with resolved {@code callNode}
 * @param data visitor context, returned unchanged
 * @return {@code data}
 */
public Object visit(CLVFFunctionCallStatement node, Object data) {
    //put call parameters on stack
    node.childrenAccept(this,data);
    CLVFFunctionDeclaration executionNode=node.callNode;
    // open call frame
    stack.pushFuncCallFrame();
    // store call parameters from stack as local variables; arguments were
    // pushed left-to-right, so popping fills slots from the last one down.
    // (safe here: stack.pop() does not depend on the loop index)
    for (int i=executionNode.numParams-1;i>=0; stack.storeLocalVar(i--,stack.pop()));
    // execute function body
    // loop execution
    Object returnData;
    int numChildren=executionNode.jjtGetNumChildren();
    for (int i=0;i<numChildren;i++){
        executionNode.jjtGetChild(i).jjtAccept(this,data);
        returnData=stack.pop(); // in case there is anything on top of stack
        // check for break or continue statements
        if (breakFlag){
            breakFlag=false;
            if (breakType==BREAK_RETURN){
                // re-push the return value for the caller, then stop the body
                if (returnData!=null)
                    stack.push(returnData);
                break;
            }
        }
    }
    stack.popFuncCallFrame();
    return data;
}
/**
 * Function bodies run only when the function is called; visiting the
 * declaration itself is deliberately a no-op.
 */
public Object visit(CLVFFunctionDeclaration node, Object data) {
    return data;
}
/**
 * Evaluates the single child expression of an expression statement; the
 * expression's value (if any) is left on the stack.
 */
public Object visit(CLVFStatementExpression node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    return data;
}
/**
 * Directly executes a declared function with the supplied argument values
 * (entry point for invoking a transform-language function from Java code
 * rather than from a function-call AST node).
 *
 * @param executionNode the function's declaration node
 * @param data argument values in declaration order; {@code null} means no
 *             arguments
 * @return the {@code data} array passed in; the function's return value, if
 *         any, is left on the executor's stack
 */
public Object executeFunction(CLVFFunctionDeclaration executionNode, Object[] data) {
    if (data == null) {
        data = new Object[0];
    }
    // open call frame
    stack.pushFuncCallFrame();
    // store the arguments as the function's local variables.
    // BUGFIX: the original loop update was stack.storeLocalVar(i--, data[i]);
    // Java evaluates argument lists left to right, so data[i] was read AFTER
    // the decrement - every argument landed one slot off and the final
    // iteration read data[-1] (ArrayIndexOutOfBoundsException).
    for (int i = 0; i < executionNode.numParams; i++) {
        stack.storeLocalVar(i, data[i]);
    }
    // execute function body statement by statement
    Object returnData;
    int numChildren = executionNode.jjtGetNumChildren();
    for (int i = 0; i < numChildren; i++) {
        executionNode.jjtGetChild(i).jjtAccept(this, data);
        returnData = stack.pop(); // in case there is anything on top of stack
        // check for break or continue statements
        if (breakFlag) {
            breakFlag = false;
            if (breakType == BREAK_RETURN) {
                // re-push the return value for the caller, then stop the body
                if (returnData != null)
                    stack.push(returnData);
                break;
            }
        }
    }
    stack.popFuncCallFrame();
    return data;
}
/*
* MATH functions log,log10,exp,pow,sqrt,round
*/
/**
 * sqrt(x): pushes the square root of a numeric operand as a CloverDouble.
 *
 * @throws TransformLangExecutorRuntimeException for non-numeric operands
 */
public Object visit(CLVFSqrtNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object a = stack.pop();
    if (a instanceof Numeric) {
        try{
            stack.push(new CloverDouble(Math.sqrt(((Numeric)a).getDouble()) ));
        }catch(Exception ex){
            throw new TransformLangExecutorRuntimeException(node,"Error when executing SQRT function",ex);
        }
    }else {
        Object[] arguments = { a};
        throw new TransformLangExecutorRuntimeException(node,arguments,
                "sqrt - wrong type of literal(s)");
    }
    return data;
}
/**
 * log(x): pushes the natural logarithm of a numeric operand as a CloverDouble.
 *
 * @throws TransformLangExecutorRuntimeException for non-numeric operands
 */
public Object visit(CLVFLogNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object a = stack.pop();
    if (a instanceof Numeric) {
        try{
            stack.push(new CloverDouble(Math.log(((Numeric)a).getDouble()) ));
        }catch(Exception ex){
            throw new TransformLangExecutorRuntimeException(node,"Error when executing LOG function",ex);
        }
    }else {
        Object[] arguments = { a};
        throw new TransformLangExecutorRuntimeException(node,arguments,
                "log - wrong type of literal(s)");
    }
    return data;
}
/**
 * log10(x): pushes the base-10 logarithm of a numeric operand as a
 * CloverDouble.
 *
 * @throws TransformLangExecutorRuntimeException for non-numeric operands
 */
public Object visit(CLVFLog10Node node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object a = stack.pop();
    if (a instanceof Numeric) {
        try{
            stack.push(new CloverDouble( Math.log10(((Numeric)a).getDouble())));
        }catch(Exception ex){
            throw new TransformLangExecutorRuntimeException(node,"Error when executing LOG10 function",ex);
        }
    }else {
        Object[] arguments = { a};
        throw new TransformLangExecutorRuntimeException(node,arguments,
                "log10 - wrong type of literal(s)");
    }
    return data;
}
/**
 * exp(x): pushes e raised to the numeric operand as a CloverDouble.
 *
 * @throws TransformLangExecutorRuntimeException for non-numeric operands
 */
public Object visit(CLVFExpNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object a = stack.pop();
    if (a instanceof Numeric) {
        try{
            stack.push(new CloverDouble( Math.exp(((Numeric)a).getDouble())));
        }catch(Exception ex){
            throw new TransformLangExecutorRuntimeException(node,"Error when executing EXP function",ex);
        }
    }else {
        Object[] arguments = { a};
        throw new TransformLangExecutorRuntimeException(node,arguments,
                "exp - wrong type of literal(s)");
    }
    return data;
}
/**
 * round(x): rounds a numeric operand to the closest long value (half-up, per
 * {@link Math#round(double)}) and pushes it as a CloverLong.
 *
 * @throws TransformLangExecutorRuntimeException for non-numeric operands
 */
public Object visit(CLVFRoundNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object a = stack.pop();
    if (a instanceof Numeric) {
        try{
            stack.push(new CloverLong(Math.round(((Numeric)a).getDouble())));
        }catch(Exception ex){
            throw new TransformLangExecutorRuntimeException(node,"Error when executing ROUND function",ex);
        }
    }else {
        Object[] arguments = { a};
        throw new TransformLangExecutorRuntimeException(node,arguments,
                "round - wrong type of literal(s)");
    }
    return data;
}
/**
 * pow(a, b): pushes a raised to the power b as a CloverDouble; both operands
 * must be numeric.
 *
 * @throws TransformLangExecutorRuntimeException for non-numeric operands
 */
public Object visit(CLVFPowNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object a = stack.pop();
    node.jjtGetChild(1).jjtAccept(this, data);
    Object b = stack.pop();
    if (a instanceof Numeric && b instanceof Numeric) {
        try{
            stack.push(new CloverDouble(Math.pow(((Numeric)a).getDouble(),
                    ((Numeric)b).getDouble())));
        }catch(Exception ex){
            throw new TransformLangExecutorRuntimeException(node,"Error when executing POW function",ex);
        }
    }else {
        Object[] arguments = { a, b };
        throw new TransformLangExecutorRuntimeException(node,arguments,
                "pow - wrong type of literal(s)");
    }
    return data;
}
/** pi(): pushes the shared PI constant. */
public Object visit(CLVFPINode node, Object data) {
    stack.push(Stack.NUM_PI);
    return data;
}
/**
 * trunc(x): for a date operand pushes the same day with the time-of-day
 * fields zeroed (midnight); for a numeric operand pushes the value truncated
 * to a long (CloverLong).
 *
 * @throws TransformLangExecutorRuntimeException for other operand types
 */
public Object visit(CLVFTruncNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object a = stack.pop();
    if (a instanceof Date ) {
        stack.calendar.setTime((Date)a);
        // zero out the time portion, keep the date portion
        stack.calendar.set(Calendar.HOUR_OF_DAY, 0);
        stack.calendar.set(Calendar.MINUTE , 0);
        stack.calendar.set(Calendar.SECOND , 0);
        stack.calendar.set(Calendar.MILLISECOND , 0);
        stack.push( stack.calendar.getTime() );
    }else if (a instanceof Numeric){
        stack.push(new CloverLong(((Numeric)a).getLong()));
    }else {
        Object[] arguments = { a };
        throw new TransformLangExecutorRuntimeException(node,arguments,
                "trunc - wrong type of literal(s)");
    }
    return data;
}
}
/*
* Copyright (C) 2002-2004 David Pavlis <[email protected]>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.jetel.interpreter;
import java.math.BigDecimal;
import java.util.Calendar;
import java.util.Date;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jetel.data.DataField;
import org.jetel.data.DataRecord;
import org.jetel.data.primitive.CloverDouble;
import org.jetel.data.primitive.CloverInteger;
import org.jetel.data.primitive.CloverLong;
import org.jetel.data.primitive.DecimalFactory;
import org.jetel.data.primitive.HugeDecimal;
import org.jetel.data.primitive.Numeric;
import org.jetel.exception.BadDataFormatException;
import org.jetel.interpreter.node.*;
import org.jetel.metadata.DataFieldMetadata;
import org.jetel.util.Compare;
import org.jetel.util.StringUtils;
/**
* Executor of FilterExpression parse tree.
*
* @author dpavlis
* @since 16.9.2004
*
* Executor of FilterExpression parse tree
*/
public class TransformLangExecutor implements TransformLangParserVisitor,
TransformLangParserConstants{
public static final int BREAK_BREAK=1;
public static final int BREAK_CONTINUE=2;
public static final int BREAK_RETURN=3;
protected Stack stack;
protected boolean breakFlag;
protected int breakType;
protected Properties globalParameters;
protected DataRecord[] inputRecords;
protected DataRecord[] outputRecords;
protected Node emptyNode; // used as replacement for empty statements
static Log logger = LogFactory.getLog(TransformLangExecutor.class);
/**
* Constructor
*/
public TransformLangExecutor(Properties globalParameters) {
stack = new Stack();
breakFlag = false;
this.globalParameters=globalParameters;
emptyNode = new SimpleNode(Integer.MAX_VALUE);
}
public TransformLangExecutor() {
this(null);
}
/**
* Set input data records for processing.<br>
* Referenced input data fields will be resolved from
* these data records.
*
* @param inputRecords array of input data records carrying values
*/
public void setInputRecords(DataRecord[] inputRecords){
this.inputRecords=inputRecords;
}
/**
* Set output data records for processing.<br>
* Referenced output data fields will be resolved from
* these data records - assigment (in code) to output data field
* will result in assigment to one of these data records.
*
* @param outputRecords array of output data records for setting values
*/
public void setOutputRecords(DataRecord[] outputRecords){
this.outputRecords=outputRecords;
}
/**
* Set global parameters which may be reference from within the
* transformation source code
*
* @param parameters
*/
public void setGlobalParameters(Properties parameters){
this.globalParameters=parameters;
}
/**
* Method which returns result of executing parse tree.<br>
* Basically, it returns whatever object was left on top of executor's
* stack.
*
* @return
*/
public Object getResult() {
return stack.pop();
}
public Object getGlobalVariable(int varSlot){
return stack.getGlobalVar(varSlot);
}
public void setGlobalVariable(int varSlot,Object value){
stack.storeGlobalVar(varSlot,value);
}
/* *********************************************************** */
/* implementation of visit methods for each class of AST node */
/* *********************************************************** */
/* it seems to be necessary to define a visit() method for SimpleNode */
public Object visit(SimpleNode node, Object data) {
// throw new TransformLangExecutorRuntimeException(node,
// "Error: Call to visit for SimpleNode");
return data;
}
public Object visit(CLVFStart node, Object data) {
int i, k = node.jjtGetNumChildren();
for (i = 0; i < k; i++)
node.jjtGetChild(i).jjtAccept(this, data);
return data; // this value is ignored in this example
}
public Object visit(CLVFStartExpression node, Object data) {
int i, k = node.jjtGetNumChildren();
for (i = 0; i < k; i++)
node.jjtGetChild(i).jjtAccept(this, data);
return data; // this value is ignored in this example
}
public Object visit(CLVFOr node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a=stack.pop();
if (! (a instanceof Boolean)){
Object params[]=new Object[]{a};
throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
}else{
if (((Boolean)a).booleanValue()){
stack.push(Stack.TRUE_VAL);
return data;
}
}
node.jjtGetChild(1).jjtAccept(this, data);
a=stack.pop();
if (! (a instanceof Boolean)){
Object params[]=new Object[]{a};
throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
}
if (((Boolean) a).booleanValue()) {
stack.push(Stack.TRUE_VAL);
} else {
stack.push(Stack.FALSE_VAL);
}
return data;
}
public Object visit(CLVFAnd node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a=stack.pop();
if (! (a instanceof Boolean)){
Object params[]=new Object[]{a};
throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
}else{
if (!((Boolean)a).booleanValue()){
stack.push(Stack.FALSE_VAL);
return data;
}
}
node.jjtGetChild(1).jjtAccept(this, data);
a=stack.pop();
if (! (a instanceof Boolean)){
Object params[]=new Object[]{a};
throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
}
if (!((Boolean)a).booleanValue()) {
stack.push(Stack.FALSE_VAL);
return data;
} else {
stack.push(Stack.TRUE_VAL);
return data;
}
}
/**
 * Evaluates a comparison expression (==, !=, &lt;, &gt;, &lt;=, &gt;=, ~=)
 * over its two child operands and pushes TRUE_VAL/FALSE_VAL. Regex matching
 * (~=) requires a CharSequence on the left and a precompiled Matcher on the
 * right; ordinary comparisons dispatch on the operand types (Numeric, Date,
 * CharSequence, Boolean).
 *
 * @param node comparison AST node ({@code node.cmpType} selects the operator)
 * @param data visitor context, returned unchanged
 * @return {@code data}
 * @throws TransformLangExecutorRuntimeException on incompatible operand types
 *         or an operator unsupported for the operand types
 */
public Object visit(CLVFComparison node, Object data) {
    int cmpResult = 2;
    boolean lValue = false;
    // special handling for Regular expression
    if (node.cmpType == REGEX_EQUAL) {
        node.jjtGetChild(0).jjtAccept(this, data);
        Object field1 = stack.pop();
        node.jjtGetChild(1).jjtAccept(this, data);
        Object field2 = stack.pop();
        if (field1 instanceof CharSequence && field2 instanceof Matcher) {
            Matcher regex = (Matcher) field2;
            regex.reset((CharSequence) field1);
            lValue = regex.matches();
        } else {
            Object[] arguments = { field1, field2 };
            throw new TransformLangExecutorRuntimeException(node, arguments,
                    "regex equal - wrong type of literal(s)");
        }
        // other types of comparison
    } else {
        node.jjtGetChild(0).jjtAccept(this, data);
        Object a = stack.pop();
        node.jjtGetChild(1).jjtAccept(this, data);
        Object b = stack.pop();
        try {
            if (a instanceof Numeric && b instanceof Numeric) {
                cmpResult = ((Numeric) a).compareTo((Numeric) b);
            } else if (a instanceof Date && b instanceof Date) {
                cmpResult = ((Date) a).compareTo((Date) b);
            } else if (a instanceof CharSequence && b instanceof CharSequence) {
                cmpResult = Compare.compare((CharSequence) a, (CharSequence) b);
            } else if (a instanceof Boolean && b instanceof Boolean) {
                // booleans only support (non-)equality
                if (node.cmpType == EQUAL || node.cmpType == NON_EQUAL) {
                    cmpResult = ((Boolean) a).equals(b) ? 0 : -1;
                } else {
                    Object arguments[] = { a, b };
                    throw new TransformLangExecutorRuntimeException(node, arguments,
                            "compare - unsupported comparison operator [" + tokenImage[node.cmpType] + "] for literals/expressions");
                }
            } else {
                Object arguments[] = { a, b };
                throw new TransformLangExecutorRuntimeException(node, arguments,
                        "compare - incompatible literals/expressions");
            }
        } catch (ClassCastException ex) {
            Object arguments[] = { a, b };
            throw new TransformLangExecutorRuntimeException(node, arguments,
                    "compare - incompatible literals/expressions");
        }
        switch (node.cmpType) {
        case EQUAL:
            lValue = (cmpResult == 0);
            break;
        case LESS_THAN:
            // BUGFIX: compareTo() only guarantees a negative value for "less
            // than", not exactly -1; test the sign instead of the magnitude
            lValue = (cmpResult < 0);
            break;
        case GREATER_THAN:
            // BUGFIX: likewise, any positive value means "greater than"
            lValue = (cmpResult > 0);
            break;
        case LESS_THAN_EQUAL:
            lValue = (cmpResult <= 0);
            break;
        case GREATER_THAN_EQUAL:
            lValue = (cmpResult >= 0);
            break;
        case NON_EQUAL:
            lValue = (cmpResult != 0);
            break;
        default:
            // this should never happen !!!
            logger.fatal("Internal error: Unsupported comparison operator !");
            throw new RuntimeException("Internal error - Unsupported comparison operator !");
        }
    }
    stack.push(lValue ? Stack.TRUE_VAL : Stack.FALSE_VAL);
    return data;
}
public Object visit(CLVFAddNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object b = stack.pop();
if (a == null || b == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"add - NULL value not allowed");
}
// if (!(b instanceof Numeric || b instanceof CharSequence)) {
// throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
// "add - wrong type of literal");
// }
try {
if (a instanceof Numeric && b instanceof Numeric) {
Numeric result = ((Numeric) a).duplicateNumeric();
result.add((Numeric) b);
stack.push(result);
} else if (a instanceof Date && b instanceof Numeric) {
Calendar result = Calendar.getInstance();
result.setTime((Date) a);
result.add(Calendar.DATE, ((Numeric) b).getInt());
stack.push(result.getTime());
} else if (a instanceof CharSequence) {
CharSequence a1 = (CharSequence) a;
StringBuffer buf=new StringBuffer(a1.length()*2);
StringUtils.strBuffAppend(buf,a1);
if (b instanceof CharSequence) {
StringUtils.strBuffAppend(buf,(CharSequence)b);
} else {
buf.append(b);
}
stack.push(buf);
} else {
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"add - wrong type of literal(s)");
}
} catch (ClassCastException ex) {
Object arguments[] = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"add - wrong type of literal(s)");
}
return data;
}
public Object visit(CLVFSubNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object b = stack.pop();
if (a == null || b == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"sub - NULL value not allowed");
}
if (!(b instanceof Numeric)) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
"sub - wrong type of literal");
}
if (a instanceof Numeric) {
Numeric result = ((Numeric) a).duplicateNumeric();
result.sub((Numeric) b);
stack.push(result);
} else if (a instanceof Date) {
Calendar result = Calendar.getInstance();
result.setTime((Date) a);
result.add(Calendar.DATE, ((Numeric) b).getInt() * -1);
stack.push(result.getTime());
} else {
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"sub - wrong type of literal(s)");
}
return data;
}
public Object visit(CLVFMulNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object b = stack.pop();
if (a == null || b == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"mul - NULL value not allowed");
}
if (!(b instanceof Numeric)) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
"mul - wrong type of literal");
}
if (a instanceof Numeric) {
Numeric result = ((Numeric) a).duplicateNumeric();
result.mul((Numeric) b);
stack.push(result);
} else {
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"mul - wrong type of literal(s)");
}
return data;
}
/**
 * Implements division: pushes a / b as a new Numeric. Both operands must be
 * non-null and numeric; division errors (e.g. by zero) are reported as a
 * runtime exception carrying both operands.
 *
 * @param node division AST node
 * @param data visitor context, returned unchanged
 * @return {@code data}
 * @throws TransformLangExecutorRuntimeException on null or non-numeric
 *         operands, or an arithmetic error
 */
public Object visit(CLVFDivNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object a = stack.pop();
    node.jjtGetChild(1).jjtAccept(this, data);
    Object b = stack.pop();
    if (a == null || b == null) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
                "div - NULL value not allowed");
    }
    if (!(b instanceof Numeric)) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
                "div - wrong type of literal");
    }
    if (a instanceof Numeric) {
        // work on a duplicate so the left operand is not mutated
        Numeric result = ((Numeric) a).duplicateNumeric();
        try {
            result.div((Numeric) b);
        } catch (ArithmeticException ex) {
            Object[] arguments = { a, b };
            // BUGFIX: corrected misspelled "aritmetic" in the error message
            throw new TransformLangExecutorRuntimeException(node, arguments, "div - arithmetic exception - " + ex.getMessage());
        }
        stack.push(result);
    } else {
        Object[] arguments = { a, b };
        throw new TransformLangExecutorRuntimeException(node, arguments,
                "div - wrong type of literal(s)");
    }
    return data;
}
public Object visit(CLVFModNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object b = stack.pop();
if (a == null || b == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"mod - NULL value not allowed");
}
if (!(b instanceof Numeric)) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
"mod - wrong type of literal");
}
if (a instanceof Numeric) {
Numeric result = ((Numeric) a).duplicateNumeric();
result.mod((Numeric) b);
stack.push(result);
} else {
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"mod - wrong type of literal(s)");
}
return data;
}
/**
 * Logical NOT: pushes the inverted value of a boolean operand, reusing the
 * shared TRUE_VAL/FALSE_VAL singletons.
 *
 * @param node negation AST node
 * @param data visitor context, returned unchanged
 * @return {@code data}
 * @throws TransformLangExecutorRuntimeException when the operand is not boolean
 */
public Object visit(CLVFNegation node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object operand = stack.pop();
    if (!(operand instanceof Boolean)) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { operand },
                "logical condition does not evaluate to BOOLEAN value");
    }
    stack.push(((Boolean) operand).booleanValue() ? Stack.FALSE_VAL : Stack.TRUE_VAL);
    return data;
}
public Object visit(CLVFSubStrNode node, Object data) {
int length, from;
node.jjtGetChild(0).jjtAccept(this, data);
Object str = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object fromO = stack.pop();
node.jjtGetChild(2).jjtAccept(this, data);
Object lengthO = stack.pop();
if (lengthO == null || fromO == null || str == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { lengthO,
fromO, str }, "substring - NULL value not allowed");
}
try {
length = ((Numeric) lengthO).getInt();
from = ((Numeric) fromO).getInt();
} catch (Exception ex) {
Object arguments[] = { lengthO, fromO, str };
throw new TransformLangExecutorRuntimeException(node,arguments, "substring - "
+ ex.getMessage());
}
if (str instanceof CharSequence) {
stack.push(((CharSequence) str).subSequence(from, from + length));
} else {
Object[] arguments = { lengthO, fromO, str };
throw new TransformLangExecutorRuntimeException(node,arguments,
"substring - wrong type of literal(s)");
}
return data;
}
public Object visit(CLVFUppercaseNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof CharSequence) {
CharSequence seq = (CharSequence) a;
node.strBuf.setLength(0);
node.strBuf.ensureCapacity(seq.length());
for (int i = 0; i < seq.length(); i++) {
node.strBuf.append(Character.toUpperCase(seq.charAt(i)));
}
stack.push(node.strBuf);
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"uppercase - wrong type of literal");
}
return data;
}
public Object visit(CLVFLowercaseNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof CharSequence) {
CharSequence seq = (CharSequence) a;
node.strBuf.setLength(0);
node.strBuf.ensureCapacity(seq.length());
for (int i = 0; i < seq.length(); i++) {
node.strBuf.append(Character.toLowerCase(seq.charAt(i)));
}
stack.push(node.strBuf);
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"lowercase - wrong type of literal");
}
return data;
}
public Object visit(CLVFTrimNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
int start, end;
if (a instanceof CharSequence) {
CharSequence seq = (CharSequence) a;
int length = seq.length();
for (start = 0; start < length; start++) {
if (seq.charAt(start) != ' ' && seq.charAt(start) != '\t') {
break;
}
}
for (end = length - 1; end >= 0; end--) {
if (seq.charAt(end) != ' ' && seq.charAt(end) != '\t') {
break;
}
}
if (start > end)
stack.push("");
else
stack.push(seq.subSequence(start, end + 1));
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"trim - wrong type of literal");
}
return data;
}
/**
 * length(s): pushes the character count of a string operand as a
 * CloverInteger.
 *
 * @param node length AST node
 * @param data visitor context, returned unchanged
 * @return {@code data}
 * @throws TransformLangExecutorRuntimeException for non-string operands
 */
public Object visit(CLVFLengthNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object a = stack.pop();
    if (a instanceof CharSequence) {
        stack.push(new CloverInteger(((CharSequence) a).length()));
    } else {
        Object[] arguments = { a };
        // BUGFIX: corrected misspelled "lenght" in the error message
        throw new TransformLangExecutorRuntimeException(node, arguments,
                "length - wrong type of literal");
    }
    return data;
}
/**
 * today(): pushes the current date/time taken from the executor's shared
 * calendar.
 */
public Object visit(CLVFTodayNode node, Object data) {
    stack.push(stack.calendar.getTime());
    return data;
}
/**
 * isnull(x): evaluates the child expression and pushes TRUE_VAL when its
 * value is null, FALSE_VAL otherwise.
 */
public Object visit(CLVFIsNullNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    stack.push(stack.pop() == null ? Stack.TRUE_VAL : Stack.FALSE_VAL);
    return data;
}
/**
 * nvl(x, fallback): pushes the value of the first expression when it is
 * non-null; otherwise evaluates and pushes the second (fallback) expression.
 * The fallback is evaluated lazily - only when the first value is null.
 */
public Object visit(CLVFNVLNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    Object value = stack.pop();
    if (value == null) {
        node.jjtGetChild(1).jjtAccept(this, data);
        // not necessary: stack.push(stack.pop());
    } else {
        stack.push(value);
    }
    return data;
}
/** Pushes the literal's value, precomputed at parse time. */
public Object visit(CLVFLiteral node, Object data) {
    stack.push(node.value);
    return data;
}
public Object visit(CLVFInputFieldLiteral node, Object data) {
DataField field=inputRecords[node.recordNo].getField(node.fieldNo);
if (field instanceof Numeric){
stack.push(((Numeric)field).duplicateNumeric());
}else{
stack.push(field.getValue());
}
// old stack.push(inputRecords[node.recordNo].getField(node.fieldNo).getValue());
// we return reference to DataField so we can
// perform extra checking in special cases
return node.field;
}
/** Output field literal - currently a no-op; nothing is pushed onto the stack. */
public Object visit(CLVFOutputFieldLiteral node, Object data) {
//stack.push(inputRecords[node.recordNo].getField(node.fieldNo));
// we return reference to DataField so we can
// perform extra checking in special cases
return data;
}
/** Pushes the named global parameter's value; pushes null when no parameters are set. */
public Object visit(CLVFGlobalParameterLiteral node, Object data) {
stack.push(globalParameters!=null ? globalParameters.getProperty(node.name) : null);
return data;
}
/** Pushes the matcher pre-compiled for this regex literal. */
public Object visit(CLVFRegexLiteral node, Object data) {
stack.push(node.matcher);
return data;
}
/**
 * concat(...) - evaluates all children and appends them into a single
 * StringBuffer which is pushed onto the stack. Non-CharSequence values are
 * appended via toString(); a null argument is reported as an error.
 */
public Object visit(CLVFConcatNode node, Object data) {
Object a;
StringBuffer strBuf = new StringBuffer(40);
int numChildren = node.jjtGetNumChildren();
for (int i = 0; i < numChildren; i++) {
node.jjtGetChild(i).jjtAccept(this, data);
a = stack.pop();
if (a instanceof CharSequence) {
StringUtils.strBuffAppend(strBuf,(CharSequence) a);
} else {
if (a != null) {
strBuf.append(a);
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"concat - wrong type of literal(s)");
}
}
}
stack.push(strBuf);
return data;
}
/**
 * dateadd(date, amount) - shifts the date by the given amount of the calendar
 * field configured on the node (node.calendarField) and pushes the new Date.
 *
 * @throws TransformLangExecutorRuntimeException when the amount is not numeric
 *         or the first argument is not a Date
 */
public Object visit(CLVFDateAddNode node, Object data) {
int shiftAmount;
node.jjtGetChild(0).jjtAccept(this, data);
Object date = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object amount = stack.pop();
try {
shiftAmount = ((Numeric) amount).getInt();
} catch (Exception ex) {
Object arguments[] = { amount };
throw new TransformLangExecutorRuntimeException(node,arguments, "dateadd - "
+ ex.getMessage());
}
if (date instanceof Date) {
node.calendar.setTime((Date) date);
node.calendar.add(node.calendarField, shiftAmount);
stack.push(node.calendar.getTime());
} else {
Object arguments[] = { date };
throw new TransformLangExecutorRuntimeException(node,arguments,
"dateadd - no Date expression");
}
return data;
}
/**
 * date2num(date) - extracts the calendar field configured on the node from
 * the Date argument and pushes it as a CloverInteger.
 */
public Object visit(CLVFDate2NumNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object date = stack.pop();
if (date instanceof Date) {
node.calendar.setTime((Date) date);
stack.push(new CloverInteger(node.calendar.get(node.calendarField)));
} else {
Object arguments[] = { date };
throw new TransformLangExecutorRuntimeException(node,arguments,
"date2num - no Date expression");
}
return data;
}
/**
 * datediff(date1, date2) - pushes (date1 - date2) expressed in the unit given
 * by node.calendarField. Second/minute/hour/day/week differences are derived
 * from the millisecond delta with integer division (truncation toward zero);
 * month and year differences are computed from calendar fields.
 */
public Object visit(CLVFDateDiffNode node, Object data) {
Object date1, date2;
node.jjtGetChild(0).jjtAccept(this, data);
date1 = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
date2 = stack.pop();
if (date1 instanceof Date && date2 instanceof Date) {
long diffSec = (((Date) date1).getTime() - ((Date) date2).getTime()) / 1000;
int diff = 0;
switch (node.calendarField) {
case Calendar.SECOND:
// we have the difference in seconds
diff = (int) diffSec;
break;
case Calendar.MINUTE:
// how many minutes'
diff = (int) diffSec / 60;
break;
case Calendar.HOUR_OF_DAY:
diff = (int) diffSec / 3600;
break;
case Calendar.DAY_OF_MONTH:
// how many days is the difference
diff = (int) diffSec / 86400;
break;
case Calendar.WEEK_OF_YEAR:
// how many weeks
diff = (int) diffSec / 604800;
break;
case Calendar.MONTH:
node.start.setTime((Date) date1);
node.end.setTime((Date) date2);
diff = (node.start.get(Calendar.MONTH) + node.start
.get(Calendar.YEAR) * 12)
- (node.end.get(Calendar.MONTH) + node.end
.get(Calendar.YEAR) * 12);
break;
case Calendar.YEAR:
node.start.setTime((Date) date1);
node.end.setTime((Date) date2);
diff = node.start.get(node.calendarField)
- node.end.get(node.calendarField);
break;
default:
Object arguments[] = { new Integer(node.calendarField) };
throw new TransformLangExecutorRuntimeException(node,arguments,
"datediff - wrong difference unit");
}
stack.push(new CloverInteger(diff));
} else {
Object arguments[] = { date1, date2 };
throw new TransformLangExecutorRuntimeException(node,arguments,
"datediff - no Date expression");
}
return data;
}
/**
 * Unary minus - duplicates the numeric operand, multiplies it by -1 and pushes
 * the result (the operand itself is never mutated).
 */
public Object visit(CLVFMinusNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object value = stack.pop();
if (value instanceof Numeric) {
Numeric result = ((Numeric) value).duplicateNumeric();
result.mul(Stack.NUM_MINUS_ONE);
stack.push(result);
} else {
Object arguments[] = { value };
throw new TransformLangExecutorRuntimeException(node,arguments,
"minus - not a number");
}
return data;
}
/**
 * replace(str, regex, with) - pushes str with every regex match replaced.
 * The compiled Pattern is cached on the node and only recompiled when the
 * regex argument differs from the previously seen one (node.stored).
 */
public Object visit(CLVFReplaceNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object str = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object regexO = stack.pop();
node.jjtGetChild(2).jjtAccept(this, data);
Object withO = stack.pop();
if (withO == null || regexO == null || str == null) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { withO,
regexO, str }, "NULL value not allowed");
}
if (str instanceof CharSequence && withO instanceof CharSequence
&& regexO instanceof CharSequence) {
if (node.pattern == null || !node.stored.equals(regexO)) {
node.pattern = Pattern.compile(((CharSequence) regexO)
.toString());
node.matcher = node.pattern.matcher((CharSequence) str);
node.stored = regexO;
} else {
node.matcher.reset((CharSequence) str);
}
stack.push(node.matcher.replaceAll(((CharSequence) withO)
.toString()));
} else {
Object[] arguments = { withO, regexO, str };
throw new TransformLangExecutorRuntimeException(node,arguments,
"replace - wrong type of literal(s)");
}
return data;
}
/**
 * num2str(number) - converts a numeric value to its string form using the
 * radix configured on the node. Radix 10 uses the value's own toString();
 * other radices are only supported for int/long (any radix) and double
 * (hexadecimal only).
 */
public Object visit(CLVFNum2StrNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof Numeric) {
if (node.radix == 10) {
stack.push(((Numeric) a).toString());
} else {
if (a instanceof CloverInteger) {
stack.push(Integer.toString(((CloverInteger) a).getInt(),
node.radix));
} else if (a instanceof CloverLong) {
stack.push(Long.toString(((CloverLong) a).getLong(),
node.radix));
} else if (a instanceof CloverDouble && node.radix == 16) {
stack.push(Double.toHexString(((CloverDouble) a)
.getDouble()));
} else {
Object[] arguments = { a, new Integer(node.radix) };
throw new TransformLangExecutorRuntimeException(node,
arguments,
"num2str - can't convert number to string using specified radix");
}
}
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node, arguments,
"num2str - wrong type of literal");
}
return data;
}
/**
 * str2num(string) - parses the string argument into the numeric type
 * configured on the node (int/long/decimal/double) using node.radix.
 * Decimal parsing only supports radix 10; double parsing supports radix
 * 10 and 16. Parse failures are rethrown as runtime exceptions.
 */
public Object visit(CLVFStr2NumNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof CharSequence) {
try {
Object value = null;
switch (node.numType) {
case INT_VAR:
value = new CloverInteger(Integer.parseInt(
((CharSequence) a).toString(), node.radix));
break;
case LONG_VAR:
value = new CloverLong(Long.parseLong(((CharSequence) a)
.toString(), node.radix));
break;
case DECIMAL_VAR:
if (node.radix == 10) {
value = DecimalFactory.getDecimal(((CharSequence) a)
.toString());
} else {
Object[] arguments = { a, new Integer(node.radix) };
throw new TransformLangExecutorRuntimeException(node,
arguments,
"str2num - can't convert string to decimal number using specified radix");
}
break;
default:
// get double/number type
switch (node.radix) {
case 10:
case 16:
value = new CloverDouble(Double
.parseDouble(((CharSequence) a).toString()));
break;
default:
Object[] arguments = { a, new Integer(node.radix) };
throw new TransformLangExecutorRuntimeException(node,
arguments,
"str2num - can't convert string to number/double number using specified radix");
}
}
stack.push(value);
} catch (NumberFormatException ex) {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,
arguments, "str2num - can't convert \"" + a + "\"");
}
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node, arguments,
"str2num - wrong type of literal");
}
return data;
}
/** date2str(date) - formats the Date argument with the node's date format and pushes the string. */
public Object visit(CLVFDate2StrNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof Date) {
stack.push(node.dateFormat.format((Date)a));
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"date2str - wrong type of literal");
}
return data;
}
/**
 * str2date(string) - parses the string argument with the node's date format
 * and pushes the resulting Date; parse failures become runtime exceptions.
 */
public Object visit(CLVFStr2DateNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof CharSequence) {
try {
stack.push(node.dateFormat.parse(((CharSequence)a).toString()));
} catch (java.text.ParseException ex) {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"str2date - can't convert \"" + a + "\"");
}
} else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"str2date - wrong type of literal");
}
return data;
}
/**
 * iif(condition, thenExpr, elseExpr) - evaluates the boolean condition and
 * then exactly one of the two branches, leaving that branch's result on the
 * stack. (The push(pop()) below is a no-op kept from the original code.)
 */
public Object visit(CLVFIffNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object condition = stack.pop();
if (condition instanceof Boolean) {
if (((Boolean) condition).booleanValue()) {
node.jjtGetChild(1).jjtAccept(this, data);
} else {
node.jjtGetChild(2).jjtAccept(this, data);
}
stack.push(stack.pop());
} else {
Object[] arguments = { condition };
throw new TransformLangExecutorRuntimeException(node,arguments,
"iif - condition does not evaluate to BOOLEAN value");
}
return data;
}
/** print_err(expr) - writes the evaluated value (or "<null>") to standard error. */
public Object visit(CLVFPrintErrNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
System.err.println(a != null ? a : "<null>");
// stack.push(Stack.TRUE_VAL);
return data;
}
/** Debug helper - dumps the interpreter stack contents (top first) to standard error. */
public Object visit(CLVFPrintStackNode node, Object data) {
for (int i=stack.top;i>=0;i--){
System.err.println("["+i+"] : "+stack.stack[i]);
}
return data;
}
/***************************************************************************
* Transformation Language executor starts here.
**************************************************************************/
/**
 * for-loop: child 0 = initializer, child 1 = condition, child 2 = increment,
 * child 3 = body (may be absent -> empty body). Honors break/continue/return
 * via the breakFlag/breakType fields.
 */
public Object visit(CLVFForStatement node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data); // set up of the loop
boolean condition = false;
Node loopCondition = node.jjtGetChild(1);
Node increment = node.jjtGetChild(2);
Node body;
try{
body=node.jjtGetChild(3);
}catch(ArrayIndexOutOfBoundsException ex){
body=emptyNode;
}
try {
loopCondition.jjtAccept(this, data); // evaluate the condition
condition = ((Boolean) stack.pop()).booleanValue();
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
}catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
}
// loop execution
while (condition) {
body.jjtAccept(this, data);
stack.pop(); // in case there is anything on top of stack
// check for break or continue statements
if (breakFlag){
breakFlag=false;
if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) {
return data;
}
}
increment.jjtAccept(this, data);
stack.pop(); // in case there is anything on top of stack
// evaluate the condition
loopCondition.jjtAccept(this, data);
try {
condition = ((Boolean) stack.pop()).booleanValue();
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
}
}
return data;
}
/**
 * while-loop: child 0 = condition, child 1 = body (may be absent -> empty
 * body). The condition is evaluated before the first iteration and after
 * every pass; break/return terminate the loop, continue re-tests it.
 */
public Object visit(CLVFWhileStatement node, Object data) {
boolean condition = false;
Node loopCondition = node.jjtGetChild(0);
Node body;
try{
body=node.jjtGetChild(1);
}catch(ArrayIndexOutOfBoundsException ex){
body=emptyNode;
}
try {
loopCondition.jjtAccept(this, data); // evaluate the condition
condition = ((Boolean) stack.pop()).booleanValue();
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
}catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
}
// loop execution
while (condition) {
body.jjtAccept(this, data);
stack.pop(); // in case there is anything on top of stack
// check for break or continue statements
if (breakFlag){
breakFlag=false;
if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) return data;
}
// evaluate the condition
loopCondition.jjtAccept(this, data);
try {
condition = ((Boolean) stack.pop()).booleanValue();
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
}
}
return data;
}
/**
 * if-statement: child 0 = condition, child 1 = then-branch, optional child 2
 * = else-branch. Only the selected branch is evaluated.
 */
public Object visit(CLVFIfStatement node, Object data) {
boolean condition = false;
try {
node.jjtGetChild(0).jjtAccept(this, data); // evaluate the
// condition
condition = ((Boolean) stack.pop()).booleanValue();
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"condition does not evaluate to BOOLEAN value");
} catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
}
// first if
if (condition) {
node.jjtGetChild(1).jjtAccept(this, data);
//TODO: stack.pop(); -problem with return statement!!! // in case there is anything on top of stack
} else { // if else part exists
if (node.jjtGetNumChildren() > 2) {
node.jjtGetChild(2).jjtAccept(this, data);
//TODO: stack.pop(); // in case there is anything on top of stack
}
}
return data;
}
/**
 * do-while loop: child 0 = body, child 1 = condition. The body always runs at
 * least once; the condition is evaluated after each iteration.
 */
public Object visit(CLVFDoStatement node, Object data) {
boolean condition = false;
Node loopCondition = node.jjtGetChild(1);
Node body = node.jjtGetChild(0);
// loop execution
do {
body.jjtAccept(this, data);
stack.pop(); // in case there is anything on top of stack
// check for break or continue statements
if (breakFlag){
breakFlag=false;
if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) return data;
}
// evaluate the condition
loopCondition.jjtAccept(this, data);
try {
condition = ((Boolean) stack.pop()).booleanValue();
}catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
}catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
}
} while (condition);
return data;
}
/**
 * switch-statement: child 0 = switch expression; remaining children are case
 * expressions, with an optional default clause as the last child. Every case
 * is tested (fall-through style) unless a break/return stops the loop; the
 * default branch runs only when no case matched.
 */
public Object visit(CLVFSwitchStatement node, Object data) {
// get value of switch && push/leave it on stack
boolean match=false;
node.jjtGetChild(0).jjtAccept(this, data);
Object switchVal=stack.pop();
int numChildren = node.jjtGetNumChildren();
int numCases = node.hasDefaultClause ? numChildren-1 : numChildren;
// loop over remaining case statements
for (int i = 1; i < numCases; i++) {
stack.push(switchVal);
if (node.jjtGetChild(i).jjtAccept(this, data)==Stack.TRUE_VAL){
match=true;
}
if (breakFlag) {
if (breakType == BREAK_BREAK) {
breakFlag = false;
}
break;
}
}
// test whether execute default branch
if (node.hasDefaultClause && !match){
node.jjtGetChild(numChildren-1).jjtAccept(this, data);
}
return data;
}
/**
 * One case clause of a switch: compares the switch value (left on the stack
 * by the caller) with the case literal (child 0) and, on a match, executes
 * the clause body (child 1). Returns Stack.TRUE_VAL on match, FALSE_VAL
 * otherwise. NOTE(review): the Numeric branch tests switchVal's type but
 * casts and calls compareTo on value - presumably both are expected to be
 * Numeric; mismatches fall into the ClassCastException handler.
 */
public Object visit(CLVFCaseExpression node, Object data) {
// test if literal (as child 0) is equal to data on stack
// if so, execute block (child 1)
boolean match = false;
Object switchVal = stack.pop();
node.jjtGetChild(0).jjtAccept(this, data);
Object value = stack.pop();
try {
if (switchVal instanceof Numeric) {
match = (((Numeric) value).compareTo((Numeric) switchVal) == 0);
} else if (switchVal instanceof CharSequence) {
match = (Compare.compare((CharSequence) switchVal,
(CharSequence) value) == 0);
} else if (switchVal instanceof Date) {
match = (((Date) switchVal).compareTo((Date) value) == 0);
} else if (switchVal instanceof Boolean) {
match = ((Boolean) switchVal).equals((Boolean) value);
}
} catch (ClassCastException ex) {
Object[] args=new Object[] {switchVal,value};
throw new TransformLangExecutorRuntimeException(node,args,"incompatible literals in case clause");
}catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid case value");
}
if (match){
node.jjtGetChild(1).jjtAccept(this, data);
return Stack.TRUE_VAL;
}else{
return Stack.FALSE_VAL;
}
}
/**
 * "++" operator. For a numeric variable the variable itself is incremented
 * and a copy of the new value pushed; for a date variable one day is added.
 * For any other expression the evaluated numeric result is duplicated,
 * incremented and pushed (the expression cannot be updated in place).
 */
public Object visit(CLVFPlusPlusNode node, Object data) {
Node childNode = node.jjtGetChild(0);
if (childNode instanceof CLVFVariableLiteral) {
CLVFVariableLiteral varNode=(CLVFVariableLiteral) childNode;
Object var=stack.getVar(varNode.localVar, varNode.varSlot);
if (var instanceof Numeric){
((Numeric)var).add(Stack.NUM_ONE);
stack.push(((Numeric)var).duplicateNumeric());
}else if (var instanceof Date){
stack.calendar.setTime((Date)var);
stack.calendar.add(Calendar.DATE, 1);
stack.push(stack.calendar.getTime());
}else{
throw new TransformLangExecutorRuntimeException(node,"variable is not of numeric or date type");
}
} else {
childNode.jjtAccept(this, data);
try {
Numeric num = ((Numeric) stack.pop()).duplicateNumeric();
num.add(Stack.NUM_ONE);
stack.push(num);
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,"expression is not of numeric type");
}catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid numeric expression");
}
}
return data;
}
/**
 * "--" operator. For a numeric variable the variable itself is decremented
 * and a copy of the new value pushed; for a date variable one day is
 * subtracted. For any other expression the evaluated numeric result is
 * duplicated, decremented and pushed (the expression cannot be updated in
 * place).
 *
 * @throws TransformLangExecutorRuntimeException when the operand is neither
 *         numeric nor a date
 */
public Object visit(CLVFMinusMinusNode node, Object data) {
    Node childNode = node.jjtGetChild(0);
    if (childNode instanceof CLVFVariableLiteral) {
        CLVFVariableLiteral varNode = (CLVFVariableLiteral) childNode;
        Object var = stack.getVar(varNode.localVar, varNode.varSlot);
        if (var instanceof Numeric) {
            ((Numeric) var).sub(Stack.NUM_ONE);
            stack.push(((Numeric) var).duplicateNumeric());
        } else if (var instanceof Date) {
            stack.calendar.setTime((Date) var);
            // bugfix: "--" must subtract one day (the old code added one,
            // copy-pasted from the "++" implementation)
            stack.calendar.add(Calendar.DATE, -1);
            stack.push(stack.calendar.getTime());
        } else {
            throw new TransformLangExecutorRuntimeException(node, "variable is not of numeric or date type");
        }
    } else {
        childNode.jjtAccept(this, data);
        try {
            Numeric num = ((Numeric) stack.pop()).duplicateNumeric();
            // bugfix: decrement, not increment (copy-paste from "++")
            num.sub(Stack.NUM_ONE);
            stack.push(num);
        } catch (ClassCastException ex) {
            throw new TransformLangExecutorRuntimeException(node, "expression is not of numeric type");
        } catch (NullPointerException ex) {
            throw new TransformLangExecutorRuntimeException(node, "missing or invalid numeric expression");
        }
    }
    return data;
}
/**
 * Statement block - executes all children in order, discarding any value each
 * leaves on the stack, and stops early when a break/continue/return was seen.
 */
public Object visit(CLVFBlock node, Object data) {
int childern = node.jjtGetNumChildren();
for (int i = 0; i < childern; i++) {
node.jjtGetChild(i).jjtAccept(this, data);
stack.pop(); // in case there is anything on top of stack
// have we seen contiue/break/return statement ??
if (breakFlag){
return data;
}
}
return data;
}
/*
* Loop & block & function control nodes
*/
/** break - raises the break flag so enclosing loop/block visitors unwind. */
public Object visit(CLVFBreakStatement node, Object data) {
breakFlag = true; // we encountered break statement;
breakType=BREAK_BREAK;
return data;
}
/** continue - raises the break flag with CONTINUE type; loops re-test their condition. */
public Object visit(CLVFContinueStatement node, Object data) {
breakFlag = true; // we encountered continue statement;
breakType= BREAK_CONTINUE;
return data;
}
/**
 * return [expr] - evaluates the optional return expression (left on the
 * stack) and raises the break flag with RETURN type to unwind the function.
 */
public Object visit(CLVFReturnStatement node, Object data) {
if (node.jjtHasChildren()){
node.jjtGetChild(0).jjtAccept(this, data);
}
breakFlag = true;
breakType = BREAK_RETURN;
return data;
}
/** Breakpoint node - debugger hook, intentionally not implemented yet. */
public Object visit(CLVFBreakpointNode node, Object data) {
// TODO
return data;
}
/*
* Variable declarations
*/
/**
 * Variable declaration - creates the default-initialized value for the
 * declared type (0, empty string buffer, current date, FALSE, or a decimal
 * sized by node.length/node.precision) and stores it into the variable's
 * global/local slot. Duplicate-declaration checking happens earlier.
 */
public Object visit(CLVFVarDeclaration node, Object data) {
// test for duplicite declaration - should have been done before
/*if (stack.symtab.containsKey(node.name)) {
throw new TransformLangExecutorRuntimeException(node,
"variable already declared - \"" + node.name + "\"");
}*/
Object value;
// create global/local variable
switch (node.type) {
case INT_VAR:
value= new CloverInteger(0);
break;
case LONG_VAR:
value= new CloverLong(0);
break;
case DOUBLE_VAR:
value= new CloverDouble(0);
break;
case DECIMAL_VAR:
if (node.length>0){
if (node.precision>0){
value = DecimalFactory.getDecimal(node.length,node.precision);
}else{
value = DecimalFactory.getDecimal(node.length,0);
}
}else{
value= DecimalFactory.getDecimal();
}
break;
case STRING_VAR:
value= new StringBuffer();
break;
case DATE_VAR:
value=new Date();
break;
case BOOLEAN_VAR:
value= Stack.FALSE_VAL;
break;
default:
throw new TransformLangExecutorRuntimeException(node,
"variable declaration - "
+ "unknown variable type for variable \""
+ node.name + "\"");
}
stack.storeVar(node.localVar, node.varSlot, value);
return data;
}
/** Pushes the current value of the referenced variable (may be null). */
public Object visit(CLVFVariableLiteral node, Object data) {
Object var = stack.getVar(node.localVar, node.varSlot);
// variable can be null
stack.push(var);
/*
if (var != null) {
stack.push(var);
} else {
throw new TransformLangExecutorRuntimeException(node, "unknown variable \""
+ node.varName + "\"");
}
*/
return data;
}
/**
 * Assignment: child 0 is the target variable literal, child 1 the value
 * expression. Mutable containers (Numeric, StringBuffer, Date) are updated in
 * place; Boolean is immutable, so its slot reference is replaced instead.
 * Type mismatches and conversion errors become runtime exceptions.
 */
public Object visit(CLVFAssignment node, Object data) {
CLVFVariableLiteral childNode=(CLVFVariableLiteral) node.jjtGetChild(0);
Object variable = stack.getVar(childNode.localVar,childNode.varSlot);
node.jjtGetChild(1).jjtAccept(this, data);
Object value = stack.pop();
try {
if (variable instanceof Numeric) {
((Numeric) variable).setValue((Numeric) value);
} else if (variable instanceof StringBuffer) {
StringBuffer var = (StringBuffer) variable;
var.setLength(0);
StringUtils.strBuffAppend(var,(CharSequence) value);
} else if (variable instanceof Boolean) {
stack.storeVar(childNode.localVar,childNode.varSlot, (Boolean)value); // boolean is not updatable - we replace the reference
// stack.put(varName,((Boolean)value).booleanValue() ?
// Stack.TRUE_VAL : Stack.FALSE_VAL);
} else if (variable instanceof Date) {
((Date) variable).setTime(((Date) value).getTime());
} else {
throw new TransformLangExecutorRuntimeException(node,
"unknown variable \"" + childNode.varName + "\"");
}
} catch (ClassCastException ex) {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of \"" + value + "\" to variable \""
+ childNode.varName + "\" - incompatible data types");
} catch (NumberFormatException ex){
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of number \"" + value + "\" to variable \"" + childNode.varName + "\" : "+ex.getMessage());
} catch (Exception ex){
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of \"" + value + "\" to variable \"" + childNode.varName + "\" : "+ex.getMessage());
}
return data;
}
/**
 * Maps a value into an output record field. The node may define several
 * alternative source expressions (node.arity); they are tried in order and
 * the first successful assignment wins. When every alternative fails, the
 * last failure propagates and is translated into a runtime exception below.
 */
public Object visit(CLVFMapping node, Object data) {
    DataField field = outputRecords[node.recordNo].getField(node.fieldNo);
    int arity = node.arity; // how many children we have defined
    Object value = null;
    try {
        // we try till success or no more options
        for (int i = 0; i < arity; i++) {
            node.jjtGetChild(i).jjtAccept(this, data);
            value = stack.pop();
            try {
                // TODO: small hack
                if (field instanceof Numeric) {
                    ((Numeric) field).setValue((Numeric) value);
                } else {
                    field.setValue(value);
                }
            } catch (BadDataFormatException ex) {
                // bugfix: the last alternative has index arity - 1; the old
                // test "i == arity" could never be true inside this loop, so
                // a failure of the final alternative was silently swallowed
                if (i == arity - 1)
                    throw ex;
                else
                    continue;
            } catch (Exception ex) {
                if (i == arity - 1)
                    throw ex;
                else
                    continue;
            }
            break; // success during assignment, finish looping
        }
    } catch (BadDataFormatException ex) {
        if (!outputRecords[node.recordNo].getField(node.fieldNo).getMetadata().isNullable()) {
            throw new TransformLangExecutorRuntimeException(node, "can't assign NULL to \"" + node.fieldName + "\"");
        } else {
            throw new TransformLangExecutorRuntimeException(node, "data format exception when mapping \"" + node.fieldName + "\" - assigning \""
                    + value + "\"");
        }
    } catch (TransformLangExecutorRuntimeException ex) {
        throw ex;
    } catch (Exception ex) {
        String msg = ex.getMessage();
        throw new TransformLangExecutorRuntimeException(node,
                (msg != null ? msg : "") +
                " when mapping \"" + node.fieldName + "\" (" + DataFieldMetadata.type2Str(field.getType())
                + ") - assigning \"" + value + "\" (" + (value != null ? value.getClass() : "unknown class") + ")");
    }
    return data;
}
/*
* Declaration & calling of Functions here
*/
/**
 * Calls a user-declared function: evaluates the arguments onto the stack,
 * opens a call frame, binds the arguments to local variable slots (popped in
 * reverse order), runs the function body until a return statement is seen,
 * and closes the frame. A returned value is re-pushed before the frame pops.
 */
public Object visit(CLVFFunctionCallStatement node, Object data) {
//put call parameters on stack
node.childrenAccept(this,data);
CLVFFunctionDeclaration executionNode=node.callNode;
// open call frame
stack.pushFuncCallFrame();
// store call parameters from stack as local variables
for (int i=executionNode.numParams-1;i>=0; stack.storeLocalVar(i--,stack.pop()));
// execute function body
// loop execution
Object returnData;
int numChildren=executionNode.jjtGetNumChildren();
for (int i=0;i<numChildren;i++){
executionNode.jjtGetChild(i).jjtAccept(this,data);
returnData=stack.pop(); // in case there is anything on top of stack
// check for break or continue statements
if (breakFlag){
breakFlag=false;
if (breakType==BREAK_RETURN){
if (returnData!=null)
stack.push(returnData);
break;
}
}
}
stack.popFuncCallFrame();
return data;
}
/** Function declarations are executed only when called - nothing to do here. */
public Object visit(CLVFFunctionDeclaration node, Object data) {
return data;
}
/** Expression statement - simply evaluates the wrapped expression. */
public Object visit(CLVFStatementExpression node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
return data;
}
/**
 * Directly invokes a user-declared TL function with the given parameter
 * values (used when calling functions from Java code rather than from the
 * AST). Opens a call frame, binds the parameters to the function's local
 * variable slots, executes the body until a return statement is seen and
 * closes the frame again; a returned value is left on the interpreter stack.
 *
 * @param executionNode declaration node of the function to run
 * @param data call parameter values (may be null for a no-arg call)
 * @return the parameter array, following the visitor convention
 */
public Object executeFunction(CLVFFunctionDeclaration executionNode, Object[] data) {
    if (data == null) {
        data = new Object[0];
    }
    // open call frame
    stack.pushFuncCallFrame();
    // store call parameters as local variables
    // bugfix: Java evaluates arguments left to right, so the previous
    // one-liner "stack.storeLocalVar(i--, data[i])" decremented i before
    // data[i] was read - every parameter was shifted by one and the last
    // iteration accessed data[-1]
    for (int i = executionNode.numParams - 1; i >= 0; i--) {
        stack.storeLocalVar(i, data[i]);
    }
    // execute function body
    Object returnData;
    int numChildren = executionNode.jjtGetNumChildren();
    for (int i = 0; i < numChildren; i++) {
        executionNode.jjtGetChild(i).jjtAccept(this, data);
        returnData = stack.pop(); // in case there is anything on top of stack
        // check for break or continue statements
        if (breakFlag) {
            breakFlag = false;
            if (breakType == BREAK_RETURN) {
                if (returnData != null)
                    stack.push(returnData);
                break;
            }
        }
    }
    stack.popFuncCallFrame();
    return data;
}
/*
* MATH functions log,log10,exp,pow,sqrt,round
*/
/** sqrt(x) - pushes the square root of the numeric argument as a CloverDouble. */
public Object visit(CLVFSqrtNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof Numeric) {
try{
stack.push(new CloverDouble(Math.sqrt(((Numeric)a).getDouble()) ));
}catch(Exception ex){
throw new TransformLangExecutorRuntimeException(node,"Error when executing SQRT function",ex);
}
}else {
Object[] arguments = { a};
throw new TransformLangExecutorRuntimeException(node,arguments,
"sqrt - wrong type of literal(s)");
}
return data;
}
/** log(x) - pushes the natural logarithm of the numeric argument as a CloverDouble. */
public Object visit(CLVFLogNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof Numeric) {
try{
stack.push(new CloverDouble(Math.log(((Numeric)a).getDouble()) ));
}catch(Exception ex){
throw new TransformLangExecutorRuntimeException(node,"Error when executing LOG function",ex);
}
}else {
Object[] arguments = { a};
throw new TransformLangExecutorRuntimeException(node,arguments,
"log - wrong type of literal(s)");
}
return data;
}
/** log10(x) - pushes the base-10 logarithm of the numeric argument as a CloverDouble. */
public Object visit(CLVFLog10Node node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof Numeric) {
try{
stack.push(new CloverDouble( Math.log10(((Numeric)a).getDouble())));
}catch(Exception ex){
throw new TransformLangExecutorRuntimeException(node,"Error when executing LOG10 function",ex);
}
}else {
Object[] arguments = { a};
throw new TransformLangExecutorRuntimeException(node,arguments,
"log10 - wrong type of literal(s)");
}
return data;
}
/** exp(x) - pushes e raised to the numeric argument as a CloverDouble. */
public Object visit(CLVFExpNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof Numeric) {
try{
stack.push(new CloverDouble( Math.exp(((Numeric)a).getDouble())));
}catch(Exception ex){
throw new TransformLangExecutorRuntimeException(node,"Error when executing EXP function",ex);
}
}else {
Object[] arguments = { a};
throw new TransformLangExecutorRuntimeException(node,arguments,
"exp - wrong type of literal(s)");
}
return data;
}
/** round(x) - pushes the argument rounded to the nearest long as a CloverLong. */
public Object visit(CLVFRoundNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof Numeric) {
try{
stack.push(new CloverLong(Math.round(((Numeric)a).getDouble())));
}catch(Exception ex){
throw new TransformLangExecutorRuntimeException(node,"Error when executing ROUND function",ex);
}
}else {
Object[] arguments = { a};
throw new TransformLangExecutorRuntimeException(node,arguments,
"round - wrong type of literal(s)");
}
return data;
}
/** pow(a, b) - pushes a raised to the power b as a CloverDouble. */
public Object visit(CLVFPowNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
Object b = stack.pop();
if (a instanceof Numeric && b instanceof Numeric) {
try{
stack.push(new CloverDouble(Math.pow(((Numeric)a).getDouble(),
((Numeric)b).getDouble())));
}catch(Exception ex){
throw new TransformLangExecutorRuntimeException(node,"Error when executing POW function",ex);
}
}else {
Object[] arguments = { a, b };
throw new TransformLangExecutorRuntimeException(node,arguments,
"pow - wrong type of literal(s)");
}
return data;
}
/** pi() - pushes the shared PI constant. */
public Object visit(CLVFPINode node, Object data) {
stack.push(Stack.NUM_PI);
return data;
}
/**
 * trunc(x) - for a Date argument pushes the same date with the time-of-day
 * fields zeroed; for a numeric argument pushes the value truncated to a long.
 */
public Object visit(CLVFTruncNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
Object a = stack.pop();
if (a instanceof Date ) {
stack.calendar.setTime((Date)a);
stack.calendar.set(Calendar.HOUR_OF_DAY, 0);
stack.calendar.set(Calendar.MINUTE , 0);
stack.calendar.set(Calendar.SECOND , 0);
stack.calendar.set(Calendar.MILLISECOND , 0);
stack.push( stack.calendar.getTime() );
}else if (a instanceof Numeric){
stack.push(new CloverLong(((Numeric)a).getLong()));
}else {
Object[] arguments = { a };
throw new TransformLangExecutorRuntimeException(node,arguments,
"trunc - wrong type of literal(s)");
}
return data;
}
} | added rethrowing of Executor run-time exception in visit(CLVFAssignment) method
git-svn-id: 7003860f782148507aa0d02fa3b12992383fb6a5@1383 a09ad3ba-1a0f-0410-b1b9-c67202f10d70
| cloveretl.engine/src/org/jetel/interpreter/TransformLangExecutor.java | added rethrowing of Executor run-time exception in visit(CLVFAssignment) method |
|
Java | unlicense | 2d805d13c03ad5e9a39ea9c611d74b79d57b5708 | 0 | KitoHo/disunity,hozmaster/disunity,summertriangle-dev/disunity,v2tmobile/disunity,kennytm/disunity,catinred2/disunity,bolabola/disunity,zodsoft/disunity,yclhehe/disunity,kennytm/disunity,ata4/disunity,yclhehe/disunity,tomagoyaky/disunity,zodsoft/disunity,v2tmobile/disunity,KitoHo/disunity,summertriangle-dev/disunity,ata4/disunity,catinred2/disunity,zhangf911/disunity,zhangf911/disunity,zchen39/disunity,zchen39/disunity,hozmaster/disunity,tomagoyaky/disunity,bolabola/disunity | /*
** 2014 September 25
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
*/
package info.ata4.unity.assetbundle;
import info.ata4.io.DataReader;
import info.ata4.io.DataWriter;
import info.ata4.io.buffer.ByteBufferOutputStream;
import info.ata4.io.socket.IOSocket;
import info.ata4.io.socket.Sockets;
import info.ata4.util.progress.DummyProgress;
import info.ata4.util.progress.Progress;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import static java.nio.file.StandardCopyOption.*;
import static java.nio.file.StandardOpenOption.*;
import java.util.Properties;
import lzma.LzmaDecoder;
import lzma.LzmaEncoder;
import org.apache.commons.io.IOUtils;
/**
* Asset bundle file utility class.
*
* @author Nico Bergemann <barracuda415 at yahoo.de>
*/
public class AssetBundleUtils {
private static final String PROPERTIES_FILE = "bundle.properties";
/** Utility class - private constructor prevents instantiation. */
private AssetBundleUtils() {
}
/**
 * Checks whether the given file looks like a Unity asset bundle by comparing
 * its first eight bytes against the known web/raw bundle signatures.
 *
 * @param file path to test
 * @return true when the file exists, is readable and starts with a known
 *         bundle signature; false otherwise (I/O errors are treated as
 *         "not a bundle")
 */
public static boolean isAssetBundle(Path file) {
    if (!Files.isRegularFile(file)) {
        return false;
    }
    try (InputStream is = Files.newInputStream(file)) {
        byte[] header = new byte[8];
        // bugfix: the read count was ignored before, so a file shorter than
        // the signature was compared against stale zero bytes
        if (is.read(header) != header.length) {
            return false;
        }
        String headerString = new String(header, "ASCII");
        return headerString.equals(AssetBundleHeader.SIGNATURE_WEB)
                || headerString.equals(AssetBundleHeader.SIGNATURE_RAW);
    } catch (IOException ex) {
        // deliberate best-effort check: unreadable files are simply not bundles
    }
    return false;
}
/**
 * Extracts all entries of the asset bundle into outDir, reporting byte-level
 * progress, and writes a "bundle.properties" file describing the bundle
 * header so the bundle can later be rebuilt with the same settings.
 *
 * @param file bundle file to read
 * @param outDir target directory (entry sub-directories are created)
 * @param progress progress sink; extraction stops early when it is canceled
 * @throws IOException on read/write errors
 */
public static void extract(Path file, Path outDir, Progress progress) throws IOException {
try(
AssetBundleReader assetBundle = new AssetBundleReader(file)
) {
long current = 0;
long total = 0;
// pre-compute the total byte count so progress can be reported in bytes
for (AssetBundleEntryInfo entry : assetBundle.getEntries()) {
total += entry.getSize();
}
progress.setLimit(total);
for (AssetBundleEntry entry : assetBundle) {
if (progress.isCanceled()) {
break;
}
progress.setLabel(entry.getName());
Path entryFile = outDir.resolve(entry.getName());
Files.createDirectories(entryFile.getParent());
Files.copy(entry.getInputStream(), entryFile, REPLACE_EXISTING);
current += entry.getSize();
progress.update(current);
}
// create metadata file
AssetBundleHeader header = assetBundle.getHeader();
Properties props = new Properties();
props.setProperty("compressed", String.valueOf(header.isCompressed()));
props.setProperty("streamVersion", String.valueOf(header.getStreamVersion()));
props.setProperty("unityVersion", header.getUnityVersion().toString());
props.setProperty("unityRevision", header.getUnityRevision().toString());
Path propsFile = outDir.resolve(PROPERTIES_FILE);
try (Writer out = Files.newBufferedWriter(propsFile,
Charset.forName("US-ASCII"), WRITE, CREATE, TRUNCATE_EXISTING)) {
props.store(out, null);
}
}
}
public static void extract(Path file, Path outDir) throws IOException {
extract(file, outDir, new DummyProgress());
}
public static IOSocket getSocketForEntry(AssetBundleEntry entry) throws IOException {
IOSocket socket;
// check if the entry is larger than 128 MiB
long size = entry.getSize();
if (size > 1 << 27) {
// copy entry to temporary file
Path tmpFile = Files.createTempFile("disunity", ".assets");
socket = Sockets.forFile(tmpFile, READ, WRITE, DELETE_ON_CLOSE);
IOUtils.copy(entry.getInputStream(), socket.getOutputStream());
socket.getPositionable().position(0);
} else {
// copy entry to memory
ByteBuffer bb = ByteBuffer.allocateDirect((int) size);
IOUtils.copy(entry.getInputStream(), new ByteBufferOutputStream(bb));
bb.flip();
socket = Sockets.forByteBuffer(bb);
}
return socket;
}
public static void compress(Path inFile, Path outFile) throws IOException {
try (
IOSocket inSocket = Sockets.forBufferedReadFile(inFile);
IOSocket outSocket = Sockets.forFile(inFile, CREATE, READ, WRITE, TRUNCATE_EXISTING);
) {
compress(inSocket, outSocket);
}
}
private static void compress(IOSocket inSocket, IOSocket outSocket) throws IOException {
DataReader in = new DataReader(inSocket);
AssetBundleHeader tmpHeader = new AssetBundleHeader();
in.readStruct(tmpHeader);
// check signature
if (!tmpHeader.hasValidSignature()) {
throw new AssetBundleException("Invalid signature");
}
if (tmpHeader.isCompressed()) {
throw new AssetBundleException("Asset bundle is already compressed");
}
tmpHeader.setCompressed(true);
DataWriter out = new DataWriter(outSocket);
out.writeStruct(tmpHeader);
compressData(in, out);
// write header again with fixed file size
out.position(0);
tmpHeader.setCompleteFileSize((int) out.size());
tmpHeader.setMinimumStreamedBytes((int) out.size());
out.writeStruct(tmpHeader);
}
private static void compressData(DataReader in, DataWriter out) throws IOException {
int lc = 3;
int lp = 0;
int pb = 2;
int dictSize = 1 << 23;
LzmaEncoder enc = new LzmaEncoder();
enc.setEndMarkerMode(true);
if (!enc.setLcLpPb(lc, lp, pb)) {
throw new IOException("Invalid LZMA props");
}
if (!enc.setDictionarySize(dictSize)) {
throw new IOException("Invalid dictionary size");
}
boolean swap = out.isSwap();
out.setSwap(true);
out.write(enc.getCoderProperties());
out.writeLong(in.remaining());
out.setSwap(swap);
try (
InputStream is = new BufferedInputStream(in.getSocket().getInputStream());
OutputStream os = new BufferedOutputStream(out.getSocket().getOutputStream());
) {
enc.code(is, os);
}
}
public static void uncompress(Path inFile, Path outFile) throws IOException {
try (
IOSocket inSocket = Sockets.forBufferedReadFile(inFile);
IOSocket outSocket = Sockets.forBufferedWriteFile(inFile);
) {
uncompress(inSocket, outSocket);
}
}
public static void uncompress(IOSocket inSocket, IOSocket outSocket) throws IOException {
DataReader in = new DataReader(inSocket);
AssetBundleHeader tmpHeader = new AssetBundleHeader();
in.readStruct(tmpHeader);
// check signature
if (!tmpHeader.hasValidSignature()) {
throw new AssetBundleException("Invalid signature");
}
if (!tmpHeader.isCompressed()) {
throw new AssetBundleException("Asset bundle is not compressed");
}
tmpHeader.setCompressed(false);
DataWriter out = new DataWriter(outSocket);
out.writeStruct(tmpHeader);
in.setSwap(true);
uncompressData(in, out);
}
private static void uncompressData(DataReader in, DataWriter out) throws IOException {
boolean swap = in.isSwap();
in.setSwap(true);
byte[] lzmaProps = new byte[5];
in.readFully(lzmaProps);
long lzmaSize = in.readLong();
in.setSwap(swap);
LzmaDecoder dec = new LzmaDecoder();
if (!dec.setDecoderProperties(lzmaProps)) {
throw new IOException("Invalid LZMA props");
}
try (
InputStream is = new BufferedInputStream(in.getSocket().getInputStream());
OutputStream os = new BufferedOutputStream(out.getSocket().getOutputStream());
) {
if (!dec.code(is, os, lzmaSize)) {
throw new IOException("LZMA decoding error");
}
}
}
}
| src/info/ata4/unity/assetbundle/AssetBundleUtils.java | /*
** 2014 September 25
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
*/
package info.ata4.unity.assetbundle;
import info.ata4.io.DataReader;
import info.ata4.io.DataWriter;
import info.ata4.io.buffer.ByteBufferOutputStream;
import info.ata4.io.socket.IOSocket;
import info.ata4.io.socket.Sockets;
import info.ata4.util.progress.DummyProgress;
import info.ata4.util.progress.Progress;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import static java.nio.file.StandardCopyOption.*;
import static java.nio.file.StandardOpenOption.*;
import lzma.LzmaDecoder;
import lzma.LzmaEncoder;
import org.apache.commons.io.IOUtils;
/**
* Asset bundle file utility class.
*
* @author Nico Bergemann <barracuda415 at yahoo.de>
*/
public class AssetBundleUtils {
private AssetBundleUtils() {
}
public static boolean isAssetBundle(Path file) {
if (!Files.isRegularFile(file)) {
return false;
}
try (InputStream is = Files.newInputStream(file)) {
byte[] header = new byte[8];
is.read(header);
String headerString = new String(header, "ASCII");
return headerString.equals(AssetBundleHeader.SIGNATURE_WEB)
|| headerString.equals(AssetBundleHeader.SIGNATURE_RAW);
} catch (IOException ex) {
}
return false;
}
public static void extract(Path file, Path outDir, Progress progress) throws IOException {
try(
AssetBundleReader assetBundle = new AssetBundleReader(file)
) {
long current = 0;
long total = 0;
for (AssetBundleEntryInfo entry : assetBundle.getEntries()) {
total += entry.getSize();
}
progress.setLimit(total);
for (AssetBundleEntry entry : assetBundle) {
if (progress.isCanceled()) {
break;
}
progress.setLabel(entry.getName());
Path entryFile = outDir.resolve(entry.getName());
Files.createDirectories(entryFile.getParent());
Files.copy(entry.getInputStream(), entryFile, REPLACE_EXISTING);
current += entry.getSize();
progress.update(current);
}
}
}
public static void extract(Path file, Path outDir) throws IOException {
extract(file, outDir, new DummyProgress());
}
public static IOSocket getSocketForEntry(AssetBundleEntry entry) throws IOException {
IOSocket socket;
// check if the entry is larger than 128 MiB
long size = entry.getSize();
if (size > 1 << 27) {
// copy entry to temporary file
Path tmpFile = Files.createTempFile("disunity", ".assets");
socket = Sockets.forFile(tmpFile, READ, WRITE, DELETE_ON_CLOSE);
IOUtils.copy(entry.getInputStream(), socket.getOutputStream());
socket.getPositionable().position(0);
} else {
// copy entry to memory
ByteBuffer bb = ByteBuffer.allocateDirect((int) size);
IOUtils.copy(entry.getInputStream(), new ByteBufferOutputStream(bb));
bb.flip();
socket = Sockets.forByteBuffer(bb);
}
return socket;
}
public static void compress(Path inFile, Path outFile) throws IOException {
try (
IOSocket inSocket = Sockets.forBufferedReadFile(inFile);
IOSocket outSocket = Sockets.forFile(inFile, CREATE, READ, WRITE, TRUNCATE_EXISTING);
) {
compress(inSocket, outSocket);
}
}
private static void compress(IOSocket inSocket, IOSocket outSocket) throws IOException {
DataReader in = new DataReader(inSocket);
AssetBundleHeader tmpHeader = new AssetBundleHeader();
in.readStruct(tmpHeader);
// check signature
if (!tmpHeader.hasValidSignature()) {
throw new AssetBundleException("Invalid signature");
}
if (tmpHeader.isCompressed()) {
throw new AssetBundleException("Asset bundle is already compressed");
}
tmpHeader.setCompressed(true);
DataWriter out = new DataWriter(outSocket);
out.writeStruct(tmpHeader);
compressData(in, out);
// write header again with fixed file size
out.position(0);
tmpHeader.setCompleteFileSize((int) out.size());
tmpHeader.setMinimumStreamedBytes((int) out.size());
out.writeStruct(tmpHeader);
}
private static void compressData(DataReader in, DataWriter out) throws IOException {
int lc = 3;
int lp = 0;
int pb = 2;
int dictSize = 1 << 23;
LzmaEncoder enc = new LzmaEncoder();
enc.setEndMarkerMode(true);
if (!enc.setLcLpPb(lc, lp, pb)) {
throw new IOException("Invalid LZMA props");
}
if (!enc.setDictionarySize(dictSize)) {
throw new IOException("Invalid dictionary size");
}
boolean swap = out.isSwap();
out.setSwap(true);
out.write(enc.getCoderProperties());
out.writeLong(in.remaining());
out.setSwap(swap);
try (
InputStream is = new BufferedInputStream(in.getSocket().getInputStream());
OutputStream os = new BufferedOutputStream(out.getSocket().getOutputStream());
) {
enc.code(is, os);
}
}
public static void uncompress(Path inFile, Path outFile) throws IOException {
try (
IOSocket inSocket = Sockets.forBufferedReadFile(inFile);
IOSocket outSocket = Sockets.forBufferedWriteFile(inFile);
) {
uncompress(inSocket, outSocket);
}
}
public static void uncompress(IOSocket inSocket, IOSocket outSocket) throws IOException {
DataReader in = new DataReader(inSocket);
AssetBundleHeader tmpHeader = new AssetBundleHeader();
in.readStruct(tmpHeader);
// check signature
if (!tmpHeader.hasValidSignature()) {
throw new AssetBundleException("Invalid signature");
}
if (!tmpHeader.isCompressed()) {
throw new AssetBundleException("Asset bundle is not compressed");
}
tmpHeader.setCompressed(false);
DataWriter out = new DataWriter(outSocket);
out.writeStruct(tmpHeader);
in.setSwap(true);
uncompressData(in, out);
}
private static void uncompressData(DataReader in, DataWriter out) throws IOException {
boolean swap = in.isSwap();
in.setSwap(true);
byte[] lzmaProps = new byte[5];
in.readFully(lzmaProps);
long lzmaSize = in.readLong();
in.setSwap(swap);
LzmaDecoder dec = new LzmaDecoder();
if (!dec.setDecoderProperties(lzmaProps)) {
throw new IOException("Invalid LZMA props");
}
try (
InputStream is = new BufferedInputStream(in.getSocket().getInputStream());
OutputStream os = new BufferedOutputStream(out.getSocket().getOutputStream());
) {
if (!dec.code(is, os, lzmaSize)) {
throw new IOException("LZMA decoding error");
}
}
}
}
| Added properties file when extracting asset bundles | src/info/ata4/unity/assetbundle/AssetBundleUtils.java | Added properties file when extracting asset bundles |
|
Java | apache-2.0 | b34e752db6d8bad7d040d19be545e3cfc57f3dcd | 0 | dreis2211/spring-boot,philwebb/spring-boot,michael-simons/spring-boot,scottfrederick/spring-boot,mbenson/spring-boot,lburgazzoli/spring-boot,spring-projects/spring-boot,philwebb/spring-boot,aahlenst/spring-boot,vpavic/spring-boot,Buzzardo/spring-boot,aahlenst/spring-boot,ilayaperumalg/spring-boot,lburgazzoli/spring-boot,mbenson/spring-boot,tiarebalbi/spring-boot,wilkinsona/spring-boot,ilayaperumalg/spring-boot,donhuvy/spring-boot,htynkn/spring-boot,ilayaperumalg/spring-boot,dreis2211/spring-boot,joshiste/spring-boot,chrylis/spring-boot,hello2009chen/spring-boot,rweisleder/spring-boot,hello2009chen/spring-boot,vpavic/spring-boot,Buzzardo/spring-boot,wilkinsona/spring-boot,rweisleder/spring-boot,aahlenst/spring-boot,spring-projects/spring-boot,philwebb/spring-boot,tiarebalbi/spring-boot,wilkinsona/spring-boot,scottfrederick/spring-boot,dreis2211/spring-boot,aahlenst/spring-boot,Buzzardo/spring-boot,jxblum/spring-boot,ilayaperumalg/spring-boot,yangdd1205/spring-boot,shakuzen/spring-boot,kdvolder/spring-boot,shakuzen/spring-boot,rweisleder/spring-boot,shakuzen/spring-boot,jxblum/spring-boot,royclarkson/spring-boot,chrylis/spring-boot,NetoDevel/spring-boot,mdeinum/spring-boot,wilkinsona/spring-boot,spring-projects/spring-boot,eddumelendez/spring-boot,mdeinum/spring-boot,donhuvy/spring-boot,htynkn/spring-boot,kdvolder/spring-boot,joshiste/spring-boot,chrylis/spring-boot,yangdd1205/spring-boot,mbenson/spring-boot,michael-simons/spring-boot,mdeinum/spring-boot,NetoDevel/spring-boot,jxblum/spring-boot,michael-simons/spring-boot,jxblum/spring-boot,joshiste/spring-boot,kdvolder/spring-boot,rweisleder/spring-boot,vpavic/spring-boot,lburgazzoli/spring-boot,htynkn/spring-boot,dreis2211/spring-boot,shakuzen/spring-boot,rweisleder/spring-boot,mdeinum/spring-boot,Buzzardo/spring-boot,NetoDevel/spring-boot,donhuvy/spring-boot,philwebb/spring-boot,htynkn/spring-boot,chrylis/spring-boot,ilayaperumalg/spring-boot,shakuze
n/spring-boot,htynkn/spring-boot,scottfrederick/spring-boot,donhuvy/spring-boot,hello2009chen/spring-boot,tiarebalbi/spring-boot,royclarkson/spring-boot,donhuvy/spring-boot,jxblum/spring-boot,hello2009chen/spring-boot,aahlenst/spring-boot,jxblum/spring-boot,kdvolder/spring-boot,eddumelendez/spring-boot,chrylis/spring-boot,joshiste/spring-boot,vpavic/spring-boot,dreis2211/spring-boot,scottfrederick/spring-boot,eddumelendez/spring-boot,tiarebalbi/spring-boot,htynkn/spring-boot,eddumelendez/spring-boot,aahlenst/spring-boot,mbenson/spring-boot,kdvolder/spring-boot,spring-projects/spring-boot,scottfrederick/spring-boot,royclarkson/spring-boot,mbenson/spring-boot,rweisleder/spring-boot,joshiste/spring-boot,hello2009chen/spring-boot,lburgazzoli/spring-boot,kdvolder/spring-boot,wilkinsona/spring-boot,michael-simons/spring-boot,ilayaperumalg/spring-boot,vpavic/spring-boot,michael-simons/spring-boot,tiarebalbi/spring-boot,mdeinum/spring-boot,donhuvy/spring-boot,mdeinum/spring-boot,royclarkson/spring-boot,spring-projects/spring-boot,dreis2211/spring-boot,philwebb/spring-boot,tiarebalbi/spring-boot,NetoDevel/spring-boot,spring-projects/spring-boot,eddumelendez/spring-boot,joshiste/spring-boot,Buzzardo/spring-boot,mbenson/spring-boot,chrylis/spring-boot,yangdd1205/spring-boot,philwebb/spring-boot,wilkinsona/spring-boot,scottfrederick/spring-boot,eddumelendez/spring-boot,michael-simons/spring-boot,NetoDevel/spring-boot,lburgazzoli/spring-boot,royclarkson/spring-boot,Buzzardo/spring-boot,vpavic/spring-boot,shakuzen/spring-boot | /*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.web.client;
import java.time.Duration;
import java.util.Collections;
import java.util.Set;
import java.util.function.Supplier;
import org.apache.http.client.config.RequestConfig;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.http.client.BufferingClientHttpRequestFactory;
import org.springframework.http.client.ClientHttpRequestFactory;
import org.springframework.http.client.ClientHttpRequestInterceptor;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.http.client.InterceptingClientHttpRequestFactory;
import org.springframework.http.client.OkHttp3ClientHttpRequestFactory;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.http.client.support.BasicAuthenticationInterceptor;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.ResourceHttpMessageConverter;
import org.springframework.http.converter.StringHttpMessageConverter;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.client.MockRestServiceServer;
import org.springframework.web.client.ResponseErrorHandler;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.util.UriTemplateHandler;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.springframework.test.web.client.match.MockRestRequestMatchers.requestTo;
import static org.springframework.test.web.client.response.MockRestResponseCreators.withSuccess;
/**
* Tests for {@link RestTemplateBuilder}.
*
* @author Stephane Nicoll
* @author Phillip Webb
* @author Andy Wilkinson
* @author Dmytro Nosan
*/
public class RestTemplateBuilderTests {
// Builder under test; a fresh instance is created for every test method.
private RestTemplateBuilder builder = new RestTemplateBuilder();
@Mock
private HttpMessageConverter<Object> messageConverter;
@Mock
private ClientHttpRequestInterceptor interceptor;
// Initializes the @Mock fields above before each test.
@Before
public void setup() {
MockitoAnnotations.initMocks(this);
}
// Constructor argument validation: a null customizer array is rejected.
@Test
public void createWhenCustomizersAreNullShouldThrowException() {
RestTemplateCustomizer[] customizers = null;
assertThatIllegalArgumentException()
.isThrownBy(() -> new RestTemplateBuilder(customizers))
.withMessageContaining("Customizers must not be null");
}
@Test
public void createWithCustomizersShouldApplyCustomizers() {
RestTemplateCustomizer customizer = mock(RestTemplateCustomizer.class);
RestTemplate template = new RestTemplateBuilder(customizer).build();
verify(customizer).customize(template);
}
// HttpComponents is on the test classpath, so detection should pick it.
@Test
public void buildShouldDetectRequestFactory() {
RestTemplate restTemplate = this.builder.build();
assertThat(restTemplate.getRequestFactory())
.isInstanceOf(HttpComponentsClientHttpRequestFactory.class);
}
// With detection disabled the RestTemplate default (simple JDK factory) is kept.
@Test
public void detectRequestFactoryWhenFalseShouldDisableDetection() {
RestTemplate restTemplate = this.builder.detectRequestFactory(false).build();
assertThat(restTemplate.getRequestFactory())
.isInstanceOf(SimpleClientHttpRequestFactory.class);
}
// Relative URIs should be resolved against the configured root URI.
@Test
public void rootUriShouldApply() {
RestTemplate restTemplate = this.builder.rootUri("http://example.com").build();
MockRestServiceServer server = MockRestServiceServer.bindTo(restTemplate).build();
server.expect(requestTo("http://example.com/hello")).andRespond(withSuccess());
restTemplate.getForEntity("/hello", String.class);
server.verify();
}
// The root URI wrapper must decorate a custom handler, not replace it.
@Test
public void rootUriShouldApplyAfterUriTemplateHandler() {
UriTemplateHandler uriTemplateHandler = mock(UriTemplateHandler.class);
RestTemplate template = this.builder.uriTemplateHandler(uriTemplateHandler)
.rootUri("http://example.com").build();
UriTemplateHandler handler = template.getUriTemplateHandler();
handler.expand("/hello");
assertThat(handler).isInstanceOf(RootUriTemplateHandler.class);
verify(uriTemplateHandler).expand("http://example.com/hello");
}
// --- messageConverters(...): null rejection, set/replace/add/default semantics ---
@Test
public void messageConvertersWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.messageConverters((HttpMessageConverter<?>[]) null))
.withMessageContaining("MessageConverters must not be null");
}
@Test
public void messageConvertersCollectionWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.messageConverters((Set<HttpMessageConverter<?>>) null))
.withMessageContaining("MessageConverters must not be null");
}
@Test
public void messageConvertersShouldApply() {
RestTemplate template = this.builder.messageConverters(this.messageConverter)
.build();
assertThat(template.getMessageConverters()).containsOnly(this.messageConverter);
}
// A second messageConverters(...) call replaces, not appends.
@Test
public void messageConvertersShouldReplaceExisting() {
RestTemplate template = this.builder
.messageConverters(new ResourceHttpMessageConverter())
.messageConverters(Collections.singleton(this.messageConverter)).build();
assertThat(template.getMessageConverters()).containsOnly(this.messageConverter);
}
@Test
public void additionalMessageConvertersWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.additionalMessageConverters((HttpMessageConverter<?>[]) null))
.withMessageContaining("MessageConverters must not be null");
}
@Test
public void additionalMessageConvertersCollectionWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.additionalMessageConverters((Set<HttpMessageConverter<?>>) null))
.withMessageContaining("MessageConverters must not be null");
}
@Test
public void additionalMessageConvertersShouldAddToExisting() {
HttpMessageConverter<?> resourceConverter = new ResourceHttpMessageConverter();
RestTemplate template = this.builder.messageConverters(resourceConverter)
.additionalMessageConverters(this.messageConverter).build();
assertThat(template.getMessageConverters()).containsOnly(resourceConverter,
this.messageConverter);
}
// defaultMessageConverters() restores the stock RestTemplate converter list.
@Test
public void defaultMessageConvertersShouldSetDefaultList() {
RestTemplate template = new RestTemplate(
Collections.singletonList(new StringHttpMessageConverter()));
this.builder.defaultMessageConverters().configure(template);
assertThat(template.getMessageConverters())
.hasSameSizeAs(new RestTemplate().getMessageConverters());
}
@Test
public void defaultMessageConvertersShouldClearExisting() {
RestTemplate template = new RestTemplate(
Collections.singletonList(new StringHttpMessageConverter()));
this.builder.additionalMessageConverters(this.messageConverter)
.defaultMessageConverters().configure(template);
assertThat(template.getMessageConverters())
.hasSameSizeAs(new RestTemplate().getMessageConverters());
}
// --- interceptors(...): same null/replace/add contract as converters ---
@Test
public void interceptorsWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.interceptors((ClientHttpRequestInterceptor[]) null))
.withMessageContaining("interceptors must not be null");
}
@Test
public void interceptorsCollectionWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.interceptors((Set<ClientHttpRequestInterceptor>) null))
.withMessageContaining("interceptors must not be null");
}
@Test
public void interceptorsShouldApply() {
RestTemplate template = this.builder.interceptors(this.interceptor).build();
assertThat(template.getInterceptors()).containsOnly(this.interceptor);
}
@Test
public void interceptorsShouldReplaceExisting() {
RestTemplate template = this.builder
.interceptors(mock(ClientHttpRequestInterceptor.class))
.interceptors(Collections.singleton(this.interceptor)).build();
assertThat(template.getInterceptors()).containsOnly(this.interceptor);
}
@Test
public void additionalInterceptorsWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.additionalInterceptors((ClientHttpRequestInterceptor[]) null))
.withMessageContaining("interceptors must not be null");
}
@Test
public void additionalInterceptorsCollectionWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.additionalInterceptors((Set<ClientHttpRequestInterceptor>) null))
.withMessageContaining("interceptors must not be null");
}
@Test
public void additionalInterceptorsShouldAddToExisting() {
ClientHttpRequestInterceptor interceptor = mock(
ClientHttpRequestInterceptor.class);
RestTemplate template = this.builder.interceptors(interceptor)
.additionalInterceptors(this.interceptor).build();
assertThat(template.getInterceptors()).containsOnly(interceptor,
this.interceptor);
}
// --- requestFactory(...), uriTemplateHandler, errorHandler, basic auth ---
@Test
public void requestFactoryClassWhenFactoryIsNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.requestFactory((Class<ClientHttpRequestFactory>) null))
.withMessageContaining("RequestFactory must not be null");
}
@Test
public void requestFactoryClassShouldApply() {
RestTemplate template = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class).build();
assertThat(template.getRequestFactory())
.isInstanceOf(SimpleClientHttpRequestFactory.class);
}
// Reflection-based instantiation must also work for non-public factory classes.
@Test
public void requestFactoryPackagePrivateClassShouldApply() {
RestTemplate template = this.builder
.requestFactory(TestClientHttpRequestFactory.class).build();
assertThat(template.getRequestFactory())
.isInstanceOf(TestClientHttpRequestFactory.class);
}
@Test
public void requestFactoryWhenSupplierIsNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.requestFactory((Supplier<ClientHttpRequestFactory>) null))
.withMessageContaining("RequestFactory Supplier must not be null");
}
@Test
public void requestFactoryShouldApply() {
ClientHttpRequestFactory requestFactory = mock(ClientHttpRequestFactory.class);
RestTemplate template = this.builder.requestFactory(() -> requestFactory).build();
assertThat(template.getRequestFactory()).isSameAs(requestFactory);
}
@Test
public void uriTemplateHandlerWhenHandlerIsNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.uriTemplateHandler(null))
.withMessageContaining("UriTemplateHandler must not be null");
}
@Test
public void uriTemplateHandlerShouldApply() {
UriTemplateHandler uriTemplateHandler = mock(UriTemplateHandler.class);
RestTemplate template = this.builder.uriTemplateHandler(uriTemplateHandler)
.build();
assertThat(template.getUriTemplateHandler()).isSameAs(uriTemplateHandler);
}
@Test
public void errorHandlerWhenHandlerIsNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.errorHandler(null))
.withMessageContaining("ErrorHandler must not be null");
}
@Test
public void errorHandlerShouldApply() {
ResponseErrorHandler errorHandler = mock(ResponseErrorHandler.class);
RestTemplate template = this.builder.errorHandler(errorHandler).build();
assertThat(template.getErrorHandler()).isSameAs(errorHandler);
}
// Basic auth is implemented by registering a BasicAuthenticationInterceptor;
// credentials are verified via reflection on the interceptor's fields.
@Test
public void basicAuthenticationShouldApply() {
RestTemplate template = this.builder.basicAuthentication("spring", "boot")
.build();
ClientHttpRequestInterceptor interceptor = template.getInterceptors().get(0);
assertThat(interceptor).isInstanceOf(BasicAuthenticationInterceptor.class);
assertThat(interceptor).extracting("username").containsExactly("spring");
assertThat(interceptor).extracting("password").containsExactly("boot");
}
// Deprecated alias of basicAuthentication(...); must behave identically.
@Test
@Deprecated
public void basicAuthorizationShouldApply() {
RestTemplate template = this.builder.basicAuthorization("spring", "boot").build();
ClientHttpRequestInterceptor interceptor = template.getInterceptors().get(0);
assertThat(interceptor).isInstanceOf(BasicAuthenticationInterceptor.class);
assertThat(interceptor).extracting("username").containsExactly("spring");
assertThat(interceptor).extracting("password").containsExactly("boot");
}
// --- customizers(...): null rejection, apply/replace/add ordering, build/configure ---
@Test
public void customizersWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(
() -> this.builder.customizers((RestTemplateCustomizer[]) null))
.withMessageContaining("RestTemplateCustomizers must not be null");
}
@Test
public void customizersCollectionWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.customizers((Set<RestTemplateCustomizer>) null))
.withMessageContaining("RestTemplateCustomizers must not be null");
}
@Test
public void customizersShouldApply() {
RestTemplateCustomizer customizer = mock(RestTemplateCustomizer.class);
RestTemplate template = this.builder.customizers(customizer).build();
verify(customizer).customize(template);
}
// The customizer runs after configuration: by the time it fires, the request
// factory must already have been set on the (spied) template.
@Test
public void customizersShouldBeAppliedLast() {
RestTemplate template = spy(new RestTemplate());
this.builder.additionalCustomizers((restTemplate) -> verify(restTemplate)
.setRequestFactory(any(ClientHttpRequestFactory.class)));
this.builder.configure(template);
}
@Test
public void customizersShouldReplaceExisting() {
RestTemplateCustomizer customizer1 = mock(RestTemplateCustomizer.class);
RestTemplateCustomizer customizer2 = mock(RestTemplateCustomizer.class);
RestTemplate template = this.builder.customizers(customizer1)
.customizers(Collections.singleton(customizer2)).build();
verifyZeroInteractions(customizer1);
verify(customizer2).customize(template);
}
@Test
public void additionalCustomizersWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.additionalCustomizers((RestTemplateCustomizer[]) null))
.withMessageContaining("RestTemplateCustomizers must not be null");
}
@Test
public void additionalCustomizersCollectionWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.additionalCustomizers((Set<RestTemplateCustomizer>) null))
.withMessageContaining("RestTemplateCustomizers must not be null");
}
// Added customizers must run after the originals, in registration order.
@Test
public void additionalCustomizersShouldAddToExisting() {
RestTemplateCustomizer customizer1 = mock(RestTemplateCustomizer.class);
RestTemplateCustomizer customizer2 = mock(RestTemplateCustomizer.class);
RestTemplate template = this.builder.customizers(customizer1)
.additionalCustomizers(customizer2).build();
InOrder inOrder = inOrder(customizer1, customizer2);
inOrder.verify(customizer1).customize(template);
inOrder.verify(customizer2).customize(template);
}
// End-to-end check: all builder settings are visible inside the customizer,
// proving the customizer is invoked after everything else is applied.
@Test
public void customizerShouldBeAppliedAtTheEnd() {
ResponseErrorHandler errorHandler = mock(ResponseErrorHandler.class);
ClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory();
this.builder.interceptors(this.interceptor)
.messageConverters(this.messageConverter).rootUri("http://localhost:8080")
.errorHandler(errorHandler).basicAuthentication("spring", "boot")
.requestFactory(() -> requestFactory).customizers((restTemplate) -> {
assertThat(restTemplate.getInterceptors()).hasSize(2)
.contains(this.interceptor).anyMatch(
(ic) -> ic instanceof BasicAuthenticationInterceptor);
assertThat(restTemplate.getMessageConverters())
.contains(this.messageConverter);
assertThat(restTemplate.getUriTemplateHandler())
.isInstanceOf(RootUriTemplateHandler.class);
assertThat(restTemplate.getErrorHandler()).isEqualTo(errorHandler);
ClientHttpRequestFactory actualRequestFactory = restTemplate
.getRequestFactory();
assertThat(actualRequestFactory)
.isInstanceOf(InterceptingClientHttpRequestFactory.class);
assertThat(actualRequestFactory).hasFieldOrPropertyWithValue(
"requestFactory", requestFactory);
}).build();
}
@Test
public void buildShouldReturnRestTemplate() {
RestTemplate template = this.builder.build();
assertThat(template.getClass()).isEqualTo(RestTemplate.class);
}
// build(Class) must return an instance of the requested subclass.
@Test
public void buildClassShouldReturnClassInstance() {
RestTemplateSubclass template = this.builder.build(RestTemplateSubclass.class);
assertThat(template.getClass()).isEqualTo(RestTemplateSubclass.class);
}
// configure(...) applies the builder's settings to an externally created template.
@Test
public void configureShouldApply() {
RestTemplate template = new RestTemplate();
this.builder.configure(template);
assertThat(template.getRequestFactory())
.isInstanceOf(HttpComponentsClientHttpRequestFactory.class);
}
// --- connect/read timeouts across the supported request factory types ---
// A null Duration means "use the factory default" (-1 for the simple factory).
@Test
public void connectTimeoutCanBeNullToUseDefault() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class)
.setConnectTimeout(null).build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("connectTimeout", -1);
}
@Test
public void readTimeoutCanBeNullToUseDefault() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class).setReadTimeout(null)
.build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("readTimeout", -1);
}
// HttpComponents stores timeouts inside its RequestConfig, so it is read
// via reflection rather than a bean property.
@Test
public void connectTimeoutCanBeConfiguredOnHttpComponentsRequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(HttpComponentsClientHttpRequestFactory.class)
.setConnectTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(((RequestConfig) ReflectionTestUtils.getField(requestFactory,
"requestConfig")).getConnectTimeout()).isEqualTo(1234);
}
@Test
public void readTimeoutCanBeConfiguredOnHttpComponentsRequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(HttpComponentsClientHttpRequestFactory.class)
.setReadTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(((RequestConfig) ReflectionTestUtils.getField(requestFactory,
"requestConfig")).getSocketTimeout()).isEqualTo(1234);
}
@Test
public void connectTimeoutCanBeConfiguredOnSimpleRequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class)
.setConnectTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("connectTimeout", 1234);
}
@Test
public void readTimeoutCanBeConfiguredOnSimpleRequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class)
.setReadTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("readTimeout", 1234);
}
@Test
public void connectTimeoutCanBeConfiguredOnOkHttp3RequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(OkHttp3ClientHttpRequestFactory.class)
.setConnectTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(ReflectionTestUtils.getField(
ReflectionTestUtils.getField(requestFactory, "client"), "connectTimeout"))
.isEqualTo(1234);
}
@Test
public void readTimeoutCanBeConfiguredOnOkHttp3RequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(OkHttp3ClientHttpRequestFactory.class)
.setReadTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(ReflectionTestUtils.getField(
ReflectionTestUtils.getField(requestFactory, "client"), "readTimeout"))
.isEqualTo(1234);
}
@Test
public void connectTimeoutCanBeConfiguredOnAWrappedRequestFactory() {
SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
this.builder
.requestFactory(
() -> new BufferingClientHttpRequestFactory(requestFactory))
.setConnectTimeout(Duration.ofMillis(1234)).build();
assertThat(requestFactory).hasFieldOrPropertyWithValue("connectTimeout", 1234);
}
@Test
public void readTimeoutCanBeConfiguredOnAWrappedRequestFactory() {
SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
this.builder
.requestFactory(
() -> new BufferingClientHttpRequestFactory(requestFactory))
.setReadTimeout(Duration.ofMillis(1234)).build();
assertThat(requestFactory).hasFieldOrPropertyWithValue("readTimeout", 1234);
}
@Test
public void unwrappingDoesNotAffectRequestFactoryThatIsSetOnTheBuiltTemplate() {
SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
RestTemplate template = this.builder
.requestFactory(
() -> new BufferingClientHttpRequestFactory(requestFactory))
.build();
assertThat(template.getRequestFactory())
.isInstanceOf(BufferingClientHttpRequestFactory.class);
}
@Test
@SuppressWarnings("deprecation")
public void connectTimeoutCanBeSetWithInteger() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class)
.setConnectTimeout(1234).build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("connectTimeout", 1234);
}
@Test
@SuppressWarnings("deprecation")
public void readTimeoutCanBeSetWithInteger() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class).setReadTimeout(1234)
.build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("readTimeout", 1234);
}
public static class RestTemplateSubclass extends RestTemplate {
}
static class TestClientHttpRequestFactory extends SimpleClientHttpRequestFactory {
}
}
| spring-boot-project/spring-boot/src/test/java/org/springframework/boot/web/client/RestTemplateBuilderTests.java | /*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.web.client;
import java.time.Duration;
import java.util.Collections;
import java.util.Set;
import java.util.function.Supplier;
import org.apache.http.client.config.RequestConfig;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.http.client.BufferingClientHttpRequestFactory;
import org.springframework.http.client.ClientHttpRequestFactory;
import org.springframework.http.client.ClientHttpRequestInterceptor;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.http.client.InterceptingClientHttpRequestFactory;
import org.springframework.http.client.OkHttp3ClientHttpRequestFactory;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.http.client.support.BasicAuthenticationInterceptor;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.ResourceHttpMessageConverter;
import org.springframework.http.converter.StringHttpMessageConverter;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.client.MockRestServiceServer;
import org.springframework.web.client.ResponseErrorHandler;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.util.UriTemplateHandler;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.springframework.test.web.client.match.MockRestRequestMatchers.requestTo;
import static org.springframework.test.web.client.response.MockRestResponseCreators.withSuccess;
/**
* Tests for {@link RestTemplateBuilder}.
*
* @author Stephane Nicoll
* @author Phillip Webb
* @author Andy Wilkinson
* @author Dmytro Nosan
*/
public class RestTemplateBuilderTests {
private RestTemplateBuilder builder = new RestTemplateBuilder();
@Mock
private HttpMessageConverter<Object> messageConverter;
@Mock
private ClientHttpRequestInterceptor interceptor;
@Before
public void setup() {
MockitoAnnotations.initMocks(this);
}
@Test
public void createWhenCustomizersAreNullShouldThrowException() {
RestTemplateCustomizer[] customizers = null;
assertThatIllegalArgumentException()
.isThrownBy(() -> new RestTemplateBuilder(customizers))
.withMessageContaining("Customizers must not be null");
}
@Test
public void createWithCustomizersShouldApplyCustomizers() {
RestTemplateCustomizer customizer = mock(RestTemplateCustomizer.class);
RestTemplate template = new RestTemplateBuilder(customizer).build();
verify(customizer).customize(template);
}
@Test
public void buildShouldDetectRequestFactory() {
RestTemplate restTemplate = this.builder.build();
assertThat(restTemplate.getRequestFactory())
.isInstanceOf(HttpComponentsClientHttpRequestFactory.class);
}
@Test
public void detectRequestFactoryWhenFalseShouldDisableDetection() {
RestTemplate restTemplate = this.builder.detectRequestFactory(false).build();
assertThat(restTemplate.getRequestFactory())
.isInstanceOf(SimpleClientHttpRequestFactory.class);
}
@Test
public void rootUriShouldApply() {
RestTemplate restTemplate = this.builder.rootUri("http://example.com").build();
MockRestServiceServer server = MockRestServiceServer.bindTo(restTemplate).build();
server.expect(requestTo("http://example.com/hello")).andRespond(withSuccess());
restTemplate.getForEntity("/hello", String.class);
server.verify();
}
@Test
public void rootUriShouldApplyAfterUriTemplateHandler() {
UriTemplateHandler uriTemplateHandler = mock(UriTemplateHandler.class);
RestTemplate template = this.builder.uriTemplateHandler(uriTemplateHandler)
.rootUri("http://example.com").build();
UriTemplateHandler handler = template.getUriTemplateHandler();
handler.expand("/hello");
assertThat(handler).isInstanceOf(RootUriTemplateHandler.class);
verify(uriTemplateHandler).expand("http://example.com/hello");
}
@Test
public void messageConvertersWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.messageConverters((HttpMessageConverter<?>[]) null))
.withMessageContaining("MessageConverters must not be null");
}
@Test
public void messageConvertersCollectionWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.messageConverters((Set<HttpMessageConverter<?>>) null))
.withMessageContaining("MessageConverters must not be null");
}
@Test
public void messageConvertersShouldApply() {
RestTemplate template = this.builder.messageConverters(this.messageConverter)
.build();
assertThat(template.getMessageConverters()).containsOnly(this.messageConverter);
}
@Test
public void messageConvertersShouldReplaceExisting() {
RestTemplate template = this.builder
.messageConverters(new ResourceHttpMessageConverter())
.messageConverters(Collections.singleton(this.messageConverter)).build();
assertThat(template.getMessageConverters()).containsOnly(this.messageConverter);
}
@Test
public void additionalMessageConvertersWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.additionalMessageConverters((HttpMessageConverter<?>[]) null))
.withMessageContaining("MessageConverters must not be null");
}
@Test
public void additionalMessageConvertersCollectionWhenConvertersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.additionalMessageConverters((Set<HttpMessageConverter<?>>) null))
.withMessageContaining("MessageConverters must not be null");
}
@Test
public void additionalMessageConvertersShouldAddToExisting() {
HttpMessageConverter<?> resourceConverter = new ResourceHttpMessageConverter();
RestTemplate template = this.builder.messageConverters(resourceConverter)
.additionalMessageConverters(this.messageConverter).build();
assertThat(template.getMessageConverters()).containsOnly(resourceConverter,
this.messageConverter);
}
@Test
public void defaultMessageConvertersShouldSetDefaultList() {
RestTemplate template = new RestTemplate(
Collections.singletonList(new StringHttpMessageConverter()));
this.builder.defaultMessageConverters().configure(template);
assertThat(template.getMessageConverters())
.hasSameSizeAs(new RestTemplate().getMessageConverters());
}
@Test
public void defaultMessageConvertersShouldClearExisting() {
RestTemplate template = new RestTemplate(
Collections.singletonList(new StringHttpMessageConverter()));
this.builder.additionalMessageConverters(this.messageConverter)
.defaultMessageConverters().configure(template);
assertThat(template.getMessageConverters())
.hasSameSizeAs(new RestTemplate().getMessageConverters());
}
@Test
public void interceptorsWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.interceptors((ClientHttpRequestInterceptor[]) null))
.withMessageContaining("interceptors must not be null");
}
@Test
public void interceptorsCollectionWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.interceptors((Set<ClientHttpRequestInterceptor>) null))
.withMessageContaining("interceptors must not be null");
}
@Test
public void interceptorsShouldApply() {
RestTemplate template = this.builder.interceptors(this.interceptor).build();
assertThat(template.getInterceptors()).containsOnly(this.interceptor);
}
@Test
public void interceptorsShouldReplaceExisting() {
RestTemplate template = this.builder
.interceptors(mock(ClientHttpRequestInterceptor.class))
.interceptors(Collections.singleton(this.interceptor)).build();
assertThat(template.getInterceptors()).containsOnly(this.interceptor);
}
@Test
public void additionalInterceptorsWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.additionalInterceptors((ClientHttpRequestInterceptor[]) null))
.withMessageContaining("interceptors must not be null");
}
@Test
public void additionalInterceptorsCollectionWhenInterceptorsAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.additionalInterceptors((Set<ClientHttpRequestInterceptor>) null))
.withMessageContaining("interceptors must not be null");
}
@Test
public void additionalInterceptorsShouldAddToExisting() {
ClientHttpRequestInterceptor interceptor = mock(
ClientHttpRequestInterceptor.class);
RestTemplate template = this.builder.interceptors(interceptor)
.additionalInterceptors(this.interceptor).build();
assertThat(template.getInterceptors()).containsOnly(interceptor,
this.interceptor);
}
@Test
public void requestFactoryClassWhenFactoryIsNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.requestFactory((Class<ClientHttpRequestFactory>) null))
.withMessageContaining("RequestFactory must not be null");
}
@Test
public void requestFactoryClassShouldApply() {
RestTemplate template = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class).build();
assertThat(template.getRequestFactory())
.isInstanceOf(SimpleClientHttpRequestFactory.class);
}
@Test
public void requestFactoryPackagePrivateClassShouldApply() {
RestTemplate template = this.builder
.requestFactory(TestClientHttpRequestFactory.class).build();
assertThat(template.getRequestFactory())
.isInstanceOf(TestClientHttpRequestFactory.class);
}
@Test
public void requestFactoryWhenSupplierIsNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.requestFactory((Supplier<ClientHttpRequestFactory>) null))
.withMessageContaining("RequestFactory Supplier must not be null");
}
@Test
public void requestFactoryShouldApply() {
ClientHttpRequestFactory requestFactory = mock(ClientHttpRequestFactory.class);
RestTemplate template = this.builder.requestFactory(() -> requestFactory).build();
assertThat(template.getRequestFactory()).isSameAs(requestFactory);
}
@Test
public void uriTemplateHandlerWhenHandlerIsNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.uriTemplateHandler(null))
.withMessageContaining("UriTemplateHandler must not be null");
}
@Test
public void uriTemplateHandlerShouldApply() {
UriTemplateHandler uriTemplateHandler = mock(UriTemplateHandler.class);
RestTemplate template = this.builder.uriTemplateHandler(uriTemplateHandler)
.build();
assertThat(template.getUriTemplateHandler()).isSameAs(uriTemplateHandler);
}
@Test
public void errorHandlerWhenHandlerIsNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder.errorHandler(null))
.withMessageContaining("ErrorHandler must not be null");
}
@Test
public void errorHandlerShouldApply() {
ResponseErrorHandler errorHandler = mock(ResponseErrorHandler.class);
RestTemplate template = this.builder.errorHandler(errorHandler).build();
assertThat(template.getErrorHandler()).isSameAs(errorHandler);
}
@Test
public void basicAuthenticationShouldApply() {
RestTemplate template = this.builder.basicAuthentication("spring", "boot")
.build();
ClientHttpRequestInterceptor interceptor = template.getInterceptors().get(0);
assertThat(interceptor).isInstanceOf(BasicAuthenticationInterceptor.class);
assertThat(interceptor).extracting("username").containsExactly("spring");
assertThat(interceptor).extracting("password").containsExactly("boot");
}
@Test
@Deprecated
public void basicAuthorizationShouldApply() {
RestTemplate template = this.builder.basicAuthorization("spring", "boot").build();
ClientHttpRequestInterceptor interceptor = template.getInterceptors().get(0);
assertThat(interceptor).isInstanceOf(BasicAuthenticationInterceptor.class);
assertThat(interceptor).extracting("username").containsExactly("spring");
assertThat(interceptor).extracting("password").containsExactly("boot");
}
@Test
public void customizersWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(
() -> this.builder.customizers((RestTemplateCustomizer[]) null))
.withMessageContaining("RestTemplateCustomizers must not be null");
}
@Test
public void customizersCollectionWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.customizers((Set<RestTemplateCustomizer>) null))
.withMessageContaining("RestTemplateCustomizers must not be null");
}
@Test
public void customizersShouldApply() {
RestTemplateCustomizer customizer = mock(RestTemplateCustomizer.class);
RestTemplate template = this.builder.customizers(customizer).build();
verify(customizer).customize(template);
}
@Test
public void customizersShouldBeAppliedLast() {
RestTemplate template = spy(new RestTemplate());
this.builder.additionalCustomizers((restTemplate) -> verify(restTemplate)
.setRequestFactory(any(ClientHttpRequestFactory.class)));
this.builder.configure(template);
}
@Test
public void customizersShouldReplaceExisting() {
RestTemplateCustomizer customizer1 = mock(RestTemplateCustomizer.class);
RestTemplateCustomizer customizer2 = mock(RestTemplateCustomizer.class);
RestTemplate template = this.builder.customizers(customizer1)
.customizers(Collections.singleton(customizer2)).build();
verifyZeroInteractions(customizer1);
verify(customizer2).customize(template);
}
@Test
public void additionalCustomizersWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(
() -> this.builder.additionalCustomizers((RestTemplateCustomizer[]) null))
.withMessageContaining("RestTemplateCustomizers must not be null");
}
@Test
public void additionalCustomizersCollectionWhenCustomizersAreNullShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> this.builder
.additionalCustomizers((Set<RestTemplateCustomizer>) null))
.withMessageContaining("RestTemplateCustomizers must not be null");
}
@Test
public void additionalCustomizersShouldAddToExisting() {
RestTemplateCustomizer customizer1 = mock(RestTemplateCustomizer.class);
RestTemplateCustomizer customizer2 = mock(RestTemplateCustomizer.class);
RestTemplate template = this.builder.customizers(customizer1)
.additionalCustomizers(customizer2).build();
verify(customizer1).customize(template);
verify(customizer2).customize(template);
}
@Test
public void customizerShouldBeAppliedAtTheEnd() {
ResponseErrorHandler errorHandler = mock(ResponseErrorHandler.class);
ClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory();
this.builder.interceptors(this.interceptor)
.messageConverters(this.messageConverter).rootUri("http://localhost:8080")
.errorHandler(errorHandler).basicAuthentication("spring", "boot")
.requestFactory(() -> requestFactory).customizers((restTemplate) -> {
assertThat(restTemplate.getInterceptors()).hasSize(2)
.contains(this.interceptor).anyMatch(
(ic) -> ic instanceof BasicAuthenticationInterceptor);
assertThat(restTemplate.getMessageConverters())
.contains(this.messageConverter);
assertThat(restTemplate.getUriTemplateHandler())
.isInstanceOf(RootUriTemplateHandler.class);
assertThat(restTemplate.getErrorHandler()).isEqualTo(errorHandler);
ClientHttpRequestFactory actualRequestFactory = restTemplate
.getRequestFactory();
assertThat(actualRequestFactory)
.isInstanceOf(InterceptingClientHttpRequestFactory.class);
assertThat(actualRequestFactory).hasFieldOrPropertyWithValue(
"requestFactory", requestFactory);
}).build();
}
@Test
public void buildShouldReturnRestTemplate() {
RestTemplate template = this.builder.build();
assertThat(template.getClass()).isEqualTo(RestTemplate.class);
}
@Test
public void buildClassShouldReturnClassInstance() {
RestTemplateSubclass template = this.builder.build(RestTemplateSubclass.class);
assertThat(template.getClass()).isEqualTo(RestTemplateSubclass.class);
}
@Test
public void configureShouldApply() {
RestTemplate template = new RestTemplate();
this.builder.configure(template);
assertThat(template.getRequestFactory())
.isInstanceOf(HttpComponentsClientHttpRequestFactory.class);
}
@Test
public void connectTimeoutCanBeNullToUseDefault() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class)
.setConnectTimeout(null).build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("connectTimeout", -1);
}
@Test
public void readTimeoutCanBeNullToUseDefault() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class).setReadTimeout(null)
.build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("readTimeout", -1);
}
@Test
public void connectTimeoutCanBeConfiguredOnHttpComponentsRequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(HttpComponentsClientHttpRequestFactory.class)
.setConnectTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(((RequestConfig) ReflectionTestUtils.getField(requestFactory,
"requestConfig")).getConnectTimeout()).isEqualTo(1234);
}
@Test
public void readTimeoutCanBeConfiguredOnHttpComponentsRequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(HttpComponentsClientHttpRequestFactory.class)
.setReadTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(((RequestConfig) ReflectionTestUtils.getField(requestFactory,
"requestConfig")).getSocketTimeout()).isEqualTo(1234);
}
@Test
public void connectTimeoutCanBeConfiguredOnSimpleRequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class)
.setConnectTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("connectTimeout", 1234);
}
@Test
public void readTimeoutCanBeConfiguredOnSimpleRequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class)
.setReadTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("readTimeout", 1234);
}
@Test
public void connectTimeoutCanBeConfiguredOnOkHttp3RequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(OkHttp3ClientHttpRequestFactory.class)
.setConnectTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(ReflectionTestUtils.getField(
ReflectionTestUtils.getField(requestFactory, "client"), "connectTimeout"))
.isEqualTo(1234);
}
@Test
public void readTimeoutCanBeConfiguredOnOkHttp3RequestFactory() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(OkHttp3ClientHttpRequestFactory.class)
.setReadTimeout(Duration.ofMillis(1234)).build().getRequestFactory();
assertThat(ReflectionTestUtils.getField(
ReflectionTestUtils.getField(requestFactory, "client"), "readTimeout"))
.isEqualTo(1234);
}
@Test
public void connectTimeoutCanBeConfiguredOnAWrappedRequestFactory() {
SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
this.builder
.requestFactory(
() -> new BufferingClientHttpRequestFactory(requestFactory))
.setConnectTimeout(Duration.ofMillis(1234)).build();
assertThat(requestFactory).hasFieldOrPropertyWithValue("connectTimeout", 1234);
}
@Test
public void readTimeoutCanBeConfiguredOnAWrappedRequestFactory() {
SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
this.builder
.requestFactory(
() -> new BufferingClientHttpRequestFactory(requestFactory))
.setReadTimeout(Duration.ofMillis(1234)).build();
assertThat(requestFactory).hasFieldOrPropertyWithValue("readTimeout", 1234);
}
@Test
public void unwrappingDoesNotAffectRequestFactoryThatIsSetOnTheBuiltTemplate() {
SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
RestTemplate template = this.builder
.requestFactory(
() -> new BufferingClientHttpRequestFactory(requestFactory))
.build();
assertThat(template.getRequestFactory())
.isInstanceOf(BufferingClientHttpRequestFactory.class);
}
@Test
@SuppressWarnings("deprecation")
public void connectTimeoutCanBeSetWithInteger() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class)
.setConnectTimeout(1234).build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("connectTimeout", 1234);
}
@Test
@SuppressWarnings("deprecation")
public void readTimeoutCanBeSetWithInteger() {
ClientHttpRequestFactory requestFactory = this.builder
.requestFactory(SimpleClientHttpRequestFactory.class).setReadTimeout(1234)
.build().getRequestFactory();
assertThat(requestFactory).hasFieldOrPropertyWithValue("readTimeout", 1234);
}
public static class RestTemplateSubclass extends RestTemplate {
}
static class TestClientHttpRequestFactory extends SimpleClientHttpRequestFactory {
}
}
| Ensure checking customizer ordering
See gh-15603
| spring-boot-project/spring-boot/src/test/java/org/springframework/boot/web/client/RestTemplateBuilderTests.java | Ensure checking customizer ordering |
|
Java | apache-2.0 | e0cbfea9da7f7ed6c7c9d0eabc0ce7a6d37371bd | 0 | ederign/gwt-project-size,ederign/gwt-project-size | package me.ederign;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
public class GWTLineCounter {
private int numberOfLines;
private boolean ignoreCommentAndSpaces;
public int count( List<String> baseDirectories ) {
numberOfLines = 0;
for ( String baseDirectory : baseDirectories ) {
process( baseDirectory );
}
return numberOfLines;
}
public int countIgnoringCommentAndSpaces( List<String> baseDirectories ) {
ignoreCommentAndSpaces = true;
return count( baseDirectories );
}
private void process( String baseDirectory ) {
File baseDir = new File( baseDirectory );
System.out.println( "----------------------------------------" );
System.out.println( "Base Directory " + baseDirectory );
List<File> gwtProject = searchForGwtProjects( baseDir );
for ( File file : gwtProject ) {
File targetDir = prepareTargetDir( file );
if ( targetDir.isDirectory() ) {
System.out.println( "Processing " + targetDir );
Collection<File> gwtClientFiles = FileUtils.listFiles( targetDir, new String[]{ "java" }, true );
gwtClientFiles.forEach( lineCounter() );
}
}
}
private Consumer<File> lineCounter() {
return new Consumer<File>() {
@Override
public void accept( File file ) {
try {
List<String> lines = Files.readAllLines( file.toPath() );
if ( ignoreCommentAndSpaces ) {
List<String> cleanedLines = filterLines( lines );
numberOfLines += cleanedLines.size();
} else {
numberOfLines += lines.size();
}
} catch ( IOException e ) {
e.printStackTrace();
}
}
};
}
private List<String> filterLines( List<String> lines ) {
boolean inABlockComment = false;
List<String> cleanedLines = new ArrayList<>();
for ( String line : lines ) {
if ( !inABlockComment ) {
if ( !isABlockComment( line ) ) {
String parsedLine = parse(line);
if ( !isAEmptyLine( parsedLine ) ) {
if ( !isALineComment( parsedLine ) ) {
cleanedLines.add( parsedLine );
}
}
} else {
inABlockComment = true;
}
} else {
if ( endOfBlockComment( line ) ) {
inABlockComment = false;
}
}
}
return cleanedLines;
}
private boolean isAEmptyLine( String parsedLine ) {
return parsedLine.isEmpty();
}
private boolean endOfBlockComment( String line ) {
return line.contains( "*/" );
}
private boolean isABlockComment( String line ) {
return line.contains( "/*" );
}
private boolean isALineComment( String line ) {
return line.startsWith( "//" );
}
private String parse( String line ) {
return line.replaceAll( "\\s", "" );
}
private File prepareTargetDir( File baseDir ) {
String baseDirString = baseDir.getParent();
if ( baseDirString.contains( "resources" ) ) {
baseDirString = baseDirString.replace( "resources", "java" );
}
return new File( baseDirString );
}
private List<File> searchForGwtProjects( File targetDir ) {
Collection<File> gwtProjects = FileUtils.listFiles( targetDir,
new String[]{ "gwt.xml" }, true );
return gwtProjects.stream().filter( cleanupGWTProjectsDir() ).collect( Collectors.toList() );
}
private static Predicate<File> cleanupGWTProjectsDir() {
return f -> ( !f.getAbsolutePath().contains( "classes" ) &&
!f.getAbsolutePath().contains( "target" ) &&
!f.getAbsolutePath().contains( "Fast" ) &&
!f.getAbsolutePath().contains( "WEB-INF" ) );
}
}
| gwt-project-size/src/main/java/me/ederign/GWTLineCounter.java | package me.ederign;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
public class GWTLineCounter {

    // Running total accumulated across all processed projects; reset by count().
    private int numberOfLines;
    // When true, blank lines and comment lines are excluded from the total.
    private boolean ignoreCommentAndSpaces;

    /**
     * Counts lines of every GWT client source file found under the given base directories.
     *
     * @param baseDirectories root directories to scan for GWT projects (gwt.xml descriptors)
     * @return total number of counted lines across all discovered projects
     */
    public int count( List<String> baseDirectories ) {
        numberOfLines = 0;
        for ( String baseDirectory : baseDirectories ) {
            process( baseDirectory );
        }
        return numberOfLines;
    }

    /**
     * Same as {@link #count(List)} but skips blank lines, line comments and block comments.
     * Note: this flips a sticky flag, so subsequent count() calls on this instance also ignore them.
     */
    public int countIgnoringCommentAndSpaces( List<String> baseDirectories ) {
        ignoreCommentAndSpaces = true;
        return count( baseDirectories );
    }

    /** Locates GWT projects under the base directory and counts their *.java files. */
    private void process( String baseDirectory ) {
        File baseDir = new File( baseDirectory );
        System.out.println( "----------------------------------------" );
        System.out.println( "Base Directory " + baseDirectory );
        for ( File file : searchForGwtProjects( baseDir ) ) {
            File targetDir = prepareTargetDir( file );
            if ( targetDir.isDirectory() ) {
                System.out.println( "Processing " + targetDir );
                Collection<File> gwtClientFiles = FileUtils.listFiles( targetDir, new String[]{ "java" }, true );
                gwtClientFiles.forEach( this::countLines );
            }
        }
    }

    /** Adds one file's (optionally filtered) line count to the running total. */
    private void countLines( File file ) {
        try {
            List<String> lines = Files.readAllLines( file.toPath() );
            numberOfLines += ignoreCommentAndSpaces ? filterLines( lines ).size() : lines.size();
        } catch ( IOException e ) {
            // Best effort: an unreadable file is reported but does not abort the scan.
            e.printStackTrace();
        }
    }

    /**
     * Removes blank lines, {@code //} line comments and {@code /* ... *}{@code /} block comments.
     * Fixes two defects of the previous implementation:
     * <ul>
     *   <li>a block comment that opens and closes on the same line no longer leaves the
     *       parser stuck in block-comment mode (which used to discard following code lines);</li>
     *   <li>indented {@code //} comments are now recognized (detection runs on the
     *       whitespace-stripped line, matching how emptiness was already checked).</li>
     * </ul>
     * A code fragment preceding an inline {@code /*} on the same line is still discarded,
     * as in the original behavior.
     */
    private List<String> filterLines( List<String> lines ) {
        boolean inABlockComment = false;
        List<String> cleanedLines = new ArrayList<>();
        for ( String line : lines ) {
            if ( inABlockComment ) {
                if ( endOfBlockComment( line ) ) {
                    inABlockComment = false;
                }
                continue;
            }
            if ( isABlockComment( line ) ) {
                // Stay in block mode only when the comment does not close after its opener.
                inABlockComment = !endOfBlockComment( line.substring( line.indexOf( "/*" ) + 2 ) );
                continue;
            }
            String parsedLine = parse( line );
            if ( !isAEmptyLine( parsedLine ) && !isALineComment( parsedLine ) ) {
                cleanedLines.add( line );
            }
        }
        return cleanedLines;
    }

    private boolean isAEmptyLine( String parsedLine ) {
        return parsedLine.isEmpty();
    }

    private boolean endOfBlockComment( String line ) {
        return line.contains( "*/" );
    }

    private boolean isABlockComment( String line ) {
        return line.contains( "/*" );
    }

    /** Expects a whitespace-stripped line, so indentation cannot hide a comment. */
    private boolean isALineComment( String parsedLine ) {
        return parsedLine.startsWith( "//" );
    }

    /** Strips all whitespace characters from the line. */
    private String parse( String line ) {
        return line.replaceAll( "\\s", "" );
    }

    /**
     * Maps a gwt.xml descriptor's directory to the corresponding java sources directory
     * (descriptors living under "resources" are mirrored into "java").
     */
    private File prepareTargetDir( File baseDir ) {
        String baseDirString = baseDir.getParent();
        if ( baseDirString.contains( "resources" ) ) {
            baseDirString = baseDirString.replace( "resources", "java" );
        }
        return new File( baseDirString );
    }

    /** Finds gwt.xml descriptors below targetDir, excluding build-output locations. */
    private List<File> searchForGwtProjects( File targetDir ) {
        Collection<File> gwtProjects = FileUtils.listFiles( targetDir,
                new String[]{ "gwt.xml" }, true );
        return gwtProjects.stream().filter( cleanupGWTProjectsDir() ).collect( Collectors.toList() );
    }

    /** Rejects gwt.xml hits inside build-output / generated directories. */
    private static Predicate<File> cleanupGWTProjectsDir() {
        return f -> ( !f.getAbsolutePath().contains( "classes" ) &&
                !f.getAbsolutePath().contains( "target" ) &&
                !f.getAbsolutePath().contains( "Fast" ) &&
                !f.getAbsolutePath().contains( "WEB-INF" ) );
    }
}
| fixing
| gwt-project-size/src/main/java/me/ederign/GWTLineCounter.java | fixing |
|
Java | apache-2.0 | 15c0be8a461deb41486bd6ec35047d7eb38079eb | 0 | joansmith/camunda-bpm-platform,skjolber/camunda-bpm-platform,camunda/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,langfr/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,jangalinski/camunda-bpm-platform,hupda-edpe/c,langfr/camunda-bpm-platform,skjolber/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,plexiti/camunda-bpm-platform,filiphr/camunda-bpm-platform,falko/camunda-bpm-platform,skjolber/camunda-bpm-platform,bentrm/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,joansmith/camunda-bpm-platform,falko/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,plexiti/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,plexiti/camunda-bpm-platform,jangalinski/camunda-bpm-platform,plexiti/camunda-bpm-platform,xasx/camunda-bpm-platform,filiphr/camunda-bpm-platform,bentrm/camunda-bpm-platform,tcrossland/camunda-bpm-platform,langfr/camunda-bpm-platform,bentrm/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,plexiti/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,falko/camunda-bpm-platform,tcrossland/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,joansmith/camunda-bpm-platform,xasx/camunda-bpm-platform,jangalinski/camunda-bpm-platform,xasx/camunda-bpm-platform,hupda-edpe/c,filiphr/camunda-bpm-platform,hupda-edpe/c,hupda-edpe/c,ingorichtsmeier/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,tcrossland/camunda-bpm-platform,skjolber/camunda-bpm-platform,bentrm/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,skjolber/camunda-bpm-platform,filiphr/camunda-bpm-platform,langfr/camunda-bpm-platform,jangalinski/camunda-bpm-platform,camunda/camunda-bpm-platform,joansmith/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,xasx/camunda-bpm-platform,jangalinski/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessin
g,camunda/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,tcrossland/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,falko/camunda-bpm-platform,hupda-edpe/c,xasx/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,tcrossland/camunda-bpm-platform,camunda/camunda-bpm-platform,langfr/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,langfr/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,AlexMinsk/camunda-bpm-platform,falko/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,filiphr/camunda-bpm-platform,jangalinski/camunda-bpm-platform,plexiti/camunda-bpm-platform,joansmith/camunda-bpm-platform,camunda/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,bentrm/camunda-bpm-platform,hupda-edpe/c,ingorichtsmeier/camunda-bpm-platform,skjolber/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,filiphr/camunda-bpm-platform,xasx/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,tcrossland/camunda-bpm-platform,falko/camunda-bpm-platform,bentrm/camunda-bpm-platform,camunda/camunda-bpm-platform,joansmith/camunda-bpm-platform | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.integrationtest.functional.el;
import static org.junit.Assert.assertEquals;
import java.util.List;
import java.util.Map;
import org.camunda.bpm.dmn.engine.DmnDecisionResult;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.runtime.VariableInstance;
import org.camunda.bpm.engine.variable.Variables;
import org.camunda.bpm.integrationtest.functional.el.beans.GreeterBean;
import org.camunda.bpm.integrationtest.util.AbstractFoxPlatformIntegrationTest;
import org.camunda.bpm.integrationtest.util.TestContainer;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.OperateOnDeployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* @author Daniel Meyer
*
*/
@RunWith(Arquillian.class)
public class DecisionContextSwitchTest extends AbstractFoxPlatformIntegrationTest {

    protected static final String DMN_RESOURCE_NAME = "org/camunda/bpm/integrationtest/functional/el/BeanResolvingDecision.dmn11.xml";

    /** BPMN process archive referencing the decision via a business rule task. */
    @Deployment(name="bpmnDeployment")
    public static WebArchive createBpmnDeployment() {
        return initWebArchiveDeployment("bpmn-deployment.war")
                .addAsResource("org/camunda/bpm/integrationtest/functional/el/BusinessRuleProcess.bpmn20.xml");
    }

    /** DMN archive carrying the bean the decision's expression resolves. */
    @Deployment(name="dmnDeployment")
    public static WebArchive createDmnDeployment() {
        return initWebArchiveDeployment("dmn-deployment.war")
                .addClass(GreeterBean.class)
                .addAsResource(DMN_RESOURCE_NAME);
    }

    /** Client archive from which the tests are executed. */
    @Deployment(name="clientDeployment")
    public static WebArchive clientDeployment() {
        WebArchive webArchive = ShrinkWrap.create(WebArchive.class, "client.war")
                .addClass(AbstractFoxPlatformIntegrationTest.class);
        TestContainer.addContainerSpecificResources(webArchive);
        return webArchive;
    }

    @Test
    @OperateOnDeployment("clientDeployment")
    public void shouldSwitchContextWhenUsingDecisionService() {
        DmnDecisionResult decisionResult = decisionService.evaluateDecisionByKey("decision", Variables.createVariables());
        assertEquals("ok", decisionResult.getFirstOutput().getFirstValue());
    }

    @Test
    @SuppressWarnings("unchecked")
    @OperateOnDeployment("clientDeployment")
    public void shouldSwitchContextWhenCallingFromBpmn() {
        ProcessInstance pi = runtimeService.startProcessInstanceByKey("testProcess");
        VariableInstance decisionResult = runtimeService.createVariableInstanceQuery()
                .processInstanceIdIn(pi.getId())
                .variableName("result").singleResult();
        List<Map<String, Object>> result = (List<Map<String, Object>>) decisionResult.getValue();
        assertEquals("ok", result.get(0).get("result"));
    }

    @Test
    @OperateOnDeployment("clientDeployment")
    public void shouldSwitchContextWhenUsingDecisionServiceAfterRedeployment() {
        // given
        org.camunda.bpm.engine.repository.Deployment redeployment = redeployDmnResources();
        try {
            // when then
            DmnDecisionResult decisionResult = decisionService.evaluateDecisionByKey("decision", Variables.createVariables());
            assertEquals("ok", decisionResult.getFirstOutput().getFirstValue());
        }
        finally {
            repositoryService.deleteDeployment(redeployment.getId(), true);
        }
    }

    @Test
    @SuppressWarnings("unchecked")
    @OperateOnDeployment("clientDeployment")
    public void shouldSwitchContextWhenCallingFromBpmnAfterRedeployment() {
        // given
        org.camunda.bpm.engine.repository.Deployment redeployment = redeployDmnResources();
        try {
            // when then
            ProcessInstance pi = runtimeService.startProcessInstanceByKey("testProcess");
            VariableInstance decisionResult = runtimeService.createVariableInstanceQuery()
                    .processInstanceIdIn(pi.getId())
                    .variableName("result")
                    .singleResult();
            List<Map<String, Object>> result = (List<Map<String, Object>>) decisionResult.getValue();
            assertEquals("ok", result.get(0).get("result"));
        }
        finally {
            repositoryService.deleteDeployment(redeployment.getId(), true);
        }
    }

    /**
     * Finds the engine deployment that contains {@link #DMN_RESOURCE_NAME}.
     * If several deployments contain it, the last one returned by the query wins
     * (same semantics as the original inline loops). Fails the test when none is found.
     */
    private org.camunda.bpm.engine.repository.Deployment findDmnDeployment() {
        org.camunda.bpm.engine.repository.Deployment dmnDeployment = null;
        List<org.camunda.bpm.engine.repository.Deployment> deployments = repositoryService.createDeploymentQuery()
                .list();
        for (org.camunda.bpm.engine.repository.Deployment deployment : deployments) {
            List<String> resourceNames = repositoryService.getDeploymentResourceNames(deployment.getId());
            if(resourceNames.contains(DMN_RESOURCE_NAME)) {
                dmnDeployment = deployment;
            }
        }
        if(dmnDeployment == null) {
            Assert.fail("Expected to find DMN deployment");
        }
        return dmnDeployment;
    }

    /**
     * Re-deploys the resources of the existing DMN deployment.
     * The caller is responsible for deleting the returned deployment (cascade) afterwards.
     */
    private org.camunda.bpm.engine.repository.Deployment redeployDmnResources() {
        org.camunda.bpm.engine.repository.Deployment dmnDeployment = findDmnDeployment();
        return repositoryService
                .createDeployment()
                .nameFromDeployment(dmnDeployment.getId())
                .addDeploymentResources(dmnDeployment.getId())
                .deploy();
    }
}
| qa/integration-tests-engine/src/test/java/org/camunda/bpm/integrationtest/functional/el/DecisionContextSwitchTest.java | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.integrationtest.functional.el;
import java.util.List;
import org.camunda.bpm.engine.variable.Variables;
import org.camunda.bpm.integrationtest.functional.el.beans.GreeterBean;
import org.camunda.bpm.integrationtest.util.AbstractFoxPlatformIntegrationTest;
import org.camunda.bpm.integrationtest.util.TestContainer;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.OperateOnDeployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* @author Daniel Meyer
*
*/
@RunWith(Arquillian.class)
public class DecisionContextSwitchTest extends AbstractFoxPlatformIntegrationTest {

    // Classpath location of the DMN decision whose expression resolves a CDI bean.
    protected static final String DMN_RESOURCE_NAME = "org/camunda/bpm/integrationtest/functional/el/BeanResolvingDecision.dmn11.xml";

    // BPMN process archive referencing the decision via a business rule task.
    // NOTE(review): "Deplyoment" is a typo in the method name; Arquillian discovers
    // @Deployment factories by annotation, so a rename should be safe — confirm before changing.
    @Deployment(name="bpmnDeployment")
    public static WebArchive createBpmnDeplyoment() {
        return initWebArchiveDeployment("bpmn-deployment.war")
                .addAsResource("org/camunda/bpm/integrationtest/functional/el/BusinessRuleProcess.bpmn20.xml");
    }

    // Separate archive carrying the DMN resource and the bean it references.
    // NOTE(review): same "Deplyoment" typo as above.
    @Deployment(name="dmnDeployment")
    public static WebArchive createDmnDeplyoment() {
        return initWebArchiveDeployment("dmn-deployment.war")
                .addClass(GreeterBean.class)
                .addAsResource(DMN_RESOURCE_NAME);
    }

    // Client archive from which the tests below are executed.
    @Deployment(name="clientDeployment")
    public static WebArchive clientDeployment() {
        WebArchive webArchive = ShrinkWrap.create(WebArchive.class, "client.war")
                .addClass(AbstractFoxPlatformIntegrationTest.class);
        TestContainer.addContainerSpecificResources(webArchive);
        return webArchive;
    }

    // Smoke test: evaluating the decision must not fail even though the referenced
    // bean lives in a different deployment (context switch). The result itself
    // is not asserted here.
    @Test
    @OperateOnDeployment("clientDeployment")
    public void shouldSwitchContextWhenUsingDecisionService() {
        decisionService.evaluateDecisionByKey("decision", Variables.createVariables());
    }

    // Same context-switch expectation when the decision is invoked from a BPMN process.
    @Test
    @OperateOnDeployment("clientDeployment")
    public void shouldSwitchContextWhenCallingFromBpmn() {
        runtimeService.startProcessInstanceByKey("testProcess");
    }

    @Test
    @OperateOnDeployment("clientDeployment")
    public void shouldSwitchContextWhenUsingDecisionServiceAfterRedeployment() {
        // given
        List<org.camunda.bpm.engine.repository.Deployment> deployments = repositoryService.createDeploymentQuery()
                .list();

        // find dmn deployment (last match wins if several contain the resource)
        org.camunda.bpm.engine.repository.Deployment dmnDeployment = null;
        for (org.camunda.bpm.engine.repository.Deployment deployment : deployments) {
            List<String> resourceNames = repositoryService.getDeploymentResourceNames(deployment.getId());
            if(resourceNames.contains(DMN_RESOURCE_NAME)) {
                dmnDeployment = deployment;
            }
        }

        if(dmnDeployment == null) {
            Assert.fail("Expected to find DMN deployment");
        }

        // re-deploy the same resources under a new deployment
        org.camunda.bpm.engine.repository.Deployment deployment2 = repositoryService
                .createDeployment()
                .nameFromDeployment(dmnDeployment.getId())
                .addDeploymentResources(dmnDeployment.getId())
                .deploy();

        try {
            // when then — evaluation must still resolve the bean after redeployment
            decisionService.evaluateDecisionByKey("decision", Variables.createVariables());
        }
        finally {
            // cascade delete so the redeployment does not leak into other tests
            repositoryService.deleteDeployment(deployment2.getId(), true);
        }
    }

    @Test
    @OperateOnDeployment("clientDeployment")
    public void shouldSwitchContextWhenCallingFromBpmnAfterRedeployment() {
        // given
        List<org.camunda.bpm.engine.repository.Deployment> deployments = repositoryService.createDeploymentQuery()
                .list();

        // find dmn deployment (duplicated from the test above; candidate for extraction)
        org.camunda.bpm.engine.repository.Deployment dmnDeployment = null;
        for (org.camunda.bpm.engine.repository.Deployment deployment : deployments) {
            List<String> resourceNames = repositoryService.getDeploymentResourceNames(deployment.getId());
            if(resourceNames.contains(DMN_RESOURCE_NAME)) {
                dmnDeployment = deployment;
            }
        }

        if(dmnDeployment == null) {
            Assert.fail("Expected to find DMN deployment");
        }

        org.camunda.bpm.engine.repository.Deployment deployment2 = repositoryService
                .createDeployment()
                .nameFromDeployment(dmnDeployment.getId())
                .addDeploymentResources(dmnDeployment.getId())
                .deploy();

        try {
            // when then — process start must still work against the redeployed decision
            runtimeService.startProcessInstanceByKey("testProcess");
        }
        finally {
            repositoryService.deleteDeployment(deployment2.getId(), true);
        }
    }
}
| chore(qa): fix typos and test decision result
related to #CAM-4795
| qa/integration-tests-engine/src/test/java/org/camunda/bpm/integrationtest/functional/el/DecisionContextSwitchTest.java | chore(qa): fix typos and test decision result |
|
Java | apache-2.0 | 6eb94780fe6115a200e4b5f4abde1008367c3dcf | 0 | skia-dev/oss-fuzz,skia-dev/oss-fuzz,skia-dev/oss-fuzz,google/oss-fuzz,skia-dev/oss-fuzz,skia-dev/oss-fuzz,google/oss-fuzz,google/oss-fuzz,google/oss-fuzz,google/oss-fuzz,skia-dev/oss-fuzz,skia-dev/oss-fuzz,google/oss-fuzz,google/oss-fuzz,skia-dev/oss-fuzz,skia-dev/oss-fuzz,google/oss-fuzz,google/oss-fuzz,skia-dev/oss-fuzz,google/oss-fuzz,skia-dev/oss-fuzz,google/oss-fuzz | import com.code_intelligence.jazzer.api.FuzzedDataProvider;
import com.code_intelligence.jazzer.api.FuzzerSecurityIssueHigh;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.PreparedStatement;
public class SqlPreparedStatementFuzzer extends TestServer {

    SqlPreparedStatementFuzzer(boolean verbose) {
        super(verbose);
    }

    /**
     * Feeds one fuzzer-generated string through a parameterized UPDATE.
     * SQLExceptions are expected for hostile inputs and are deliberately swallowed;
     * the fuzzer is hunting for crashes/sanitizer findings, not SQL errors.
     */
    void testOneInput(String fuzzyString) {
        // try-with-resources closes the PreparedStatement too — the original
        // leaked it (only the Connection was managed).
        try (Connection connection = getConnection();
             PreparedStatement preparedStatement =
                     connection.prepareStatement("UPDATE TestTable SET value=? WHERE key=1")) {
            preparedStatement.setString(1, fuzzyString);
            preparedStatement.executeUpdate();
        } catch (SQLException ex) {
            /* ignore: invalid values are an expected fuzzing outcome */
        }
    }

    /** Jazzer entry point: one fuzzing iteration against a fresh test server. */
    public static void fuzzerTestOneInput(FuzzedDataProvider fuzzedDataProvider) throws Exception {
        try (TestServer fuzzer = new SqlPreparedStatementFuzzer(false)) {
            fuzzer.testOneInput(fuzzedDataProvider.consumeRemainingAsAsciiString());
        }
    }
}
| projects/hsqldb/SqlPreparedStatementFuzzer.java | import com.code_intelligence.jazzer.api.FuzzedDataProvider;
import com.code_intelligence.jazzer.api.FuzzerSecurityIssueHigh;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.PreparedStatement;
public class SqlPreparedStatementFuzzer extends TestServer {
SqlPreparedStatementFuzzer(boolean verbose) {
super(verbose);
}
void testOneInput(String fuzzyString) {
try (Connection connection = getConnection()) {
PreparedStatement preparedStatement = connection.prepareStatement("UPDATE TestTable SET value=? WHERE key=1");
preparedStatement.setString(1, fuzzyString);
preparedStatement.executeUpdate();
} catch (SQLException ex) {
/* ignore */
}
}
public static void fuzzerTestOneInput(FuzzedDataProvider fuzzedDataProvider) throws Exception {
try (TestServer fuzzer = new SqlStatementFuzzer(false)) {
fuzzer.testOneInput(fuzzedDataProvider.consumeRemainingAsAsciiString());
}
}
} | [hsqldb] fix typo (#8689)
@kyakdan | projects/hsqldb/SqlPreparedStatementFuzzer.java | [hsqldb] fix typo (#8689) |
|
Java | apache-2.0 | ef59f41215d1b2558a7d109df6c482e91da9391c | 0 | reportportal/commons-dao | /*
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.dao;
import com.epam.ta.reportportal.commons.querygen.CriteriaHolder;
import com.epam.ta.reportportal.commons.querygen.Filter;
import com.epam.ta.reportportal.commons.querygen.QueryBuilder;
import com.epam.ta.reportportal.commons.validation.Suppliers;
import com.epam.ta.reportportal.dao.util.QueryUtils;
import com.epam.ta.reportportal.dao.widget.WidgetProviderChain;
import com.epam.ta.reportportal.entity.enums.StatusEnum;
import com.epam.ta.reportportal.entity.widget.content.*;
import com.epam.ta.reportportal.entity.widget.content.healthcheck.ComponentHealthCheckContent;
import com.epam.ta.reportportal.entity.widget.content.healthcheck.HealthCheckTableContent;
import com.epam.ta.reportportal.entity.widget.content.healthcheck.HealthCheckTableGetParams;
import com.epam.ta.reportportal.entity.widget.content.healthcheck.HealthCheckTableInitParams;
import com.epam.ta.reportportal.exception.ReportPortalException;
import com.epam.ta.reportportal.jooq.enums.JStatusEnum;
import com.epam.ta.reportportal.jooq.enums.JTestItemTypeEnum;
import com.epam.ta.reportportal.jooq.tables.JItemAttribute;
import com.epam.ta.reportportal.util.WidgetSortUtils;
import com.epam.ta.reportportal.ws.model.ActivityResource;
import com.epam.ta.reportportal.ws.model.ErrorType;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.jooq.*;
import org.jooq.impl.DSL;
import org.jooq.util.postgres.PostgresDSL;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Repository;
import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.*;
import java.util.stream.Collectors;
import static com.epam.ta.reportportal.commons.querygen.QueryBuilder.STATISTICS_KEY;
import static com.epam.ta.reportportal.commons.querygen.constant.GeneralCriteriaConstant.CRITERIA_START_TIME;
import static com.epam.ta.reportportal.commons.querygen.constant.ItemAttributeConstant.KEY_VALUE_SEPARATOR;
import static com.epam.ta.reportportal.dao.constant.WidgetContentRepositoryConstants.*;
import static com.epam.ta.reportportal.dao.constant.WidgetRepositoryConstants.ID;
import static com.epam.ta.reportportal.dao.util.JooqFieldNameTransformer.fieldName;
import static com.epam.ta.reportportal.dao.util.QueryUtils.collectJoinFields;
import static com.epam.ta.reportportal.dao.util.WidgetContentUtil.*;
import static com.epam.ta.reportportal.jooq.Tables.*;
import static com.epam.ta.reportportal.jooq.tables.JActivity.ACTIVITY;
import static com.epam.ta.reportportal.jooq.tables.JIssue.ISSUE;
import static com.epam.ta.reportportal.jooq.tables.JIssueTicket.ISSUE_TICKET;
import static com.epam.ta.reportportal.jooq.tables.JLaunch.LAUNCH;
import static com.epam.ta.reportportal.jooq.tables.JProject.PROJECT;
import static com.epam.ta.reportportal.jooq.tables.JTestItem.TEST_ITEM;
import static com.epam.ta.reportportal.jooq.tables.JTestItemResults.TEST_ITEM_RESULTS;
import static com.epam.ta.reportportal.jooq.tables.JTicket.TICKET;
import static com.epam.ta.reportportal.jooq.tables.JUsers.USERS;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.*;
import static org.jooq.impl.DSL.*;
/**
* Repository that contains queries of content loading for widgets.
*
* @author Pavel Bortnik
*/
@Repository
public class WidgetContentRepositoryImpl implements WidgetContentRepository {
    // jOOQ DSL context through which every widget query below is built and executed.
    @Autowired
    private DSLContext dsl;

    // Provider chain that assembles content for the health-check-table widget
    // (see healthCheckTable-related methods elsewhere in this repository class).
    @Autowired
    private WidgetProviderChain<HealthCheckTableGetParams, List<HealthCheckTableContent>> healthCheckTableChain;

    // Every test item type whose enum name mentions METHOD or CLASS
    // (before/after methods and classes); used to widen the step-level type
    // condition when a widget opts into including methods.
    private static final List<JTestItemTypeEnum> HAS_METHOD_OR_CLASS = Arrays.stream(JTestItemTypeEnum.values()).filter(it -> {
        String name = it.name();
        return name.contains("METHOD") || name.contains("CLASS");
    }).collect(Collectors.toList());
    /**
     * Aggregates statistics counters over the launches selected by the filter.
     *
     * @param filter        launch selection filter
     * @param sort          sorting applied when choosing the launch subset
     * @param contentFields names of statistics fields to sum; other fields are excluded
     * @param latest        if true, the query builder restricts to latest launches only
     * @param limit         maximum number of launches taken into the aggregation
     * @return per-statistics-field sums wrapped into {@link OverallStatisticsContent}
     */
    @Override
    public OverallStatisticsContent overallStatisticsContent(Filter filter, Sort sort, List<String> contentFields, boolean latest,
            int limit) {
        // CTE "LAUNCHES" materializes the ids of the filtered/sorted/limited launches.
        return OVERALL_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES)
                .as(QueryUtils.createQueryBuilderWithLatestLaunchesOption(filter, sort, latest).with(sort).with(limit).build())
                .select(STATISTICS_FIELD.NAME, sum(STATISTICS.S_COUNTER).as(SUM))
                .from(LAUNCH)
                .join(LAUNCHES)
                .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
                // left join keeps launches that have no statistics rows at all
                .leftJoin(STATISTICS)
                .on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID))
                .join(STATISTICS_FIELD)
                .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
                .where(STATISTICS_FIELD.NAME.in(contentFields))
                .groupBy(STATISTICS_FIELD.NAME)
                .fetch());
    }
    /**
     * Builds a condition matching step-level test item types.
     * {@link JTestItemTypeEnum#STEP} is always matched; when {@code includeMethods}
     * is {@code true}, the before/after method and class types collected in
     * {@code HAS_METHOD_OR_CLASS} are matched as well.
     *
     * @param includeMethods whether to also match *METHOD* / *CLASS* item types
     * @return {@link Condition} on {@code TEST_ITEM.TYPE}
     */
    private Condition itemTypeStepCondition(boolean includeMethods) {
        List<JTestItemTypeEnum> itemTypes = Lists.newArrayList(JTestItemTypeEnum.STEP);
        if (includeMethods) {
            itemTypes.addAll(HAS_METHOD_OR_CLASS);
        }
        return TEST_ITEM.TYPE.in(itemTypes);
    }
    /**
     * Loads the "most failed" style top list: items aggregated by unique id with
     * per-launch criteria-hit history, total executions, and criteria-hit count.
     * Only items with at least one criteria hit are returned, ordered by hit count
     * descending (ties broken by fewer total executions), capped at
     * {@code MOST_FAILED_CRITERIA_LIMIT}.
     *
     * @param filter         launch selection filter
     * @param criteria       statistics field name or status-based criteria key
     * @param limit          number of launches considered (history depth)
     * @param includeMethods whether before/after methods & classes count as steps
     */
    @Override
    public List<CriteriaHistoryItem> topItemsByCriteria(Filter filter, String criteria, int limit, boolean includeMethods) {
        // Per-item flag table: item_id -> criteria hit flag sum over selected launches.
        Table<Record2<Long, BigDecimal>> criteriaTable = getTopItemsCriteriaTable(filter, criteria, limit, includeMethods);
        return dsl.select(TEST_ITEM.UNIQUE_ID,
                TEST_ITEM.NAME,
                // true/false per launch execution, ordered by launch number => history array
                DSL.arrayAgg(when(fieldName(criteriaTable.getName(), CRITERIA_FLAG).cast(Integer.class).ge(1), true).otherwise(false))
                        .orderBy(LAUNCH.NUMBER.asc())
                        .as(STATUS_HISTORY),
                DSL.arrayAgg(TEST_ITEM.START_TIME).orderBy(LAUNCH.NUMBER.asc()).as(START_TIME_HISTORY),
                DSL.sum(fieldName(criteriaTable.getName(), CRITERIA_FLAG).cast(Integer.class)).as(CRITERIA),
                DSL.count(TEST_ITEM.ITEM_ID).as(TOTAL)
        )
                .from(TEST_ITEM)
                .join(criteriaTable)
                .on(TEST_ITEM.ITEM_ID.eq(fieldName(criteriaTable.getName(), ITEM_ID).cast(Long.class)))
                .join(LAUNCH)
                .on(TEST_ITEM.LAUNCH_ID.eq(LAUNCH.ID))
                .groupBy(TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME)
                // drop items that never matched the criteria
                .having(DSL.sum(fieldName(criteriaTable.getName(), CRITERIA_FLAG).cast(Integer.class)).greaterThan(BigDecimal.ZERO))
                .orderBy(DSL.field(DSL.name(CRITERIA)).desc(), DSL.field(DSL.name(TOTAL)).asc())
                .limit(MOST_FAILED_CRITERIA_LIMIT)
                .fetchInto(CriteriaHistoryItem.class);
    }
    /**
     * Builds the derived table (item_id, criteria_flag sum) used by
     * {@link #topItemsByCriteria}. Launches are taken newest-first by start time,
     * limited to {@code limit}; only leaf step-level items with statistics and no
     * children participate.
     */
    private Table<Record2<Long, BigDecimal>> getTopItemsCriteriaTable(Filter filter, String criteria, int limit, boolean includeMethods) {
        Sort launchSort = Sort.by(Sort.Direction.DESC, CRITERIA_START_TIME);
        Table<? extends Record> launchesTable = QueryBuilder.newBuilder(filter, collectJoinFields(filter, launchSort))
                .with(limit)
                .with(launchSort)
                .build()
                .asTable(LAUNCHES);
        return getCommonMostFailedQuery(criteria, launchesTable).where(itemTypeStepCondition(includeMethods))
                .and(TEST_ITEM.HAS_STATS.eq(Boolean.TRUE))
                .and(TEST_ITEM.HAS_CHILDREN.eq(false))
                .groupBy(TEST_ITEM.ITEM_ID)
                .asTable(CRITERIA_TABLE);
    }
    /**
     * Chooses the criteria-flag sub-query variant: a criteria ending in the
     * "failed"/"skipped" execution counter suffix is translated to an item-status
     * comparison; any other criteria is matched against a statistics field name.
     *
     * @throws ReportPortalException if the status suffix cannot be resolved to a {@link StatusEnum}
     */
    private SelectOnConditionStep<Record2<Long, BigDecimal>> getCommonMostFailedQuery(String criteria,
            Table<? extends Record> launchesTable) {
        if (StringUtils.endsWithAny(criteria,
                StatusEnum.FAILED.getExecutionCounterField(),
                StatusEnum.SKIPPED.getExecutionCounterField()
        )) {
            // e.g. "statistics$executions$failed" -> FAILED
            StatusEnum status = StatusEnum.fromValue(StringUtils.substringAfterLast(criteria, STATISTICS_SEPARATOR))
                    .orElseThrow(() -> new ReportPortalException(ErrorType.UNCLASSIFIED_REPORT_PORTAL_ERROR));
            return statusCriteriaTable(JStatusEnum.valueOf(status.name()), launchesTable);
        } else {
            return statisticsCriteriaTable(criteria, launchesTable);
        }
    }
    /**
     * Criteria-flag query keyed on a statistics field name: the flag is 1 for
     * statistics rows whose field name equals {@code criteria}, 0 otherwise,
     * summed per item by the caller's GROUP BY.
     */
    private SelectOnConditionStep<Record2<Long, BigDecimal>> statisticsCriteriaTable(String criteria,
            Table<? extends Record> launchesTable) {
        return dsl.select(TEST_ITEM.ITEM_ID, sum(when(STATISTICS_FIELD.NAME.eq(criteria), 1).otherwise(ZERO_QUERY_VALUE)).as(CRITERIA_FLAG))
                .from(TEST_ITEM)
                .join(launchesTable)
                .on(TEST_ITEM.LAUNCH_ID.eq(fieldName(launchesTable.getName(), ID).cast(Long.class)))
                .join(STATISTICS)
                .on(TEST_ITEM.ITEM_ID.eq(STATISTICS.ITEM_ID))
                .join(STATISTICS_FIELD)
                .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID));
    }
    /**
     * Criteria-flag query keyed on item result status: the flag is 1 when the
     * item's result status equals {@code criteria}, 0 otherwise.
     */
    private SelectOnConditionStep<Record2<Long, BigDecimal>> statusCriteriaTable(JStatusEnum criteria,
            Table<? extends Record> launchesTable) {
        return dsl.select(TEST_ITEM.ITEM_ID,
                sum(when(TEST_ITEM_RESULTS.STATUS.eq(criteria), 1).otherwise(ZERO_QUERY_VALUE)).as(CRITERIA_FLAG)
        )
                .from(TEST_ITEM)
                .join(launchesTable)
                .on(TEST_ITEM.LAUNCH_ID.eq(fieldName(launchesTable.getName(), ID).cast(Long.class)))
                .join(TEST_ITEM_RESULTS)
                .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID));
    }
    /**
     * Loads the flaky-cases table: for each unique test item across the last
     * {@code limit} launches, counts how often its result status flipped between
     * consecutive executions (via a LAG window over executions ordered newest-first
     * per unique id). Only items executed more than once with at least one status
     * switch are returned, ordered by switch count desc, then total asc.
     *
     * @param filter         launch selection filter
     * @param includeMethods whether before/after methods & classes count as steps
     * @param limit          number of most recent launches (by launch number) considered
     */
    @Override
    public List<FlakyCasesTableContent> flakyCasesStatistics(Filter filter, boolean includeMethods, int limit) {
        return FLAKY_CASES_TABLE_FETCHER.apply(dsl.select(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.UNIQUE_ID.getName())).as(UNIQUE_ID),
                field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.NAME.getName())).as(ITEM_NAME),
                DSL.arrayAgg(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM_RESULTS.STATUS.getName()))).as(STATUSES),
                DSL.max(field(name(FLAKY_TABLE_RESULTS, START_TIME))).as(START_TIME_HISTORY),
                sum(field(name(FLAKY_TABLE_RESULTS, SWITCH_FLAG)).cast(Long.class)).as(FLAKY_COUNT),
                // minus(1): N executions yield at most N-1 possible status switches
                count(field(name(FLAKY_TABLE_RESULTS, ITEM_ID))).minus(1).as(TOTAL)
        )
                .from(dsl.with(LAUNCHES)
                        .as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, Sort.unsorted()))
                                .with(LAUNCH.NUMBER, SortOrder.DESC)
                                .with(limit)
                                .build())
                        .select(TEST_ITEM.ITEM_ID,
                                TEST_ITEM.UNIQUE_ID,
                                TEST_ITEM.NAME,
                                TEST_ITEM.START_TIME,
                                TEST_ITEM_RESULTS.STATUS,
                                // SWITCH_FLAG = 1 when this execution's status differs from the
                                // previous execution of the same unique item (LAG window),
                                // 0 otherwise or on the first row of each unique id.
                                when(TEST_ITEM_RESULTS.STATUS.notEqual(lag(TEST_ITEM_RESULTS.STATUS).over(orderBy(TEST_ITEM.UNIQUE_ID,
                                        TEST_ITEM.START_TIME.desc()
                                )))
                                        .and(TEST_ITEM.UNIQUE_ID.equal(lag(TEST_ITEM.UNIQUE_ID).over(orderBy(TEST_ITEM.UNIQUE_ID,
                                                TEST_ITEM.START_TIME.desc()
                                        )))), 1).otherwise(ZERO_QUERY_VALUE)
                                        .as(SWITCH_FLAG)
                        )
                        .from(LAUNCH)
                        .join(LAUNCHES)
                        .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
                        .join(TEST_ITEM)
                        .on(LAUNCH.ID.eq(TEST_ITEM.LAUNCH_ID))
                        .join(TEST_ITEM_RESULTS)
                        .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID))
                        // leaf step-level items only; retries excluded
                        .where(itemTypeStepCondition(includeMethods))
                        .and(TEST_ITEM.HAS_STATS.eq(Boolean.TRUE))
                        .and(TEST_ITEM.HAS_CHILDREN.eq(false))
                        .and(TEST_ITEM.RETRY_OF.isNull())
                        .groupBy(TEST_ITEM.ITEM_ID, TEST_ITEM_RESULTS.STATUS, TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME, TEST_ITEM.START_TIME)
                        .orderBy(TEST_ITEM.UNIQUE_ID, TEST_ITEM.START_TIME.desc())
                        .asTable(FLAKY_TABLE_RESULTS))
                .groupBy(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.UNIQUE_ID.getName())),
                        field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.NAME.getName()))
                )
                // executed more than once AND switched status at least once
                .having(count(field(name(FLAKY_TABLE_RESULTS, ITEM_ID))).gt(BigDecimal.ONE.intValue())
                        .and(sum(field(name(FLAKY_TABLE_RESULTS, SWITCH_FLAG)).cast(Long.class)).gt(BigDecimal.ZERO)))
                .orderBy(fieldName(FLAKY_COUNT).desc(), fieldName(TOTAL).asc(), fieldName(UNIQUE_ID))
                .limit(FLAKY_CASES_LIMIT)
                .fetch());
    }
    /**
     * Loads per-launch statistics counters for the launch statistics chart:
     * one row per (launch, statistics field) for the requested content fields.
     * Sort fields referenced by the widget are added to the GROUP BY so they can
     * appear in ORDER BY under PostgreSQL grouping rules.
     *
     * @param filter        launch selection filter
     * @param contentFields statistics field names to load
     * @param sort          widget sorting (applied both to launch selection and output)
     * @param limit         maximum number of launches
     */
    @Override
    public List<ChartStatisticsContent> launchStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) {
        List<Field<?>> groupingFields = Lists.newArrayList(field(LAUNCH.ID),
                field(LAUNCH.NUMBER),
                field(LAUNCH.START_TIME),
                field(LAUNCH.NAME),
                fieldName(STATISTICS_TABLE, SF_NAME),
                fieldName(STATISTICS_TABLE, STATISTICS_COUNTER)
        );
        groupingFields.addAll(WidgetSortUtils.fieldTransformer(filter.getTarget()).apply(sort, LAUNCHES));
        return LAUNCHES_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES)
                .as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
                .select(LAUNCH.ID,
                        LAUNCH.NUMBER,
                        LAUNCH.START_TIME,
                        LAUNCH.NAME,
                        fieldName(STATISTICS_TABLE, SF_NAME),
                        fieldName(STATISTICS_TABLE, STATISTICS_COUNTER)
                )
                .from(LAUNCH)
                .join(LAUNCHES)
                .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
                // left join keeps launches that have none of the requested fields
                .leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
                        .from(STATISTICS)
                        .join(STATISTICS_FIELD)
                        .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
                        .where(STATISTICS_FIELD.NAME.in(contentFields))
                        .asTable(STATISTICS_TABLE))
                .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
                .groupBy(groupingFields)
                .orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
                .fetch());
    }
    /**
     * Loads the "to investigate" percentage per launch:
     * 100 * (to-investigate defects) / (all defect totals), rounded to 2 decimals.
     * NULLIF guards against division by zero when a launch has no defects at all
     * (the percentage then becomes NULL rather than an error).
     *
     * @param filter launch selection filter
     * @param sort   widget sorting (also appended to GROUP BY for PostgreSQL)
     * @param limit  maximum number of launches
     */
    @Override
    public List<ChartStatisticsContent> investigatedStatistics(Filter filter, Sort sort, int limit) {
        List<Field<?>> groupingFields = Lists.newArrayList(field(LAUNCH.ID),
                field(LAUNCH.NUMBER),
                field(LAUNCH.START_TIME),
                field(LAUNCH.NAME)
        );
        groupingFields.addAll(WidgetSortUtils.fieldTransformer(filter.getTarget()).apply(sort, LAUNCHES));
        return INVESTIGATED_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES)
                .as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
                .select(LAUNCH.ID,
                        LAUNCH.NUMBER,
                        LAUNCH.START_TIME,
                        LAUNCH.NAME,
                        // numerator: sum of to-investigate defect counters for the launch
                        round(val(PERCENTAGE_MULTIPLIER).mul(dsl.select(sum(STATISTICS.S_COUNTER))
                                .from(STATISTICS)
                                .join(STATISTICS_FIELD)
                                .onKey()
                                .where(STATISTICS_FIELD.NAME.eq(DEFECTS_TO_INVESTIGATE_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
                                .asField()
                                .cast(Double.class))
                                // denominator: all defect-type totals; NULLIF(…, 0) avoids division by zero
                                .div(nullif(dsl.select(sum(STATISTICS.S_COUNTER))
                                        .from(STATISTICS)
                                        .join(STATISTICS_FIELD)
                                        .onKey()
                                        .where(STATISTICS_FIELD.NAME.in(DEFECTS_AUTOMATION_BUG_TOTAL,
                                                DEFECTS_NO_DEFECT_TOTAL,
                                                DEFECTS_TO_INVESTIGATE_TOTAL,
                                                DEFECTS_PRODUCT_BUG_TOTAL,
                                                DEFECTS_SYSTEM_ISSUE_TOTAL
                                        ).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
                                        .asField(), 0)), 2).as(TO_INVESTIGATE)
                )
                .from(LAUNCH)
                .join(LAUNCHES)
                .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
                .leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
                        .from(STATISTICS)
                        .join(STATISTICS_FIELD)
                        .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
                        .asTable(STATISTICS_TABLE))
                .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
                .groupBy(groupingFields)
                .orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
                .fetch());
    }
/**
 * Timeline variant of investigated statistics: selects the raw counters per launch
 * (to-investigate total and the sum of all defect-type totals) instead of a
 * percentage; both subquery results are coalesced to 0 when a launch has no
 * matching statistics rows.
 *
 * @param filter launch filter used to build the LAUNCHES CTE
 * @param sort   sorting applied inside the CTE and to the final result
 * @param limit  maximum number of launches
 * @return rows mapped via TIMELINE_INVESTIGATED_STATISTICS_RECORD_MAPPER
 */
@Override
public List<ChartStatisticsContent> timelineInvestigatedStatistics(Filter filter, Sort sort, int limit) {
List<Field<?>> groupingFields = Lists.newArrayList(field(LAUNCH.ID),
field(LAUNCH.NUMBER),
field(LAUNCH.START_TIME),
field(LAUNCH.NAME)
);
groupingFields.addAll(WidgetSortUtils.fieldTransformer(filter.getTarget()).apply(sort, LAUNCHES));
return dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
LAUNCH.NAME,
// raw to-investigate counter per launch, 0 when absent
coalesce(DSL.select(sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.onKey()
.where(STATISTICS_FIELD.NAME.eq(DEFECTS_TO_INVESTIGATE_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
.asField()
.cast(Double.class), 0).as(TO_INVESTIGATE),
// sum over all defect-type totals per launch, 0 when absent
coalesce(DSL.select(sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.onKey()
.where(STATISTICS_FIELD.NAME.in(DEFECTS_AUTOMATION_BUG_TOTAL,
DEFECTS_NO_DEFECT_TOTAL,
DEFECTS_TO_INVESTIGATE_TOTAL,
DEFECTS_PRODUCT_BUG_TOTAL,
DEFECTS_SYSTEM_ISSUE_TOTAL
).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
.asField(), 0).as(INVESTIGATED)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.groupBy(groupingFields)
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
.fetch(TIMELINE_INVESTIGATED_STATISTICS_RECORD_MAPPER);
}
/**
 * Computes the passing-rate summary for the launch selected by the filter: groups
 * the shared passing-rate select by the sort-derived fields and returns the first
 * resulting row.
 *
 * @param filter launch filter
 * @param sort   sorting applied inside the CTE and to the final ordering
 * @param limit  maximum number of launches considered
 * @return the first passing-rate row
 * @throws ReportPortalException when the query yields no rows
 */
@Override
public PassingRateStatisticsResult passingRatePerLaunchStatistics(Filter filter, Sort sort, int limit) {
    List<Field<Object>> sortDerivedFields = WidgetSortUtils.fieldTransformer(filter.getTarget()).apply(sort, LAUNCHES);
    List<PassingRateStatisticsResult> results = buildPassingRateSelect(filter, sort, limit)
            .groupBy(sortDerivedFields)
            .orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
            .fetchInto(PassingRateStatisticsResult.class);
    if (results.isEmpty()) {
        throw new ReportPortalException("No results for filter were found");
    }
    return results.get(0);
}
/**
 * Computes the summary passing rate across all launches matched by the filter
 * (no per-launch grouping) and returns the single aggregated row.
 *
 * @param filter launch filter
 * @param sort   sorting applied inside the CTE
 * @param limit  maximum number of launches considered
 * @return the aggregated passing-rate row
 * @throws ReportPortalException when the query yields no rows
 */
@Override
public PassingRateStatisticsResult summaryPassingRateStatistics(Filter filter, Sort sort, int limit) {
    List<PassingRateStatisticsResult> results = buildPassingRateSelect(filter, sort, limit)
            .fetchInto(PassingRateStatisticsResult.class);
    if (results.isEmpty()) {
        throw new ReportPortalException("No results for filter were found");
    }
    return results.get(0);
}
/**
 * Builds the cases-growth-trend data: for each launch, selects the counter of the
 * single {@code contentField} plus a DELTA column — the difference against the
 * previous launch's counter computed with a {@code lag()} window ordered by start
 * time (coalesced to 0 for the first launch).
 *
 * @param filter       launch filter used to build the LAUNCHES CTE
 * @param contentField the single statistics field whose growth is charted
 * @param sort         sorting applied inside the CTE (result is ordered by start time)
 * @param limit        maximum number of launches
 * @return chart contents assembled by CASES_GROWTH_TREND_FETCHER
 */
@Override
public List<ChartStatisticsContent> casesTrendStatistics(Filter filter, String contentField, Sort sort, int limit) {
List<Field<?>> groupingFields = Lists.newArrayList(field(LAUNCH.ID),
field(LAUNCH.NUMBER),
field(LAUNCH.START_TIME),
field(LAUNCH.NAME),
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER)
);
groupingFields.addAll(WidgetSortUtils.fieldTransformer(filter.getTarget()).apply(sort, LAUNCHES));
return CASES_GROWTH_TREND_FETCHER.apply(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
LAUNCH.NAME,
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER),
// DELTA = counter - lag(counter) over launches ordered by start time
coalesce(fieldName(STATISTICS_TABLE, STATISTICS_COUNTER).sub(lag(fieldName(STATISTICS_TABLE,
STATISTICS_COUNTER
)).over().orderBy(LAUNCH.START_TIME.asc())), 0).as(DELTA)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.eq(contentField))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.groupBy(groupingFields)
.orderBy(LAUNCH.START_TIME.asc())
.fetch(), contentField);
}
/**
 * Builds bug-trend data: one row per (launch, statistics field) for the defect
 * counters listed in {@code contentFields}, ordered chronologically by launch
 * start time.
 *
 * @param filter        launch filter used to build the LAUNCHES CTE
 * @param contentFields statistics field names (defect counters) to include
 * @param sort          sorting applied inside the CTE
 * @param limit         maximum number of launches
 * @return chart contents assembled by BUG_TREND_STATISTICS_FETCHER
 */
@Override
public List<ChartStatisticsContent> bugTrendStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) {
return BUG_TREND_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
fieldName(STATISTICS_TABLE, SF_NAME),
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(contentFields))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.orderBy(LAUNCH.START_TIME.asc())
.fetch());
}
/**
 * Builds launch-comparison data as a UNION ALL of two queries:
 * 1) execution statistics — the raw counter for {@code EXECUTIONS_TOTAL}, and for every
 *    other execution field a percentage of the non-total execution sum;
 * 2) defect statistics — each defect counter as a percentage of the total defect sum.
 * Both divisions are guarded with {@code nullif(..., 0)} against empty statistics.
 *
 * @param filter        launch filter used to build the LAUNCHES CTE
 * @param contentFields mixed list of execution and defect statistics field names
 * @param sort          sorting applied inside the CTE and to both union branches
 * @param limit         maximum number of launches
 * @return chart contents assembled by LAUNCHES_STATISTICS_FETCHER
 */
@Override
public List<ChartStatisticsContent> launchesComparisonStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) {
// Split requested fields by their key prefix into execution vs defect groups.
List<String> executionStatisticsFields = contentFields.stream().filter(cf -> cf.contains(EXECUTIONS_KEY)).collect(toList());
List<String> defectStatisticsFields = contentFields.stream().filter(cf -> cf.contains(DEFECTS_KEY)).collect(toList());
return LAUNCHES_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
field(name(STATISTICS_TABLE, SF_NAME), String.class),
// EXECUTIONS_TOTAL is kept as a raw count; other execution fields become percentages
when(field(name(STATISTICS_TABLE, SF_NAME)).equalIgnoreCase(EXECUTIONS_TOTAL),
field(name(STATISTICS_TABLE, STATISTICS_COUNTER)).cast(Double.class)
).otherwise(round(val(PERCENTAGE_MULTIPLIER).mul(field(name(STATISTICS_TABLE, STATISTICS_COUNTER), Integer.class))
.div(nullif(DSL.select(DSL.sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))
.and(STATISTICS_FIELD.NAME.in(executionStatisticsFields)
.and(STATISTICS_FIELD.NAME.notEqual(EXECUTIONS_TOTAL))), 0).cast(Double.class)), 2))
.as(fieldName(STATISTICS_TABLE, STATISTICS_COUNTER))
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(executionStatisticsFields))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
// second branch: defect counters as percentages of the defect sum
.unionAll(DSL.select(LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
field(name(STATISTICS_TABLE, SF_NAME), String.class),
round(val(PERCENTAGE_MULTIPLIER).mul(field(name(STATISTICS_TABLE, STATISTICS_COUNTER), Integer.class))
.div(nullif(DSL.select(DSL.sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))
.and(STATISTICS_FIELD.NAME.in(defectStatisticsFields)), 0).cast(Double.class)), 2)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID,
STATISTICS.S_COUNTER.as(STATISTICS_COUNTER),
STATISTICS_FIELD.NAME.as(SF_NAME)
)
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(defectStatisticsFields))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES)))
.fetch());
}
/**
 * Builds launch-duration data: launch metadata plus a DURATION column computed as
 * the timestamp difference between end and start time.
 *
 * @param filter   launch filter
 * @param sort     sorting applied inside the CTE and to the final result
 * @param isLatest whether only the latest launch per name is considered
 * @param limit    maximum number of launches
 * @return duration rows mapped into LaunchesDurationContent
 */
@Override
public List<LaunchesDurationContent> launchesDurationStatistics(Filter filter, Sort sort, boolean isLatest, int limit) {
return dsl.with(LAUNCHES)
.as(QueryUtils.createQueryBuilderWithLatestLaunchesOption(filter, sort, isLatest).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
LAUNCH.STATUS,
LAUNCH.START_TIME,
LAUNCH.END_TIME,
timestampDiff(LAUNCH.END_TIME, LAUNCH.START_TIME).as(DURATION)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
.fetchInto(LaunchesDurationContent.class);
}
/**
 * Builds not-passed-cases data: per launch, the total executions counter plus a
 * PERCENTAGE column = 100 * (skipped + failed) / total, rounded to 2 decimals.
 * {@code nullif(..., 0)} guards against a zero total; {@code coalesce(..., 0)}
 * turns a NULL percentage into 0.
 *
 * @param filter launch filter used to build the LAUNCHES CTE
 * @param sort   sorting applied inside the CTE (result is ordered by start time)
 * @param limit  maximum number of launches
 * @return rows mapped via NOT_PASSED_CASES_CONTENT_RECORD_MAPPER
 */
@Override
public List<NotPassedCasesContent> notPassedCasesStatistics(Filter filter, Sort sort, int limit) {
return dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER),
coalesce(round(val(PERCENTAGE_MULTIPLIER).mul(DSL.select(DSL.sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(EXECUTIONS_SKIPPED, EXECUTIONS_FAILED))
.and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))
.asField()
.cast(Double.class))
.div(nullif(field(name(STATISTICS_TABLE, STATISTICS_COUNTER), Integer.class), 0).cast(Double.class)), 2), 0)
.as(PERCENTAGE)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_TOTAL))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.orderBy(LAUNCH.START_TIME.asc())
.fetch(NOT_PASSED_CASES_CONTENT_RECORD_MAPPER);
}
/**
 * Builds the launches-table widget: resolves each non-statistics content field to its
 * query criterion (failing fast on unknown columns), optionally adds attribute
 * columns, and delegates query construction to buildLaunchesTableQuery.
 *
 * NOTE(review): {@code contentFields.remove("attributes")} mutates the caller's
 * list in place — verify callers do not reuse the list expecting "attributes" to
 * still be present.
 *
 * @param filter        launch filter
 * @param contentFields requested columns; "attributes" toggles attribute selection,
 *                      statistics-prefixed entries select counters
 * @param sort          sorting applied to the result
 * @param limit         maximum number of launches
 * @return table rows assembled by LAUNCHES_TABLE_FETCHER
 */
@Override
public List<LaunchesTableContent> launchesTableStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) {
// filterCriteria -> queryCriteria lookup for resolving content field names
Map<String, String> criteria = filter.getTarget()
.getCriteriaHolders()
.stream()
.collect(Collectors.toMap(CriteriaHolder::getFilterCriteria, CriteriaHolder::getQueryCriteria));
boolean isAttributePresent = contentFields.remove("attributes");
List<Field<?>> selectFields = contentFields.stream()
.filter(cf -> !cf.startsWith(STATISTICS_KEY))
.map(cf -> field(ofNullable(criteria.get(cf)).orElseThrow(() -> new ReportPortalException(Suppliers.formattedSupplier(
"Unknown table field - '{}'",
cf
).get()))))
.collect(Collectors.toList());
Collections.addAll(selectFields, LAUNCH.ID, fieldName(STATISTICS_TABLE, STATISTICS_COUNTER), fieldName(STATISTICS_TABLE, SF_NAME));
if (isAttributePresent) {
Collections.addAll(selectFields, ITEM_ATTRIBUTE.ID.as(ATTR_ID), ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE);
}
List<String> statisticsFields = contentFields.stream().filter(cf -> cf.startsWith(STATISTICS_KEY)).collect(toList());
return LAUNCHES_TABLE_FETCHER.apply(buildLaunchesTableQuery(selectFields, statisticsFields, filter, sort, limit, isAttributePresent)
.fetch(), contentFields);
}
/**
 * Builds the activity-stream widget: activities matched by the filter joined with
 * the acting user and owning project, mapped into ActivityResource.
 *
 * @param filter activity filter used to build the ACTIVITIES CTE
 * @param sort   sorting applied inside the CTE and to the final result
 * @param limit  maximum number of activities
 * @return mapped activity resources
 */
@Override
public List<ActivityResource> activityStatistics(Filter filter, Sort sort, int limit) {
return dsl.with(ACTIVITIES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(ACTIVITY.ID,
ACTIVITY.ACTION,
ACTIVITY.ENTITY,
ACTIVITY.CREATION_DATE,
ACTIVITY.DETAILS,
ACTIVITY.PROJECT_ID,
ACTIVITY.OBJECT_ID,
USERS.LOGIN,
PROJECT.NAME
)
.from(ACTIVITY)
.join(ACTIVITIES)
.on(fieldName(ACTIVITIES, ID).cast(Long.class).eq(ACTIVITY.ID))
.join(USERS)
.on(ACTIVITY.USER_ID.eq(USERS.ID))
.join(PROJECT)
.on(ACTIVITY.PROJECT_ID.eq(PROJECT.ID))
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, ACTIVITIES))
.fetch()
.map(ACTIVITY_MAPPER);
}
/**
 * Builds unique-bug widget content: tickets attached (via issues) to test items of
 * the selected launches, together with each referencing item and its non-system
 * attributes, ordered by newest ticket submit date.
 *
 * @param filter   launch filter
 * @param sort     launch sorting applied inside the CTE
 * @param isLatest whether only the latest launch per name is considered
 * @param limit    maximum number of launches
 * @return map keyed as produced by UNIQUE_BUG_CONTENT_FETCHER (ticket id -> content)
 */
@Override
public Map<String, UniqueBugContent> uniqueBugStatistics(Filter filter, Sort sort, boolean isLatest, int limit) {
    // Return the fetcher result directly — the former intermediate local variable
    // added nothing.
    return UNIQUE_BUG_CONTENT_FETCHER.apply(dsl.with(LAUNCHES)
            .as(QueryUtils.createQueryBuilderWithLatestLaunchesOption(filter, sort, isLatest).with(limit).with(sort).build())
            .select(TICKET.TICKET_ID,
                    TICKET.SUBMIT_DATE,
                    TICKET.URL,
                    TICKET.SUBMITTER,
                    TEST_ITEM.ITEM_ID,
                    TEST_ITEM.NAME,
                    TEST_ITEM.PATH,
                    TEST_ITEM.LAUNCH_ID,
                    fieldName(ITEM_ATTRIBUTES, KEY),
                    fieldName(ITEM_ATTRIBUTES, VALUE)
            )
            .from(TEST_ITEM)
            .join(LAUNCHES)
            .on(fieldName(LAUNCHES, ID).cast(Long.class).eq(TEST_ITEM.LAUNCH_ID))
            .join(TEST_ITEM_RESULTS)
            .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID))
            .leftJoin(ISSUE)
            .on(TEST_ITEM.ITEM_ID.eq(ISSUE.ISSUE_ID))
            .leftJoin(ISSUE_TICKET)
            .on(ISSUE.ISSUE_ID.eq(ISSUE_TICKET.ISSUE_ID))
            .join(TICKET)
            .on(ISSUE_TICKET.TICKET_ID.eq(TICKET.ID))
            // lateral subquery pulls only the non-system attributes of each item
            .leftJoin(lateral(dsl.select(ITEM_ATTRIBUTE.ITEM_ID, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE)
                    .from(ITEM_ATTRIBUTE)
                    .where(ITEM_ATTRIBUTE.ITEM_ID.eq(TEST_ITEM.ITEM_ID).andNot(ITEM_ATTRIBUTE.SYSTEM))).as(ITEM_ATTRIBUTES))
            .on(TEST_ITEM.ITEM_ID.eq(fieldName(ITEM_ATTRIBUTES, ITEM_ID).cast(Long.class)))
            .orderBy(TICKET.SUBMIT_DATE.desc())
            .fetch());
}
/**
 * Builds the product-status widget grouped by filter: one grouped query per
 * (filter, sort) pair is built, all of them are combined with UNION ALL, and a
 * synthetic TOTAL entry aggregating over all filters is appended to the result map.
 *
 * @param filterSortMapping filter -> sort pairs, one per widget column group
 * @param contentFields     requested content/statistics field names
 * @param customColumns     custom column name -> attribute key mapping
 * @param isLatest          whether only the latest launch per name is considered
 * @param limit             maximum number of launches per filter
 * @return filter name (plus TOTAL) -> product status rows
 * @throws ReportPortalException when no query could be built (empty mapping)
 */
@Override
public Map<String, List<ProductStatusStatisticsContent>> productStatusGroupedByFilterStatistics(Map<Filter, Sort> filterSortMapping,
List<String> contentFields, Map<String, String> customColumns, boolean isLatest, int limit) {
    // UNION ALL all per-filter queries. The previous implementation collected to a
    // list and re-streamed it, and assigned to the lambda parameter inside reduce;
    // both were redundant and are removed here.
    Select<? extends Record> select = filterSortMapping.entrySet()
            .stream()
            .map(f -> (Select<? extends Record>) buildFilterGroupedQuery(f.getKey(),
                    isLatest,
                    f.getValue(),
                    limit,
                    contentFields,
                    customColumns
            ))
            .reduce((prev, curr) -> prev.unionAll(curr))
            .orElseThrow(() -> new ReportPortalException(ErrorType.BAD_REQUEST_ERROR,
                    "Query building for Product Status Widget failed"
            ));
    Map<String, List<ProductStatusStatisticsContent>> productStatusContent = PRODUCT_STATUS_FILTER_GROUPED_FETCHER.apply(select.fetch(),
            customColumns
    );
    // Append the aggregated TOTAL row across all filters.
    productStatusContent.put(TOTAL, countFilterTotalStatistics(productStatusContent));
    return productStatusContent;
}
/**
 * Builds the product-status widget grouped by launches: runs the shared product
 * status query with the common field set and, when any rows exist, appends an
 * aggregated total row.
 *
 * @param filter        launch filter
 * @param contentFields requested content/statistics field names
 * @param customColumns custom column name -> attribute key mapping
 * @param sort          sorting applied inside the CTE and to the final result
 * @param isLatest      whether only the latest launch per name is considered
 * @param limit         maximum number of launches
 * @return product status rows plus a trailing total row (when non-empty)
 */
@Override
public List<ProductStatusStatisticsContent> productStatusGroupedByLaunchesStatistics(Filter filter, List<String> contentFields,
Map<String, String> customColumns, Sort sort, boolean isLatest, int limit) {
List<Field<?>> selectFields = getCommonProductStatusFields(filter, contentFields);
List<ProductStatusStatisticsContent> productStatusStatisticsResult = PRODUCT_STATUS_LAUNCH_GROUPED_FETCHER.apply(buildProductStatusQuery(
filter,
isLatest,
sort,
limit,
selectFields,
contentFields,
customColumns
).orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES)).fetch(),
customColumns
);
// Only append the synthetic total row when there is anything to aggregate.
if (!productStatusStatisticsResult.isEmpty()) {
productStatusStatisticsResult.add(countLaunchTotalStatistics(productStatusStatisticsResult));
}
return productStatusStatisticsResult;
}
/**
 * Builds the most-time-consuming-test-cases widget: test items matched by the
 * filter with their results, ordered by duration descending.
 *
 * NOTE(review): {@code fieldName(TEST_ITEM_RESULTS.DURATION)} passes a TableField
 * where the other call sites pass name strings — verify the ordering actually
 * resolves to the results' duration column and not to a stringified field name.
 *
 * @param filter test item filter used to build the ITEMS CTE
 * @param limit  maximum number of items
 * @return rows mapped into MostTimeConsumingTestCasesContent
 */
@Override
public List<MostTimeConsumingTestCasesContent> mostTimeConsumingTestCasesStatistics(Filter filter, int limit) {
return dsl.with(ITEMS)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter)).with(limit).build())
.select(TEST_ITEM.ITEM_ID.as(ID),
TEST_ITEM.UNIQUE_ID,
TEST_ITEM.NAME,
TEST_ITEM.TYPE,
TEST_ITEM.PATH,
TEST_ITEM.START_TIME,
TEST_ITEM_RESULTS.END_TIME,
TEST_ITEM_RESULTS.DURATION,
TEST_ITEM_RESULTS.STATUS
)
.from(TEST_ITEM)
.join(ITEMS)
.on(fieldName(ITEMS, ID).cast(Long.class).eq(TEST_ITEM.ITEM_ID))
.join(TEST_ITEM_RESULTS)
.on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID))
.orderBy(fieldName(TEST_ITEM_RESULTS.DURATION).desc())
.fetchInto(MostTimeConsumingTestCasesContent.class);
}
/**
 * Builds top-pattern-templates data. First aggregates launch ids per attribute value
 * (latest mode keeps only max(launch id) per name, otherwise all ids are collected),
 * limited to the top {@code attributesLimit} attribute values ordered with a
 * version-aware sort (numeric-array ordering for version-like values, plain string
 * sort otherwise). Then delegates to a pattern query — grouped by pattern name when
 * {@code patternName} is given, otherwise ungrouped.
 *
 * @param filter          launch filter
 * @param sort            launch sorting
 * @param attributeKey    optional attribute key restriction (null = no restriction)
 * @param patternName     optional pattern name to group by
 * @param isLatest        whether only the latest launch per name is aggregated
 * @param launchesLimit   maximum number of launches
 * @param attributesLimit maximum number of attribute values
 * @return pattern template contents
 */
@Override
public List<TopPatternTemplatesContent> patternTemplate(Filter filter, Sort sort, @Nullable String attributeKey,
@Nullable String patternName, boolean isLatest, int launchesLimit, int attributesLimit) {
Condition attributeKeyCondition = ofNullable(attributeKey).map(ITEM_ATTRIBUTE.KEY::eq).orElseGet(DSL::noCondition);
// latest mode: a single max launch id; otherwise: array of all matching launch ids
Field<?> launchIdsField = isLatest ? DSL.max(LAUNCH.ID).as(ID) : DSL.arrayAgg(LAUNCH.ID).as(ID);
List<Field<?>> groupingFields = isLatest ?
Lists.newArrayList(LAUNCH.NAME, ITEM_ATTRIBUTE.VALUE) :
Lists.newArrayList(ITEM_ATTRIBUTE.VALUE);
Map<String, List<Long>> attributeIdsMapping = PATTERN_TEMPLATES_AGGREGATION_FETCHER.apply(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(launchesLimit).build())
.select(launchIdsField, ITEM_ATTRIBUTE.VALUE)
.from(LAUNCH)
.join(LAUNCHES)
.on(fieldName(LAUNCHES, ID).cast(Long.class).eq(LAUNCH.ID))
.join(ITEM_ATTRIBUTE)
.on(LAUNCH.ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID))
.where(attributeKeyCondition)
.and(ITEM_ATTRIBUTE.VALUE.in(dsl.select(ITEM_ATTRIBUTE.VALUE)
.from(ITEM_ATTRIBUTE)
.join(LAUNCHES)
.on(fieldName(LAUNCHES, ID).cast(Long.class).eq(ITEM_ATTRIBUTE.LAUNCH_ID))
.where(attributeKeyCondition)
.groupBy(ITEM_ATTRIBUTE.VALUE)
// version-like values sort numerically (split on the delimiter), others as strings
.orderBy(DSL.when(ITEM_ATTRIBUTE.VALUE.likeRegex(VERSION_PATTERN),
PostgresDSL.stringToArray(ITEM_ATTRIBUTE.VALUE, VERSION_DELIMITER).cast(Integer[].class)
), ITEM_ATTRIBUTE.VALUE.sort(SortOrder.ASC))
.limit(attributesLimit)))
.groupBy(groupingFields)
.orderBy(DSL.when(ITEM_ATTRIBUTE.VALUE.likeRegex(VERSION_PATTERN),
PostgresDSL.stringToArray(ITEM_ATTRIBUTE.VALUE, VERSION_DELIMITER).cast(Integer[].class)
), ITEM_ATTRIBUTE.VALUE.sort(SortOrder.ASC))
.fetch(), isLatest);
return StringUtils.isBlank(patternName) ?
buildPatternTemplatesQuery(attributeIdsMapping) :
buildPatternTemplatesQueryGroupedByPattern(attributeIdsMapping, patternName);
}
/**
 * Builds component-health-check data: for each value of the current-level attribute
 * key, counts matching test items and computes the passing rate
 * (100 * passed / total, nullif-guarded), ordered by passing rate.
 * Items are taken from the test-item filter joined to the launches selected by the
 * launch filter.
 *
 * @param launchFilter    launch-level filter
 * @param launchSort      launch sorting
 * @param isLatest        whether only the latest launch per name is considered
 * @param launchesLimit   maximum number of launches
 * @param testItemFilter  item-level filter
 * @param currentLevelKey attribute key defining the current grouping level
 * @return health check contents assembled by COMPONENT_HEALTH_CHECK_FETCHER
 */
@Override
public List<ComponentHealthCheckContent> componentHealthCheck(Filter launchFilter, Sort launchSort, boolean isLatest, int launchesLimit,
Filter testItemFilter, String currentLevelKey) {
Table<? extends Record> launchesTable = QueryUtils.createQueryBuilderWithLatestLaunchesOption(launchFilter, launchSort, isLatest)
.with(launchesLimit)
.with(launchSort)
.build()
.asTable(LAUNCHES);
return COMPONENT_HEALTH_CHECK_FETCHER.apply(dsl.select(fieldName(ITEMS, VALUE),
DSL.count(fieldName(ITEMS, ITEM_ID)).as(TOTAL),
// passing rate = 100 * count(passed) / count(all), guarded against zero totals
DSL.round(DSL.val(PERCENTAGE_MULTIPLIER)
.mul(DSL.count(fieldName(ITEMS, ITEM_ID))
.filterWhere(fieldName(ITEMS, STATUS).cast(JStatusEnum.class).eq(JStatusEnum.PASSED)))
.div(DSL.nullif(DSL.count(fieldName(ITEMS, ITEM_ID)), 0)), 2).as(PASSING_RATE)
)
.from(dsl.with(ITEMS)
.as(QueryBuilder.newBuilder(testItemFilter, collectJoinFields(testItemFilter))
.addJointToStart(launchesTable,
JoinType.JOIN,
TEST_ITEM.LAUNCH_ID.eq(fieldName(launchesTable.getName(), ID).cast(Long.class))
)
.build())
.select(TEST_ITEM.ITEM_ID, TEST_ITEM_RESULTS.STATUS, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE)
.from(TEST_ITEM)
.join(ITEMS)
.on(TEST_ITEM.ITEM_ID.eq(fieldName(ITEMS, ID).cast(Long.class)))
.join(TEST_ITEM_RESULTS)
.on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID))
// attributes may be attached either to the item or to its launch
.join(ITEM_ATTRIBUTE)
.on((TEST_ITEM.ITEM_ID.eq(ITEM_ATTRIBUTE.ITEM_ID)
.or(TEST_ITEM.LAUNCH_ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID))).and(ITEM_ATTRIBUTE.KEY.eq(currentLevelKey)
.and(ITEM_ATTRIBUTE.SYSTEM.isFalse())))
.groupBy(TEST_ITEM.ITEM_ID, TEST_ITEM_RESULTS.STATUS, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE)
.asTable(ITEMS))
.groupBy(fieldName(ITEMS, VALUE))
.orderBy(DSL.round(DSL.val(PERCENTAGE_MULTIPLIER)
.mul(DSL.count(fieldName(ITEMS, ITEM_ID))
.filterWhere(fieldName(ITEMS, STATUS).cast(JStatusEnum.class).eq(JStatusEnum.PASSED)))
.div(DSL.nullif(DSL.count(fieldName(ITEMS, ITEM_ID)), 0)), 2))
.fetch());
}
/**
 * Creates (optionally refreshing) the materialized view backing the cumulative
 * trend chart. The first level aggregates launch ids per (launch name, first
 * attribute); when a second attribute is supplied, a second level is UNIONed in,
 * linking each row to its first-level parent via FIRST_LEVEL_ID.
 *
 * @param refresh       when true the existing view is dropped first
 * @param viewName      name of the materialized view to create
 * @param launchFilter  launch filter
 * @param launchesSort  launch sorting
 * @param attributes    one or two attribute keys defining the levels
 * @param launchesLimit maximum number of launches
 */
@Override
public void generateCumulativeTrendChartView(boolean refresh, String viewName, Filter launchFilter, Sort launchesSort,
List<String> attributes, int launchesLimit) {
if (refresh) {
removeWidgetView(viewName);
}
final String FIRST_LEVEL = "first_level";
// Level 1: per (launch name, first-level attribute) keep max launch id plus the
// distinct set of aggregated launch ids.
final SelectJoinStep<Record5<Long, String, Long, String, String>> FIRST_LEVEL_TABLE = dsl.with(FIRST_LEVEL)
.as(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(launchFilter, collectJoinFields(launchFilter))
.with(launchesSort)
.with(launchesLimit)
.build())
.select(max(LAUNCH.ID).as(ID),
LAUNCH.NAME,
arrayAggDistinct(LAUNCH.ID).as(AGGREGATED_LAUNCHES_IDS),
ITEM_ATTRIBUTE.KEY.as(ATTRIBUTE_KEY),
ITEM_ATTRIBUTE.VALUE.as(ATTRIBUTE_VALUE)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.join(ITEM_ATTRIBUTE)
.on(LAUNCH.ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID))
.and(ITEM_ATTRIBUTE.KEY.eq(attributes.get(0)).and(ITEM_ATTRIBUTE.SYSTEM.isFalse()))
.groupBy(LAUNCH.NAME, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE))
.select(fieldName(FIRST_LEVEL, ID).cast(Long.class).as(ID),
fieldName(FIRST_LEVEL, NAME).cast(String.class).as(NAME),
// first-level rows have no parent: FIRST_LEVEL_ID is NULL by construction
val(null, fieldName(FIRST_LEVEL, ID).cast(Long.class)).as(FIRST_LEVEL_ID),
fieldName(FIRST_LEVEL, ATTRIBUTE_KEY).cast(String.class).as(ATTRIBUTE_KEY),
fieldName(FIRST_LEVEL, ATTRIBUTE_VALUE).cast(String.class).as(ATTRIBUTE_VALUE)
)
.from(FIRST_LEVEL);
SelectQuery<Record5<Long, String, Long, String, String>> query;
if (attributes.size() == 2 && attributes.get(1) != null) {
// Level 2: expand each first-level row's aggregated launch ids and re-group by
// the second attribute, keeping the parent first-level id.
final SelectHavingStep<Record5<Long, String, Long, String, String>> SECOND_LEVEL_TABLE = dsl.select(max(LAUNCH.ID).as(ID),
LAUNCH.NAME,
max(fieldName(FIRST_LEVEL, ID)).cast(Long.class).as(FIRST_LEVEL_ID),
ITEM_ATTRIBUTE.KEY.as(ATTRIBUTE_KEY),
ITEM_ATTRIBUTE.VALUE.as(ATTRIBUTE_VALUE)
)
.from(FIRST_LEVEL)
.join(LAUNCH)
.on(Suppliers.formattedSupplier("{} = any({})", LAUNCH.ID, AGGREGATED_LAUNCHES_IDS).get())
.join(ITEM_ATTRIBUTE)
.on(LAUNCH.ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID))
.and(ITEM_ATTRIBUTE.KEY.eq(attributes.get(1)).and(ITEM_ATTRIBUTE.SYSTEM.isFalse()))
.groupBy(LAUNCH.NAME,
fieldName(FIRST_LEVEL, ATTRIBUTE_KEY),
fieldName(FIRST_LEVEL, ATTRIBUTE_VALUE),
ITEM_ATTRIBUTE.KEY,
ITEM_ATTRIBUTE.VALUE
);
query = FIRST_LEVEL_TABLE.union(SECOND_LEVEL_TABLE).getQuery();
} else {
query = FIRST_LEVEL_TABLE.getQuery();
}
dsl.execute(DSL.sql(String.format("CREATE MATERIALIZED VIEW %s AS (%s)", DSL.name(viewName), query.toString())));
}
/**
 * Reads cumulative trend data from the pre-generated materialized view: aggregates
 * statistics counters per attribute value for {@code levelAttributeKey}, optionally
 * restricted to children of {@code parentAttribute} ("key:value"), with a
 * version-aware ordering of attribute values. When {@code subAttributeKey} is set,
 * each result entry additionally receives the distinct sub-attribute pairs of its
 * launches (tooltip data).
 *
 * @param viewName          materialized view created by generateCumulativeTrendChartView
 * @param levelAttributeKey attribute key of the requested level
 * @param subAttributeKey   optional key whose values are attached as tooltip content
 * @param parentAttribute   optional "key{separator}value" pair restricting to one parent
 * @return accumulated launches per attribute value
 */
@Override
public List<CumulativeTrendChartEntry> cumulativeTrendChart(String viewName, String levelAttributeKey, @Nullable String subAttributeKey,
@Nullable String parentAttribute) {
final SelectOnConditionStep<? extends Record4> baseQuery = dsl.select(DSL.arrayAgg(fieldName(viewName, ID)).as(LAUNCHES),
fieldName(viewName, ATTRIBUTE_VALUE),
STATISTICS_FIELD.NAME,
sum(STATISTICS.S_COUNTER).as(STATISTICS_COUNTER)
)
.from(viewName)
.join(STATISTICS)
.on(fieldName(viewName, ID).cast(Long.class).eq(STATISTICS.LAUNCH_ID))
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID));
if (parentAttribute != null) {
// restrict to rows whose FIRST_LEVEL_ID points at the given parent key:value pair
String[] split = parentAttribute.split(KEY_VALUE_SEPARATOR);
final SelectConditionStep<Record1<Long>> subLevelLaunches = selectDistinct(fieldName(viewName, ID).cast(Long.class)).from(
viewName)
.where(fieldName(viewName, ATTRIBUTE_KEY).cast(String.class).eq(split[0]))
.and(fieldName(viewName, ATTRIBUTE_VALUE).cast(String.class).eq(split[1]));
baseQuery.where(fieldName(viewName, FIRST_LEVEL_ID).cast(Long.class).in(subLevelLaunches));
}
List<CumulativeTrendChartEntry> accumulatedLaunches = CUMULATIVE_TREND_CHART_FETCHER.apply(baseQuery.where(fieldName(ATTRIBUTE_KEY).cast(
String.class).eq(levelAttributeKey)).groupBy(fieldName(viewName, ATTRIBUTE_VALUE), STATISTICS_FIELD.NAME).orderBy(when(
fieldName(viewName, ATTRIBUTE_VALUE).likeRegex(VERSION_PATTERN),
PostgresDSL.stringToArray(field(name(viewName, ATTRIBUTE_VALUE), String.class), VERSION_DELIMITER).cast(Integer[].class)
), fieldName(viewName, ATTRIBUTE_VALUE).sort(SortOrder.ASC)).fetch());
if (!StringUtils.isEmpty(subAttributeKey)) {
// attach distinct sub-attribute key/value pairs for each entry's launch ids
accumulatedLaunches.forEach(attributeLaunches -> CUMULATIVE_TOOLTIP_FETCHER.accept(
attributeLaunches,
dsl.selectDistinct(fieldName(viewName, ATTRIBUTE_KEY), fieldName(viewName, ATTRIBUTE_VALUE))
.from(viewName)
.where(fieldName(viewName, ATTRIBUTE_KEY).cast(String.class)
.eq(subAttributeKey)
.and(fieldName(viewName, ID).in(attributeLaunches.getContent().getLaunchIds())))
.fetch()
));
}
return accumulatedLaunches;
}
/**
 * Creates (optionally refreshing) the materialized view backing the component
 * health-check table: leaf STEP items (non-retries, with stats, finished) of the
 * selected launches with their attribute key/value pairs; when a custom key is
 * configured, a left join additionally aggregates the distinct values of that
 * custom attribute into CUSTOM_COLUMN.
 *
 * @param refresh       when true the existing view is dropped first
 * @param params        view name, attribute keys, and optional custom key
 * @param launchFilter  launch filter
 * @param launchSort    launch sorting
 * @param launchesLimit maximum number of launches
 * @param isLatest      whether only the latest launch per name is considered
 */
@Override
public void generateComponentHealthCheckTable(boolean refresh, HealthCheckTableInitParams params, Filter launchFilter, Sort launchSort,
int launchesLimit, boolean isLatest) {
if (refresh) {
removeWidgetView(params.getViewName());
}
Table<? extends Record> launchesTable = QueryUtils.createQueryBuilderWithLatestLaunchesOption(launchFilter, launchSort, isLatest)
.with(launchesLimit)
.with(launchSort)
.build()
.asTable(LAUNCHES);
List<Field<?>> selectFields = Lists.newArrayList(TEST_ITEM.ITEM_ID, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE);
// optional aggregated custom column, built from the aliased custom attribute join
ofNullable(params.getCustomKey()).ifPresent(key -> selectFields.add(DSL.arrayAggDistinct(fieldName(CUSTOM_ATTRIBUTE, VALUE))
.filterWhere(fieldName(CUSTOM_ATTRIBUTE, VALUE).isNotNull())
.as(CUSTOM_COLUMN)));
SelectOnConditionStep<Record> baseQuery = select(selectFields).from(TEST_ITEM)
.join(launchesTable)
.on(TEST_ITEM.LAUNCH_ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.join(TEST_ITEM_RESULTS)
.on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID))
.join(ITEM_ATTRIBUTE)
.on(and(TEST_ITEM.ITEM_ID.eq(ITEM_ATTRIBUTE.ITEM_ID).or(TEST_ITEM.LAUNCH_ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID))).and(
ITEM_ATTRIBUTE.KEY.in(params.getAttributeKeys())).and(ITEM_ATTRIBUTE.SYSTEM.isFalse()))
dsl.execute(DSL.sql(Suppliers.formattedSupplier("CREATE MATERIALIZED VIEW {} AS ({})",
DSL.name(params.getViewName()),
ofNullable(params.getCustomKey()).map(key -> {
JItemAttribute customAttribute = ITEM_ATTRIBUTE.as(CUSTOM_ATTRIBUTE);
return baseQuery.leftJoin(customAttribute)
.on(DSL.condition(Operator.OR,
TEST_ITEM.ITEM_ID.eq(customAttribute.ITEM_ID),
TEST_ITEM.LAUNCH_ID.eq(customAttribute.LAUNCH_ID)
)
.and(customAttribute.KEY.eq(key)));
})
.orElse(baseQuery)
// leaf, finished, non-retry STEP items only
.where(TEST_ITEM.HAS_STATS.isTrue()
.and(TEST_ITEM.HAS_CHILDREN.isFalse())
.and(TEST_ITEM.TYPE.eq(JTestItemTypeEnum.STEP))
.and(TEST_ITEM.RETRY_OF.isNull())
.and(TEST_ITEM_RESULTS.STATUS.notEqual(JStatusEnum.IN_PROGRESS)))
.groupBy(TEST_ITEM.ITEM_ID, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE)
.getQuery()
).get()));
}
/**
 * Drops the materialized view backing a widget if it exists; a no-op otherwise.
 *
 * @param viewName name of the materialized view (quoted via {@code DSL.name})
 */
@Override
public void removeWidgetView(String viewName) {
    String dropStatement = Suppliers.formattedSupplier("DROP MATERIALIZED VIEW IF EXISTS {}", DSL.name(viewName)).get();
    dsl.execute(DSL.sql(dropStatement));
}
/**
 * Reads component health-check table content by delegating to the configured
 * {@code healthCheckTableChain}.
 *
 * @param params query parameters for the health-check table
 * @return health-check table rows produced by the chain
 */
@Override
public List<HealthCheckTableContent> componentHealthCheckTable(HealthCheckTableGetParams params) {
return healthCheckTableChain.apply(params);
}
/**
 * Assembles the launches-table select: launches from the filter CTE joined with the
 * requested statistics counters and the owning user; the attribute join is added
 * only when attribute columns were requested.
 *
 * @param selectFields       resolved select columns
 * @param statisticsFields   statistics field names to join
 * @param filter             launch filter
 * @param sort               final ordering
 * @param limit              maximum number of launches
 * @param isAttributePresent whether to left-join launch attributes
 * @return the ordered select, ready to fetch
 */
private SelectSeekStepN<? extends Record> buildLaunchesTableQuery(Collection<Field<?>> selectFields,
Collection<String> statisticsFields, Filter filter, Sort sort, int limit, boolean isAttributePresent) {
SelectOnConditionStep<? extends Record> select = dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(selectFields)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(statisticsFields))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.join(USERS)
.on(LAUNCH.USER_ID.eq(USERS.ID));
if (isAttributePresent) {
select = select.leftJoin(ITEM_ATTRIBUTE).on(LAUNCH.ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID));
}
return select.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES));
}
/**
 * Shared select for passing-rate widgets: sums the passed and total execution
 * counters (via conditional CASE aggregation over the joined statistics rows) and
 * takes the maximum launch number, over the launches matched by the filter.
 *
 * @param filter launch filter used to build the LAUNCHES CTE
 * @param sort   sorting applied inside the CTE
 * @param limit  maximum number of launches
 * @return an ungrouped select with PASSED, TOTAL and NUMBER columns
 */
private SelectOnConditionStep<? extends Record> buildPassingRateSelect(Filter filter, Sort sort, int limit) {
return dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
// CASE-based conditional sums: counter when the field matches, else 0
.select(sum(when(fieldName(STATISTICS_TABLE, SF_NAME).cast(String.class).eq(EXECUTIONS_PASSED),
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER).cast(Integer.class)
).otherwise(0)).as(PASSED), sum(when(fieldName(STATISTICS_TABLE, SF_NAME).cast(String.class).eq(EXECUTIONS_TOTAL),
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER).cast(Integer.class)
).otherwise(0)).as(TOTAL), max(LAUNCH.NUMBER).as(NUMBER))
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(EXECUTIONS_PASSED, EXECUTIONS_TOTAL))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)));
}
/**
 * Builds a product-status query for a single filter, additionally selecting the
 * owning filter's name (as a scalar subquery) so rows can be grouped by filter on
 * the widget side.
 *
 * @param filter        the filter whose launches are queried
 * @param isLatest      whether only the latest launch per name is considered
 * @param sort          ordering applied to the final select
 * @param limit         maximum number of launches
 * @param contentFields requested content/statistics field names
 * @param customColumns custom column name -> attribute key mapping
 * @return the ordered per-filter select
 */
private SelectSeekStepN<? extends Record> buildFilterGroupedQuery(Filter filter, boolean isLatest, Sort sort, int limit,
        Collection<String> contentFields, Map<String, String> customColumns) {
    Field<?> filterNameField = DSL.selectDistinct(FILTER.NAME).from(FILTER).where(FILTER.ID.eq(filter.getId())).asField(FILTER_NAME);
    List<Field<?>> selectFields = getCommonProductStatusFields(filter, contentFields);
    selectFields.add(filterNameField);
    return buildProductStatusQuery(filter,
            isLatest,
            sort,
            limit,
            selectFields,
            contentFields,
            customColumns
    ).orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES));
}
/**
 * Resolves the shared product-status select columns: the non-statistics content
 * fields mapped through the filter's criteria (unknown names are silently skipped),
 * plus launch metadata, statistics columns, a passing-rate expression
 * (100 * passed / total, nullif-guarded, rounded to 2 decimals) and the launch
 * duration.
 *
 * @param filter        filter whose criteria map content field names to columns
 * @param contentFields requested content field names
 * @return mutable list of select fields (callers append to it)
 */
private List<Field<?>> getCommonProductStatusFields(Filter filter, Collection<String> contentFields) {
Map<String, String> criteria = filter.getTarget()
.getCriteriaHolders()
.stream()
.collect(Collectors.toMap(CriteriaHolder::getFilterCriteria, CriteriaHolder::getQueryCriteria));
// unlike launchesTableStatistics, unknown content fields are skipped, not rejected
List<Field<?>> selectFields = contentFields.stream()
.filter(cf -> !cf.startsWith(STATISTICS_KEY))
.map(criteria::get)
.filter(Objects::nonNull)
.map(DSL::field)
.collect(Collectors.toList());
Collections.addAll(selectFields,
LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
fieldName(STATISTICS_TABLE, SF_NAME),
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER),
round(val(PERCENTAGE_MULTIPLIER).mul(dsl.select(sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.onKey()
.where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_PASSED).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
.asField()
.cast(Double.class))
.div(nullif(dsl.select(sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.onKey()
.where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
.asField(), 0)), 2).as(PASSING_RATE),
timestampDiff(LAUNCH.END_TIME, LAUNCH.START_TIME).as(DURATION)
);
return selectFields;
}
/**
 * Builds the base 'Product Status' widget query. When custom columns are requested, the launches
 * are additionally left-joined with their item attributes filtered by the custom column keys
 * (a {@code null} key matches attributes that have no key).
 *
 * @param filter        filter the launches are selected by
 * @param isLatest      whether only the latest launch per launch name should be taken
 * @param sort          sorting applied to the launches sub-query
 * @param limit         maximum number of launches to select
 * @param fields        select fields; mutated — attribute columns are appended when custom columns exist
 * @param contentFields requested content fields; only statistics-prefixed ones are passed further
 * @param customColumns mapping of custom column names to item attribute keys (values may be null)
 * @return select step joined with statistics (and, optionally, item attributes)
 */
private SelectOnConditionStep<? extends Record> buildProductStatusQuery(Filter filter, boolean isLatest, Sort sort, int limit,
Collection<Field<?>> fields, Collection<String> contentFields, Map<String, String> customColumns) {
List<Condition> attributesKeyConditions = customColumns.values()
.stream()
.map(customColumn -> ofNullable(customColumn).map(ITEM_ATTRIBUTE.KEY::eq).orElseGet(ITEM_ATTRIBUTE.KEY::isNull))
.collect(Collectors.toList());
// OR all key conditions together; empty Optional means no custom columns were requested.
// (Previous code assigned to the lambda parameter inside reduce — a no-op that obscured intent.)
Optional<Condition> combinedAttributeKeyCondition = attributesKeyConditions.stream().reduce(Condition::or);
List<String> statisticsFields = contentFields.stream().filter(cf -> cf.startsWith(STATISTICS_KEY)).collect(toList());
return combinedAttributeKeyCondition.map(c -> {
Collections.addAll(fields,
fieldName(ATTR_TABLE, ATTR_ID),
fieldName(ATTR_TABLE, ATTRIBUTE_VALUE),
fieldName(ATTR_TABLE, ATTRIBUTE_KEY)
);
return getProductStatusSelect(filter, isLatest, sort, limit, fields, statisticsFields).leftJoin(DSL.select(ITEM_ATTRIBUTE.ID.as(
ATTR_ID),
ITEM_ATTRIBUTE.VALUE.as(ATTRIBUTE_VALUE),
ITEM_ATTRIBUTE.KEY.as(ATTRIBUTE_KEY),
ITEM_ATTRIBUTE.LAUNCH_ID.as(LAUNCH_ID)
).from(ITEM_ATTRIBUTE).where(c).asTable(ATTR_TABLE)).on(LAUNCH.ID.eq(fieldName(ATTR_TABLE, LAUNCH_ID).cast(Long.class)));
}).orElseGet(() -> getProductStatusSelect(filter, isLatest, sort, limit, fields, statisticsFields));
}
/**
 * Core select for the 'Product Status' widget: launches restricted by the filter CTE,
 * left-joined with their statistics counters restricted to the given statistics field names.
 *
 * @param filter        filter the launches are selected by
 * @param isLatest      whether only the latest launch per launch name should be taken
 * @param sort          sorting applied to the launches sub-query
 * @param limit         maximum number of launches to select
 * @param fields        fields for the select clause
 * @param contentFields statistics field names to join
 * @return select joined with the statistics sub-table on launch id
 */
private SelectOnConditionStep<Record> getProductStatusSelect(Filter filter, boolean isLatest, Sort sort, int limit,
Collection<Field<?>> fields, Collection<String> contentFields) {
return dsl.with(LAUNCHES)
.as(QueryUtils.createQueryBuilderWithLatestLaunchesOption(filter, sort, isLatest).with(sort).with(limit).build())
.select(fields)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(contentFields))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)));
}
/**
 * Folds a list of per-launch statistics into a single summary entry.
 * Counter values are summed per statistics-field name, and the launch passing rates
 * (absent rates counted as zero) are averaged and rounded half-up to two decimal places.
 *
 * @param launchesStatisticsResult per-launch widget statistics
 * @return one content object holding the combined totals and the rounded average passing rate
 */
private ProductStatusStatisticsContent countLaunchTotalStatistics(List<ProductStatusStatisticsContent> launchesStatisticsResult) {
ProductStatusStatisticsContent summary = new ProductStatusStatisticsContent();
summary.setTotalStatistics(launchesStatisticsResult.stream()
.map(ProductStatusStatisticsContent::getValues)
.flatMap(values -> values.entrySet().stream())
.collect(Collectors.groupingBy(Map.Entry::getKey, summingInt(entry -> Integer.parseInt(entry.getValue())))));
Double averagePassingRate = launchesStatisticsResult.stream()
.collect(averagingDouble(content -> ofNullable(content.getPassingRate()).orElse(0D)));
summary.setAveragePassingRate(BigDecimal.valueOf(averagePassingRate).setScale(2, RoundingMode.HALF_UP).doubleValue());
return summary;
}
/**
 * Aggregates statistics of launches grouped by filter into a single summary entry.
 * Flattens all per-filter launch statistics and delegates the aggregation to
 * {@link #countLaunchTotalStatistics(List)}, which previously had its summing/averaging
 * logic duplicated here verbatim.
 *
 * @param launchesStatisticsResult per-filter lists of launch statistics
 * @return singleton list with the combined totals and the rounded average passing rate
 */
private List<ProductStatusStatisticsContent> countFilterTotalStatistics(
Map<String, List<ProductStatusStatisticsContent>> launchesStatisticsResult) {
List<ProductStatusStatisticsContent> flattened = launchesStatisticsResult.values()
.stream()
.flatMap(Collection::stream)
.collect(toList());
return Lists.newArrayList(countLaunchTotalStatistics(flattened));
}
/**
 * For each attribute value, counts distinct test items matched by every pattern template among
 * the launches mapped to that value, keeps the top {@code PATTERNS_COUNT} templates per attribute,
 * and unions all per-attribute queries into one statement.
 *
 * @param attributeIdsMapping attribute value -> ids of launches marked with it
 * @return fetched 'Top Pattern Templates' widget content; empty when the mapping is empty
 */
private List<TopPatternTemplatesContent> buildPatternTemplatesQuery(Map<String, List<Long>> attributeIdsMapping) {
return attributeIdsMapping.entrySet()
.stream()
.map(entry -> (Select<? extends Record>) dsl.select(DSL.val(entry.getKey()).as(ATTRIBUTE_VALUE),
PATTERN_TEMPLATE.NAME,
DSL.countDistinct(PATTERN_TEMPLATE_TEST_ITEM.ITEM_ID).as(TOTAL)
)
.from(PATTERN_TEMPLATE)
.join(PATTERN_TEMPLATE_TEST_ITEM)
.on(PATTERN_TEMPLATE.ID.eq(PATTERN_TEMPLATE_TEST_ITEM.PATTERN_ID))
.join(TEST_ITEM)
.on(PATTERN_TEMPLATE_TEST_ITEM.ITEM_ID.eq(TEST_ITEM.ITEM_ID))
.join(LAUNCH)
.on(TEST_ITEM.LAUNCH_ID.eq(LAUNCH.ID))
.where(LAUNCH.ID.in(entry.getValue()))
.groupBy(PATTERN_TEMPLATE.NAME)
.orderBy(field(TOTAL).desc())
.limit(PATTERNS_COUNT))
// Union per-attribute selects. (Previous code assigned to the lambda parameter — a no-op.)
.reduce((prev, curr) -> prev.unionAll(curr))
.map(select -> TOP_PATTERN_TEMPLATES_FETCHER.apply(select.fetch()))
.orElseGet(Collections::emptyList);
}
/**
 * For each attribute value, counts distinct test items matched by the given pattern template
 * per launch (launches mapped to that value), keeps only launches with at least one match,
 * and unions all per-attribute queries into one statement.
 *
 * @param attributeIdsMapping attribute value -> ids of launches marked with it
 * @param patternTemplateName name of the pattern template the counts are restricted to
 * @return fetched grouped 'Top Pattern Templates' widget content; empty when the mapping is empty
 */
private List<TopPatternTemplatesContent> buildPatternTemplatesQueryGroupedByPattern(Map<String, List<Long>> attributeIdsMapping,
String patternTemplateName) {
return attributeIdsMapping.entrySet()
.stream()
.map(entry -> (Select<? extends Record>) dsl.select(DSL.val(entry.getKey()).as(ATTRIBUTE_VALUE),
LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
DSL.countDistinct(PATTERN_TEMPLATE_TEST_ITEM.ITEM_ID).as(TOTAL)
)
.from(PATTERN_TEMPLATE)
.join(PATTERN_TEMPLATE_TEST_ITEM)
.on(PATTERN_TEMPLATE.ID.eq(PATTERN_TEMPLATE_TEST_ITEM.PATTERN_ID))
.join(TEST_ITEM)
.on(PATTERN_TEMPLATE_TEST_ITEM.ITEM_ID.eq(TEST_ITEM.ITEM_ID))
.join(LAUNCH)
.on(TEST_ITEM.LAUNCH_ID.eq(LAUNCH.ID))
.where(LAUNCH.ID.in(entry.getValue()))
.and(PATTERN_TEMPLATE.NAME.eq(patternTemplateName))
.groupBy(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, PATTERN_TEMPLATE.NAME)
.having(DSL.countDistinct(PATTERN_TEMPLATE_TEST_ITEM.ITEM_ID).gt(BigDecimal.ZERO.intValue()))
.orderBy(field(TOTAL).desc()))
// Union per-attribute selects. (Previous code assigned to the lambda parameter — a no-op.)
.reduce((prev, curr) -> prev.unionAll(curr))
.map(select -> TOP_PATTERN_TEMPLATES_GROUPED_FETCHER.apply(select.fetch()))
.orElseGet(Collections::emptyList);
}
}
/*
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.dao;
import com.epam.ta.reportportal.commons.querygen.CriteriaHolder;
import com.epam.ta.reportportal.commons.querygen.Filter;
import com.epam.ta.reportportal.commons.querygen.QueryBuilder;
import com.epam.ta.reportportal.commons.validation.Suppliers;
import com.epam.ta.reportportal.dao.util.QueryUtils;
import com.epam.ta.reportportal.dao.widget.WidgetProviderChain;
import com.epam.ta.reportportal.entity.enums.StatusEnum;
import com.epam.ta.reportportal.entity.widget.content.*;
import com.epam.ta.reportportal.entity.widget.content.healthcheck.ComponentHealthCheckContent;
import com.epam.ta.reportportal.entity.widget.content.healthcheck.HealthCheckTableContent;
import com.epam.ta.reportportal.entity.widget.content.healthcheck.HealthCheckTableGetParams;
import com.epam.ta.reportportal.entity.widget.content.healthcheck.HealthCheckTableInitParams;
import com.epam.ta.reportportal.exception.ReportPortalException;
import com.epam.ta.reportportal.jooq.enums.JStatusEnum;
import com.epam.ta.reportportal.jooq.enums.JTestItemTypeEnum;
import com.epam.ta.reportportal.jooq.tables.JItemAttribute;
import com.epam.ta.reportportal.util.WidgetSortUtils;
import com.epam.ta.reportportal.ws.model.ActivityResource;
import com.epam.ta.reportportal.ws.model.ErrorType;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.jooq.*;
import org.jooq.impl.DSL;
import org.jooq.util.postgres.PostgresDSL;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Repository;
import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.*;
import java.util.stream.Collectors;
import static com.epam.ta.reportportal.commons.querygen.QueryBuilder.STATISTICS_KEY;
import static com.epam.ta.reportportal.commons.querygen.constant.GeneralCriteriaConstant.CRITERIA_START_TIME;
import static com.epam.ta.reportportal.commons.querygen.constant.ItemAttributeConstant.KEY_VALUE_SEPARATOR;
import static com.epam.ta.reportportal.dao.constant.WidgetContentRepositoryConstants.*;
import static com.epam.ta.reportportal.dao.constant.WidgetRepositoryConstants.ID;
import static com.epam.ta.reportportal.dao.util.JooqFieldNameTransformer.fieldName;
import static com.epam.ta.reportportal.dao.util.QueryUtils.collectJoinFields;
import static com.epam.ta.reportportal.dao.util.WidgetContentUtil.*;
import static com.epam.ta.reportportal.jooq.Tables.*;
import static com.epam.ta.reportportal.jooq.tables.JActivity.ACTIVITY;
import static com.epam.ta.reportportal.jooq.tables.JIssue.ISSUE;
import static com.epam.ta.reportportal.jooq.tables.JIssueTicket.ISSUE_TICKET;
import static com.epam.ta.reportportal.jooq.tables.JLaunch.LAUNCH;
import static com.epam.ta.reportportal.jooq.tables.JProject.PROJECT;
import static com.epam.ta.reportportal.jooq.tables.JTestItem.TEST_ITEM;
import static com.epam.ta.reportportal.jooq.tables.JTestItemResults.TEST_ITEM_RESULTS;
import static com.epam.ta.reportportal.jooq.tables.JTicket.TICKET;
import static com.epam.ta.reportportal.jooq.tables.JUsers.USERS;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.*;
import static org.jooq.impl.DSL.*;
/**
* Repository that contains queries of content loading for widgets.
*
* @author Pavel Bortnik
*/
@Repository
public class WidgetContentRepositoryImpl implements WidgetContentRepository {
@Autowired
private DSLContext dsl;
@Autowired
private WidgetProviderChain<HealthCheckTableGetParams, List<HealthCheckTableContent>> healthCheckTableChain;
// Item types whose names mention METHOD or CLASS (before/after methods and classes).
// Used to optionally widen step-level item conditions to include them.
private static final List<JTestItemTypeEnum> HAS_METHOD_OR_CLASS = Arrays.stream(JTestItemTypeEnum.values()).filter(it -> {
String name = it.name();
return name.contains("METHOD") || name.contains("CLASS");
}).collect(Collectors.toList());
/**
 * Sums statistics counters per statistics field name over the launches selected by the filter
 * (optionally only the latest launch per launch name), restricted to the requested content fields.
 */
@Override
public OverallStatisticsContent overallStatisticsContent(Filter filter, Sort sort, List<String> contentFields, boolean latest,
int limit) {
return OVERALL_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES)
.as(QueryUtils.createQueryBuilderWithLatestLaunchesOption(filter, sort, latest).with(sort).with(limit).build())
.select(STATISTICS_FIELD.NAME, sum(STATISTICS.S_COUNTER).as(SUM))
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(STATISTICS)
.on(LAUNCH.ID.eq(STATISTICS.LAUNCH_ID))
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(contentFields))
.groupBy(STATISTICS_FIELD.NAME)
.fetch());
}
/**
 * Returns a condition matching step-level test item types.
 * When {@code includeMethods} is {@code true}, before/after method and class item types
 * are matched as well.
 *
 * @param includeMethods whether method/class item types should also be matched
 * @return {@link Condition} restricting {@code TEST_ITEM.TYPE} to the selected types
 */
private Condition itemTypeStepCondition(boolean includeMethods) {
List<JTestItemTypeEnum> allowedTypes = new ArrayList<>();
allowedTypes.add(JTestItemTypeEnum.STEP);
if (includeMethods) {
allowedTypes.addAll(HAS_METHOD_OR_CLASS);
}
return TEST_ITEM.TYPE.in(allowedTypes);
}
/**
 * 'Most Failed' widget content: groups step-level items by unique id and name, builds per-launch
 * criteria-hit and start-time histories (ordered by launch number), and returns only items with
 * at least one criteria hit, ordered by hit count descending and total executions ascending.
 */
@Override
public List<CriteriaHistoryItem> topItemsByCriteria(Filter filter, String criteria, int limit, boolean includeMethods) {
Table<Record2<Long, BigDecimal>> criteriaTable = getTopItemsCriteriaTable(filter, criteria, limit, includeMethods);
return dsl.select(TEST_ITEM.UNIQUE_ID,
TEST_ITEM.NAME,
// true/false per launch depending on whether the item hit the criteria in that launch
DSL.arrayAgg(when(fieldName(criteriaTable.getName(), CRITERIA_FLAG).cast(Integer.class).ge(1), true).otherwise(false))
.orderBy(LAUNCH.NUMBER.asc())
.as(STATUS_HISTORY),
DSL.arrayAgg(TEST_ITEM.START_TIME).orderBy(LAUNCH.NUMBER.asc()).as(START_TIME_HISTORY),
DSL.sum(fieldName(criteriaTable.getName(), CRITERIA_FLAG).cast(Integer.class)).as(CRITERIA),
DSL.count(TEST_ITEM.ITEM_ID).as(TOTAL)
)
.from(TEST_ITEM)
.join(criteriaTable)
.on(TEST_ITEM.ITEM_ID.eq(fieldName(criteriaTable.getName(), ITEM_ID).cast(Long.class)))
.join(LAUNCH)
.on(TEST_ITEM.LAUNCH_ID.eq(LAUNCH.ID))
.groupBy(TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME)
.having(DSL.sum(fieldName(criteriaTable.getName(), CRITERIA_FLAG).cast(Integer.class)).greaterThan(BigDecimal.ZERO))
.orderBy(DSL.field(DSL.name(CRITERIA)).desc(), DSL.field(DSL.name(TOTAL)).asc())
.limit(MOST_FAILED_CRITERIA_LIMIT)
.fetchInto(CriteriaHistoryItem.class);
}
/**
 * Builds the per-item criteria table for the 'Most Failed' widget: for every step-level item
 * (with stats, without children; optionally including method/class items) sums the per-row
 * criteria flags over the limited launch selection, sorted by start time descending.
 */
private Table<Record2<Long, BigDecimal>> getTopItemsCriteriaTable(Filter filter, String criteria, int limit, boolean includeMethods) {
Sort launchSort = Sort.by(Sort.Direction.DESC, CRITERIA_START_TIME);
Table<? extends Record> launchesTable = QueryBuilder.newBuilder(filter, collectJoinFields(filter, launchSort))
.with(limit)
.with(launchSort)
.build()
.asTable(LAUNCHES);
return getCommonMostFailedQuery(criteria, launchesTable).where(itemTypeStepCondition(includeMethods))
.and(TEST_ITEM.HAS_STATS.eq(Boolean.TRUE))
.and(TEST_ITEM.HAS_CHILDREN.eq(false))
.groupBy(TEST_ITEM.ITEM_ID)
.asTable(CRITERIA_TABLE);
}
/**
 * Chooses the criteria-flag source for the 'Most Failed' widget: failed/skipped execution
 * criteria are resolved from test item result statuses, any other criteria from statistics.
 *
 * @throws ReportPortalException when a status-suffixed criteria cannot be mapped to a known status
 */
private SelectOnConditionStep<Record2<Long, BigDecimal>> getCommonMostFailedQuery(String criteria,
Table<? extends Record> launchesTable) {
if (StringUtils.endsWithAny(criteria,
StatusEnum.FAILED.getExecutionCounterField(),
StatusEnum.SKIPPED.getExecutionCounterField()
)) {
StatusEnum status = StatusEnum.fromValue(StringUtils.substringAfterLast(criteria, STATISTICS_SEPARATOR))
.orElseThrow(() -> new ReportPortalException(ErrorType.UNCLASSIFIED_REPORT_PORTAL_ERROR));
return statusCriteriaTable(JStatusEnum.valueOf(status.name()), launchesTable);
} else {
return statisticsCriteriaTable(criteria, launchesTable);
}
}
/**
 * Criteria-flag source based on statistics: per item, 1 for every statistics row whose field
 * name equals the criteria, 0 otherwise (summed by the caller).
 */
private SelectOnConditionStep<Record2<Long, BigDecimal>> statisticsCriteriaTable(String criteria,
Table<? extends Record> launchesTable) {
return dsl.select(TEST_ITEM.ITEM_ID, sum(when(STATISTICS_FIELD.NAME.eq(criteria), 1).otherwise(ZERO_QUERY_VALUE)).as(CRITERIA_FLAG))
.from(TEST_ITEM)
.join(launchesTable)
.on(TEST_ITEM.LAUNCH_ID.eq(fieldName(launchesTable.getName(), ID).cast(Long.class)))
.join(STATISTICS)
.on(TEST_ITEM.ITEM_ID.eq(STATISTICS.ITEM_ID))
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID));
}
/**
 * Criteria-flag source based on item result status: per item, 1 when the result status equals
 * the given status, 0 otherwise (summed by the caller).
 */
private SelectOnConditionStep<Record2<Long, BigDecimal>> statusCriteriaTable(JStatusEnum criteria,
Table<? extends Record> launchesTable) {
return dsl.select(TEST_ITEM.ITEM_ID,
sum(when(TEST_ITEM_RESULTS.STATUS.eq(criteria), 1).otherwise(ZERO_QUERY_VALUE)).as(CRITERIA_FLAG)
)
.from(TEST_ITEM)
.join(launchesTable)
.on(TEST_ITEM.LAUNCH_ID.eq(fieldName(launchesTable.getName(), ID).cast(Long.class)))
.join(TEST_ITEM_RESULTS)
.on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID));
}
/**
 * 'Flaky Test Cases' widget content. The inner select orders item results per unique id by start
 * time (newest first) and flags a "switch" whenever the status differs from the previous result
 * of the same unique id (LAG window function). The outer select aggregates per unique id/name:
 * status history, latest start time, number of switches and total runs minus one; only items
 * with more than one run and at least one switch are returned, most flaky first.
 */
@Override
public List<FlakyCasesTableContent> flakyCasesStatistics(Filter filter, boolean includeMethods, int limit) {
return FLAKY_CASES_TABLE_FETCHER.apply(dsl.select(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.UNIQUE_ID.getName())).as(UNIQUE_ID),
field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.NAME.getName())).as(ITEM_NAME),
DSL.arrayAgg(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM_RESULTS.STATUS.getName()))).as(STATUSES),
DSL.max(field(name(FLAKY_TABLE_RESULTS, START_TIME))).as(START_TIME_HISTORY),
sum(field(name(FLAKY_TABLE_RESULTS, SWITCH_FLAG)).cast(Long.class)).as(FLAKY_COUNT),
count(field(name(FLAKY_TABLE_RESULTS, ITEM_ID))).minus(1).as(TOTAL)
)
.from(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, Sort.unsorted()))
.with(LAUNCH.NUMBER, SortOrder.DESC)
.with(limit)
.build())
.select(TEST_ITEM.ITEM_ID,
TEST_ITEM.UNIQUE_ID,
TEST_ITEM.NAME,
TEST_ITEM.START_TIME,
TEST_ITEM_RESULTS.STATUS,
// 1 when the status differs from the previous result of the same unique id, else 0
when(TEST_ITEM_RESULTS.STATUS.notEqual(lag(TEST_ITEM_RESULTS.STATUS).over(orderBy(TEST_ITEM.UNIQUE_ID,
TEST_ITEM.START_TIME.desc()
)))
.and(TEST_ITEM.UNIQUE_ID.equal(lag(TEST_ITEM.UNIQUE_ID).over(orderBy(TEST_ITEM.UNIQUE_ID,
TEST_ITEM.START_TIME.desc()
)))), 1).otherwise(ZERO_QUERY_VALUE)
.as(SWITCH_FLAG)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.join(TEST_ITEM)
.on(LAUNCH.ID.eq(TEST_ITEM.LAUNCH_ID))
.join(TEST_ITEM_RESULTS)
.on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID))
.where(itemTypeStepCondition(includeMethods))
.and(TEST_ITEM.HAS_STATS.eq(Boolean.TRUE))
.and(TEST_ITEM.HAS_CHILDREN.eq(false))
.and(TEST_ITEM.RETRY_OF.isNull())
.groupBy(TEST_ITEM.ITEM_ID, TEST_ITEM_RESULTS.STATUS, TEST_ITEM.UNIQUE_ID, TEST_ITEM.NAME, TEST_ITEM.START_TIME)
.orderBy(TEST_ITEM.UNIQUE_ID, TEST_ITEM.START_TIME.desc())
.asTable(FLAKY_TABLE_RESULTS))
.groupBy(field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.UNIQUE_ID.getName())),
field(name(FLAKY_TABLE_RESULTS, TEST_ITEM.NAME.getName()))
)
.having(count(field(name(FLAKY_TABLE_RESULTS, ITEM_ID))).gt(BigDecimal.ONE.intValue())
.and(sum(field(name(FLAKY_TABLE_RESULTS, SWITCH_FLAG)).cast(Long.class)).gt(BigDecimal.ZERO)))
.orderBy(fieldName(FLAKY_COUNT).desc(), fieldName(TOTAL).asc(), fieldName(UNIQUE_ID))
.limit(FLAKY_CASES_LIMIT)
.fetch());
}
/**
 * 'Launch Statistics' chart content: per launch, the requested statistics counters joined by
 * statistics field name, grouped by launch columns plus any sort-derived fields.
 */
@Override
public List<ChartStatisticsContent> launchStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) {
List<Field<?>> groupingFields = Lists.newArrayList(field(LAUNCH.ID),
field(LAUNCH.NUMBER),
field(LAUNCH.START_TIME),
field(LAUNCH.NAME),
fieldName(STATISTICS_TABLE, SF_NAME),
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER)
);
// Sorting may reference extra columns; they must also appear in GROUP BY
groupingFields.addAll(WidgetSortUtils.fieldTransformer(filter.getTarget()).apply(sort, LAUNCHES));
return LAUNCHES_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
LAUNCH.NAME,
fieldName(STATISTICS_TABLE, SF_NAME),
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(contentFields))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.groupBy(groupingFields)
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
.fetch());
}
/**
 * 'Investigated Percentage' chart content: per launch, the percentage of to-investigate defects
 * relative to all defect totals (100 * toInvestigate / allDefects, rounded to 2 decimals;
 * NULLIF guards against division by zero).
 */
@Override
public List<ChartStatisticsContent> investigatedStatistics(Filter filter, Sort sort, int limit) {
List<Field<?>> groupingFields = Lists.newArrayList(field(LAUNCH.ID),
field(LAUNCH.NUMBER),
field(LAUNCH.START_TIME),
field(LAUNCH.NAME)
);
// Sorting may reference extra columns; they must also appear in GROUP BY
groupingFields.addAll(WidgetSortUtils.fieldTransformer(filter.getTarget()).apply(sort, LAUNCHES));
return INVESTIGATED_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
LAUNCH.NAME,
round(val(PERCENTAGE_MULTIPLIER).mul(dsl.select(sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.onKey()
.where(STATISTICS_FIELD.NAME.eq(DEFECTS_TO_INVESTIGATE_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
.asField()
.cast(Double.class))
.div(nullif(dsl.select(sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.onKey()
.where(STATISTICS_FIELD.NAME.in(DEFECTS_AUTOMATION_BUG_TOTAL,
DEFECTS_NO_DEFECT_TOTAL,
DEFECTS_TO_INVESTIGATE_TOTAL,
DEFECTS_PRODUCT_BUG_TOTAL,
DEFECTS_SYSTEM_ISSUE_TOTAL
).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
.asField(), 0)), 2).as(TO_INVESTIGATE)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.groupBy(groupingFields)
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
.fetch());
}
/**
 * Timeline variant of the investigated statistics: per launch, raw to-investigate and
 * all-defects totals (COALESCE to 0 when absent) instead of a percentage — the timeline
 * aggregation happens later in the mapper.
 */
@Override
public List<ChartStatisticsContent> timelineInvestigatedStatistics(Filter filter, Sort sort, int limit) {
List<Field<?>> groupingFields = Lists.newArrayList(field(LAUNCH.ID),
field(LAUNCH.NUMBER),
field(LAUNCH.START_TIME),
field(LAUNCH.NAME)
);
// Sorting may reference extra columns; they must also appear in GROUP BY
groupingFields.addAll(WidgetSortUtils.fieldTransformer(filter.getTarget()).apply(sort, LAUNCHES));
return dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
LAUNCH.NAME,
coalesce(DSL.select(sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.onKey()
.where(STATISTICS_FIELD.NAME.eq(DEFECTS_TO_INVESTIGATE_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
.asField()
.cast(Double.class), 0).as(TO_INVESTIGATE),
coalesce(DSL.select(sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.onKey()
.where(STATISTICS_FIELD.NAME.in(DEFECTS_AUTOMATION_BUG_TOTAL,
DEFECTS_NO_DEFECT_TOTAL,
DEFECTS_TO_INVESTIGATE_TOTAL,
DEFECTS_PRODUCT_BUG_TOTAL,
DEFECTS_SYSTEM_ISSUE_TOTAL
).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
.asField(), 0).as(INVESTIGATED)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.groupBy(groupingFields)
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
.fetch(TIMELINE_INVESTIGATED_STATISTICS_RECORD_MAPPER);
}
/**
 * Passing-rate statistics for the single best-matching launch (first row after grouping
 * and sorting).
 *
 * @throws ReportPortalException when the filter selects no launches
 */
@Override
public PassingRateStatisticsResult passingRatePerLaunchStatistics(Filter filter, Sort sort, int limit) {
List<Field<Object>> groupingFields = WidgetSortUtils.fieldTransformer(filter.getTarget()).apply(sort, LAUNCHES);
return buildPassingRateSelect(filter, sort, limit).groupBy(groupingFields)
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
.fetchInto(PassingRateStatisticsResult.class)
.stream()
.findFirst()
.orElseThrow(() -> new ReportPortalException("No results for filter were found"));
}
/**
 * Summary passing-rate statistics over all launches selected by the filter (no per-launch grouping).
 *
 * @throws ReportPortalException when the filter selects no launches
 */
@Override
public PassingRateStatisticsResult summaryPassingRateStatistics(Filter filter, Sort sort, int limit) {
return buildPassingRateSelect(filter, sort, limit).fetchInto(PassingRateStatisticsResult.class)
.stream()
.findFirst()
.orElseThrow(() -> new ReportPortalException("No results for filter were found"));
}
/**
 * 'Test Cases Growth Trend' content: per launch (ordered by start time), the counter of the
 * given statistics field and the delta against the previous launch (LAG window function;
 * COALESCE makes the first launch's delta zero).
 */
@Override
public List<ChartStatisticsContent> casesTrendStatistics(Filter filter, String contentField, Sort sort, int limit) {
List<Field<?>> groupingFields = Lists.newArrayList(field(LAUNCH.ID),
field(LAUNCH.NUMBER),
field(LAUNCH.START_TIME),
field(LAUNCH.NAME),
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER)
);
// Sorting may reference extra columns; they must also appear in GROUP BY
groupingFields.addAll(WidgetSortUtils.fieldTransformer(filter.getTarget()).apply(sort, LAUNCHES));
return CASES_GROWTH_TREND_FETCHER.apply(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
LAUNCH.NAME,
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER),
coalesce(fieldName(STATISTICS_TABLE, STATISTICS_COUNTER).sub(lag(fieldName(STATISTICS_TABLE,
STATISTICS_COUNTER
)).over().orderBy(LAUNCH.START_TIME.asc())), 0).as(DELTA)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.eq(contentField))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.groupBy(groupingFields)
.orderBy(LAUNCH.START_TIME.asc())
.fetch(), contentField);
}
/**
 * 'Launches Bug Trend' content: per launch (ordered by start time), the raw counters of the
 * requested defect statistics fields.
 */
@Override
public List<ChartStatisticsContent> bugTrendStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) {
return BUG_TREND_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
fieldName(STATISTICS_TABLE, SF_NAME),
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(contentFields))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.orderBy(LAUNCH.START_TIME.asc())
.fetch());
}
/**
 * 'Launches Comparison' content built as a UNION ALL of two selects: the first returns execution
 * statistics (total as a raw counter, the rest as percentages of non-total executions), the
 * second returns defect statistics as percentages of all requested defect counters. Both use
 * NULLIF to avoid division by zero.
 */
@Override
public List<ChartStatisticsContent> launchesComparisonStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) {
List<String> executionStatisticsFields = contentFields.stream().filter(cf -> cf.contains(EXECUTIONS_KEY)).collect(toList());
List<String> defectStatisticsFields = contentFields.stream().filter(cf -> cf.contains(DEFECTS_KEY)).collect(toList());
return LAUNCHES_STATISTICS_FETCHER.apply(dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
field(name(STATISTICS_TABLE, SF_NAME), String.class),
// executions$total is kept as a raw counter; other execution counters become percentages
when(field(name(STATISTICS_TABLE, SF_NAME)).equalIgnoreCase(EXECUTIONS_TOTAL),
field(name(STATISTICS_TABLE, STATISTICS_COUNTER)).cast(Double.class)
).otherwise(round(val(PERCENTAGE_MULTIPLIER).mul(field(name(STATISTICS_TABLE, STATISTICS_COUNTER), Integer.class))
.div(nullif(DSL.select(DSL.sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))
.and(STATISTICS_FIELD.NAME.in(executionStatisticsFields)
.and(STATISTICS_FIELD.NAME.notEqual(EXECUTIONS_TOTAL))), 0).cast(Double.class)), 2))
.as(fieldName(STATISTICS_TABLE, STATISTICS_COUNTER))
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(executionStatisticsFields))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
// Second branch: defect counters as percentages of all requested defect totals
.unionAll(DSL.select(LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
field(name(STATISTICS_TABLE, SF_NAME), String.class),
round(val(PERCENTAGE_MULTIPLIER).mul(field(name(STATISTICS_TABLE, STATISTICS_COUNTER), Integer.class))
.div(nullif(DSL.select(DSL.sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))
.and(STATISTICS_FIELD.NAME.in(defectStatisticsFields)), 0).cast(Double.class)), 2)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID,
STATISTICS.S_COUNTER.as(STATISTICS_COUNTER),
STATISTICS_FIELD.NAME.as(SF_NAME)
)
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(defectStatisticsFields))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES)))
.fetch());
}
/**
 * 'Launches Duration' content: per launch (optionally only the latest per name), basic launch
 * columns plus the duration computed as end time minus start time.
 */
@Override
public List<LaunchesDurationContent> launchesDurationStatistics(Filter filter, Sort sort, boolean isLatest, int limit) {
return dsl.with(LAUNCHES)
.as(QueryUtils.createQueryBuilderWithLatestLaunchesOption(filter, sort, isLatest).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
LAUNCH.STATUS,
LAUNCH.START_TIME,
LAUNCH.END_TIME,
timestampDiff(LAUNCH.END_TIME, LAUNCH.START_TIME).as(DURATION)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES))
.fetchInto(LaunchesDurationContent.class);
}
/**
 * 'Non-Passed Test Cases Trend' content: per launch (ordered by start time), the percentage of
 * skipped+failed executions relative to the total counter (COALESCE yields 0 when the launch has
 * no statistics; NULLIF guards against division by zero).
 */
@Override
public List<NotPassedCasesContent> notPassedCasesStatistics(Filter filter, Sort sort, int limit) {
return dsl.with(LAUNCHES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(LAUNCH.ID,
LAUNCH.NAME,
LAUNCH.NUMBER,
LAUNCH.START_TIME,
fieldName(STATISTICS_TABLE, STATISTICS_COUNTER),
coalesce(round(val(PERCENTAGE_MULTIPLIER).mul(DSL.select(DSL.sum(STATISTICS.S_COUNTER))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.in(EXECUTIONS_SKIPPED, EXECUTIONS_FAILED))
.and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID))
.asField()
.cast(Double.class))
.div(nullif(field(name(STATISTICS_TABLE, STATISTICS_COUNTER), Integer.class), 0).cast(Double.class)), 2), 0)
.as(PERCENTAGE)
)
.from(LAUNCH)
.join(LAUNCHES)
.on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
.leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
.from(STATISTICS)
.join(STATISTICS_FIELD)
.on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
.where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_TOTAL))
.asTable(STATISTICS_TABLE))
.on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
.orderBy(LAUNCH.START_TIME.asc())
.fetch(NOT_PASSED_CASES_CONTENT_RECORD_MAPPER);
}
/**
 * 'Launches Table' content: resolves requested launch columns through the filter target's
 * criteria mapping (unknown columns fail fast), optionally joins item attributes, and joins
 * the requested statistics counters.
 *
 * @throws ReportPortalException when a non-statistics content field has no criteria mapping
 */
@Override
public List<LaunchesTableContent> launchesTableStatistics(Filter filter, List<String> contentFields, Sort sort, int limit) {
Map<String, String> criteria = filter.getTarget()
.getCriteriaHolders()
.stream()
.collect(Collectors.toMap(CriteriaHolder::getFilterCriteria, CriteriaHolder::getQueryCriteria));
// NOTE(review): mutates the caller-supplied contentFields list by removing "attributes" — confirm callers expect this
boolean isAttributePresent = contentFields.remove("attributes");
List<Field<?>> selectFields = contentFields.stream()
.filter(cf -> !cf.startsWith(STATISTICS_KEY))
.map(cf -> field(ofNullable(criteria.get(cf)).orElseThrow(() -> new ReportPortalException(Suppliers.formattedSupplier(
"Unknown table field - '{}'",
cf
).get()))))
.collect(Collectors.toList());
Collections.addAll(selectFields, LAUNCH.ID, fieldName(STATISTICS_TABLE, STATISTICS_COUNTER), fieldName(STATISTICS_TABLE, SF_NAME));
if (isAttributePresent) {
Collections.addAll(selectFields, ITEM_ATTRIBUTE.ID.as(ATTR_ID), ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE);
}
List<String> statisticsFields = contentFields.stream().filter(cf -> cf.startsWith(STATISTICS_KEY)).collect(toList());
return LAUNCHES_TABLE_FETCHER.apply(buildLaunchesTableQuery(selectFields, statisticsFields, filter, sort, limit, isAttributePresent)
.fetch(), contentFields);
}
/**
 * 'Activity Stream' content: activities selected by the filter joined with the acting user's
 * login and the project name, mapped to {@link ActivityResource}.
 */
@Override
public List<ActivityResource> activityStatistics(Filter filter, Sort sort, int limit) {
return dsl.with(ACTIVITIES)
.as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
.select(ACTIVITY.ID,
ACTIVITY.ACTION,
ACTIVITY.ENTITY,
ACTIVITY.CREATION_DATE,
ACTIVITY.DETAILS,
ACTIVITY.PROJECT_ID,
ACTIVITY.OBJECT_ID,
USERS.LOGIN,
PROJECT.NAME
)
.from(ACTIVITY)
.join(ACTIVITIES)
.on(fieldName(ACTIVITIES, ID).cast(Long.class).eq(ACTIVITY.ID))
.join(USERS)
.on(ACTIVITY.USER_ID.eq(USERS.ID))
.join(PROJECT)
.on(ACTIVITY.PROJECT_ID.eq(PROJECT.ID))
.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, ACTIVITIES))
.fetch()
.map(ACTIVITY_MAPPER);
}
/**
 * Builds the Unique Bugs Table widget content keyed by external ticket id.
 * <p>
 * Launches are selected by the filter (optionally latest-only), then test items
 * with linked issue tickets are collected together with their non-system
 * attributes (pulled via a LATERAL sub-select), newest tickets first.
 */
@Override
public Map<String, UniqueBugContent> uniqueBugStatistics(Filter filter, Sort sort, boolean isLatest, int limit) {
    Map<String, UniqueBugContent> content = UNIQUE_BUG_CONTENT_FETCHER.apply(dsl.with(LAUNCHES)
            .as(QueryUtils.createQueryBuilderWithLatestLaunchesOption(filter, sort, isLatest).with(limit).with(sort).build())
            .select(TICKET.TICKET_ID,
                    TICKET.SUBMIT_DATE,
                    TICKET.URL,
                    TICKET.SUBMITTER,
                    TEST_ITEM.ITEM_ID,
                    TEST_ITEM.NAME,
                    TEST_ITEM.PATH,
                    TEST_ITEM.LAUNCH_ID,
                    fieldName(ITEM_ATTRIBUTES, KEY),
                    fieldName(ITEM_ATTRIBUTES, VALUE)
            )
            .from(TEST_ITEM)
            .join(LAUNCHES)
            .on(fieldName(LAUNCHES, ID).cast(Long.class).eq(TEST_ITEM.LAUNCH_ID))
            .join(TEST_ITEM_RESULTS)
            .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID))
            // Issue / ticket links are optional up to the inner join on TICKET,
            // which restricts the result to items that actually have tickets.
            .leftJoin(ISSUE)
            .on(TEST_ITEM.ITEM_ID.eq(ISSUE.ISSUE_ID))
            .leftJoin(ISSUE_TICKET)
            .on(ISSUE.ISSUE_ID.eq(ISSUE_TICKET.ISSUE_ID))
            .join(TICKET)
            .on(ISSUE_TICKET.TICKET_ID.eq(TICKET.ID))
            // LATERAL sub-select: only non-system attributes of the current item.
            .leftJoin(lateral(dsl.select(ITEM_ATTRIBUTE.ITEM_ID, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE)
                    .from(ITEM_ATTRIBUTE)
                    .where(ITEM_ATTRIBUTE.ITEM_ID.eq(TEST_ITEM.ITEM_ID).andNot(ITEM_ATTRIBUTE.SYSTEM))).as(ITEM_ATTRIBUTES))
            .on(TEST_ITEM.ITEM_ID.eq(fieldName(ITEM_ATTRIBUTES, ITEM_ID).cast(Long.class)))
            .orderBy(TICKET.SUBMIT_DATE.desc())
            .fetch());
    return content;
}
/**
 * Builds the Product Status widget content grouped by filter: one grouped
 * sub-query per (filter, sort) pair, combined with UNION ALL, plus an extra
 * aggregated entry stored under the {@code TOTAL} key.
 *
 * @throws ReportPortalException if no query could be built (empty mapping)
 */
@Override
public Map<String, List<ProductStatusStatisticsContent>> productStatusGroupedByFilterStatistics(Map<Filter, Sort> filterSortMapping,
        List<String> contentFields, Map<String, String> customColumns, boolean isLatest, int limit) {
    // Build one grouped query per filter and chain them with UNION ALL.
    // (The original collected to a list and re-streamed, and assigned to the
    // lambda parameter inside reduce — both were no-ops and are removed.)
    Select<? extends Record> select = filterSortMapping.entrySet()
            .stream()
            .map(f -> (Select<? extends Record>) buildFilterGroupedQuery(f.getKey(),
                    isLatest,
                    f.getValue(),
                    limit,
                    contentFields,
                    customColumns
            ))
            .reduce((prev, curr) -> prev.unionAll(curr))
            .orElseThrow(() -> new ReportPortalException(ErrorType.BAD_REQUEST_ERROR,
                    "Query building for Product Status Widget failed"
            ));
    Map<String, List<ProductStatusStatisticsContent>> productStatusContent = PRODUCT_STATUS_FILTER_GROUPED_FETCHER.apply(select.fetch(),
            customColumns
    );
    // Append aggregated totals across all filters.
    productStatusContent.put(TOTAL, countFilterTotalStatistics(productStatusContent));
    return productStatusContent;
}
/**
 * Builds the Product Status widget content grouped by launches. When the result
 * is non-empty, a synthetic "total" row (summed counters, averaged passing rate)
 * is appended at the end.
 */
@Override
public List<ProductStatusStatisticsContent> productStatusGroupedByLaunchesStatistics(Filter filter, List<String> contentFields,
        Map<String, String> customColumns, Sort sort, boolean isLatest, int limit) {
    List<Field<?>> selectFields = getCommonProductStatusFields(filter, contentFields);
    List<ProductStatusStatisticsContent> productStatusStatisticsResult = PRODUCT_STATUS_LAUNCH_GROUPED_FETCHER.apply(buildProductStatusQuery(
            filter,
            isLatest,
            sort,
            limit,
            selectFields,
            contentFields,
            customColumns
    ).orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES)).fetch(),
            customColumns
    );
    if (!productStatusStatisticsResult.isEmpty()) {
        productStatusStatisticsResult.add(countLaunchTotalStatistics(productStatusStatisticsResult));
    }
    return productStatusStatisticsResult;
}
/**
 * Builds the Most Time-Consuming Test Cases widget content: test items selected
 * by the filter, ordered by execution duration descending.
 * <p>
 * NOTE(review): {@code fieldName(TEST_ITEM_RESULTS.DURATION)} passes a jOOQ
 * Field where other call sites pass String name parts — verify it resolves to
 * the intended duration column.
 */
@Override
public List<MostTimeConsumingTestCasesContent> mostTimeConsumingTestCasesStatistics(Filter filter, int limit) {
    return dsl.with(ITEMS)
            .as(QueryBuilder.newBuilder(filter, collectJoinFields(filter)).with(limit).build())
            .select(TEST_ITEM.ITEM_ID.as(ID),
                    TEST_ITEM.UNIQUE_ID,
                    TEST_ITEM.NAME,
                    TEST_ITEM.TYPE,
                    TEST_ITEM.PATH,
                    TEST_ITEM.START_TIME,
                    TEST_ITEM_RESULTS.END_TIME,
                    TEST_ITEM_RESULTS.DURATION,
                    TEST_ITEM_RESULTS.STATUS
            )
            .from(TEST_ITEM)
            .join(ITEMS)
            .on(fieldName(ITEMS, ID).cast(Long.class).eq(TEST_ITEM.ITEM_ID))
            .join(TEST_ITEM_RESULTS)
            .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID))
            .orderBy(fieldName(TEST_ITEM_RESULTS.DURATION).desc())
            .fetchInto(MostTimeConsumingTestCasesContent.class);
}
/**
 * Builds the Top Pattern Templates widget content.
 * <p>
 * First aggregates launch ids per attribute value (latest launch id only when
 * {@code isLatest}, otherwise an array of ids), restricted to the top
 * {@code attributesLimit} attribute values; values matching the version pattern
 * are sorted numerically by their dotted components, others lexicographically.
 * The resulting mapping then drives either the overall per-pattern query or the
 * per-launch query for a single named pattern.
 */
@Override
public List<TopPatternTemplatesContent> patternTemplate(Filter filter, Sort sort, @Nullable String attributeKey,
        @Nullable String patternName, boolean isLatest, int launchesLimit, int attributesLimit) {
    Condition attributeKeyCondition = ofNullable(attributeKey).map(ITEM_ATTRIBUTE.KEY::eq).orElseGet(DSL::noCondition);
    // Latest mode keeps only the newest launch id; otherwise all ids are aggregated.
    Field<?> launchIdsField = isLatest ? DSL.max(LAUNCH.ID).as(ID) : DSL.arrayAgg(LAUNCH.ID).as(ID);
    List<Field<?>> groupingFields = isLatest ?
            Lists.newArrayList(LAUNCH.NAME, ITEM_ATTRIBUTE.VALUE) :
            Lists.newArrayList(ITEM_ATTRIBUTE.VALUE);
    Map<String, List<Long>> attributeIdsMapping = PATTERN_TEMPLATES_AGGREGATION_FETCHER.apply(dsl.with(LAUNCHES)
            .as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(launchesLimit).build())
            .select(launchIdsField, ITEM_ATTRIBUTE.VALUE)
            .from(LAUNCH)
            .join(LAUNCHES)
            .on(fieldName(LAUNCHES, ID).cast(Long.class).eq(LAUNCH.ID))
            .join(ITEM_ATTRIBUTE)
            .on(LAUNCH.ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID))
            .where(attributeKeyCondition)
            // Keep only the top attributesLimit attribute values (version-aware order).
            .and(ITEM_ATTRIBUTE.VALUE.in(dsl.select(ITEM_ATTRIBUTE.VALUE)
                    .from(ITEM_ATTRIBUTE)
                    .join(LAUNCHES)
                    .on(fieldName(LAUNCHES, ID).cast(Long.class).eq(ITEM_ATTRIBUTE.LAUNCH_ID))
                    .where(attributeKeyCondition)
                    .groupBy(ITEM_ATTRIBUTE.VALUE)
                    .orderBy(DSL.when(ITEM_ATTRIBUTE.VALUE.likeRegex(VERSION_PATTERN),
                            PostgresDSL.stringToArray(ITEM_ATTRIBUTE.VALUE, VERSION_DELIMITER).cast(Integer[].class)
                    ), ITEM_ATTRIBUTE.VALUE.sort(SortOrder.ASC))
                    .limit(attributesLimit)))
            .groupBy(groupingFields)
            .orderBy(DSL.when(ITEM_ATTRIBUTE.VALUE.likeRegex(VERSION_PATTERN),
                    PostgresDSL.stringToArray(ITEM_ATTRIBUTE.VALUE, VERSION_DELIMITER).cast(Integer[].class)
            ), ITEM_ATTRIBUTE.VALUE.sort(SortOrder.ASC))
            .fetch(), isLatest);
    return StringUtils.isBlank(patternName) ?
            buildPatternTemplatesQuery(attributeIdsMapping) :
            buildPatternTemplatesQueryGroupedByPattern(attributeIdsMapping, patternName);
}
/**
 * Builds the Component Health Check widget content for one attribute level:
 * per attribute value, the total item count and the passing rate
 * (100 * passed / total, rounded to 2 decimals; NULLIF guards division by zero).
 * Results are ordered by passing rate ascending.
 */
@Override
public List<ComponentHealthCheckContent> componentHealthCheck(Filter launchFilter, Sort launchSort, boolean isLatest, int launchesLimit,
        Filter testItemFilter, String currentLevelKey) {
    Table<? extends Record> launchesTable = QueryUtils.createQueryBuilderWithLatestLaunchesOption(launchFilter, launchSort, isLatest)
            .with(launchesLimit)
            .with(launchSort)
            .build()
            .asTable(LAUNCHES);
    return COMPONENT_HEALTH_CHECK_FETCHER.apply(dsl.select(fieldName(ITEMS, VALUE),
            DSL.count(fieldName(ITEMS, ITEM_ID)).as(TOTAL),
            // passing rate = 100 * passed / total, NULLIF avoids division by zero
            DSL.round(DSL.val(PERCENTAGE_MULTIPLIER)
                    .mul(DSL.count(fieldName(ITEMS, ITEM_ID))
                            .filterWhere(fieldName(ITEMS, STATUS).cast(JStatusEnum.class).eq(JStatusEnum.PASSED)))
                    .div(DSL.nullif(DSL.count(fieldName(ITEMS, ITEM_ID)), 0)), 2).as(PASSING_RATE)
            )
            .from(dsl.with(ITEMS)
                    .as(QueryBuilder.newBuilder(testItemFilter, collectJoinFields(testItemFilter))
                            // Restrict test items to launches selected by the launch filter.
                            .addJointToStart(launchesTable,
                                    JoinType.JOIN,
                                    TEST_ITEM.LAUNCH_ID.eq(fieldName(launchesTable.getName(), ID).cast(Long.class))
                            )
                            .build())
                    .select(TEST_ITEM.ITEM_ID, TEST_ITEM_RESULTS.STATUS, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE)
                    .from(TEST_ITEM)
                    .join(ITEMS)
                    .on(TEST_ITEM.ITEM_ID.eq(fieldName(ITEMS, ID).cast(Long.class)))
                    .join(TEST_ITEM_RESULTS)
                    .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID))
                    .join(ITEM_ATTRIBUTE)
                    // Attribute may be attached to the item itself or to its launch.
                    .on((TEST_ITEM.ITEM_ID.eq(ITEM_ATTRIBUTE.ITEM_ID)
                            .or(TEST_ITEM.LAUNCH_ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID))).and(ITEM_ATTRIBUTE.KEY.eq(currentLevelKey)
                            .and(ITEM_ATTRIBUTE.SYSTEM.isFalse())))
                    .groupBy(TEST_ITEM.ITEM_ID, TEST_ITEM_RESULTS.STATUS, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE)
                    .asTable(ITEMS))
            .groupBy(fieldName(ITEMS, VALUE))
            .orderBy(DSL.round(DSL.val(PERCENTAGE_MULTIPLIER)
                    .mul(DSL.count(fieldName(ITEMS, ITEM_ID))
                            .filterWhere(fieldName(ITEMS, STATUS).cast(JStatusEnum.class).eq(JStatusEnum.PASSED)))
                    .div(DSL.nullif(DSL.count(fieldName(ITEMS, ITEM_ID)), 0)), 2))
            .fetch());
}
/**
 * Creates (optionally recreating) the materialized view backing the Cumulative
 * Trend Chart widget.
 * <p>
 * The first level groups launches by name and the first attribute, keeping the
 * max launch id plus the aggregated launch ids. If a second attribute is given,
 * a second-level select (joined through the aggregated ids) is UNION-ed in.
 * The combined query is wrapped into a CREATE MATERIALIZED VIEW statement.
 */
@Override
public void generateCumulativeTrendChartView(boolean refresh, String viewName, Filter launchFilter, Sort launchesSort,
        List<String> attributes, int launchesLimit) {
    if (refresh) {
        removeWidgetView(viewName);
    }
    final String FIRST_LEVEL = "first_level";
    final SelectJoinStep<Record5<Long, String, Long, String, String>> FIRST_LEVEL_TABLE = dsl.with(FIRST_LEVEL)
            .as(dsl.with(LAUNCHES)
                    .as(QueryBuilder.newBuilder(launchFilter, collectJoinFields(launchFilter))
                            .with(launchesSort)
                            .with(launchesLimit)
                            .build())
                    .select(max(LAUNCH.ID).as(ID),
                            LAUNCH.NAME,
                            arrayAggDistinct(LAUNCH.ID).as(AGGREGATED_LAUNCHES_IDS),
                            ITEM_ATTRIBUTE.KEY.as(ATTRIBUTE_KEY),
                            ITEM_ATTRIBUTE.VALUE.as(ATTRIBUTE_VALUE)
                    )
                    .from(LAUNCH)
                    .join(LAUNCHES)
                    .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
                    .join(ITEM_ATTRIBUTE)
                    .on(LAUNCH.ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID))
                    // Only the first-level (non-system) attribute key.
                    .and(ITEM_ATTRIBUTE.KEY.eq(attributes.get(0)).and(ITEM_ATTRIBUTE.SYSTEM.isFalse()))
                    .groupBy(LAUNCH.NAME, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE))
            .select(fieldName(FIRST_LEVEL, ID).cast(Long.class).as(ID),
                    fieldName(FIRST_LEVEL, NAME).cast(String.class).as(NAME),
                    // First-level rows have no parent; NULL typed as Long for the union.
                    val(null, fieldName(FIRST_LEVEL, ID).cast(Long.class)).as(FIRST_LEVEL_ID),
                    fieldName(FIRST_LEVEL, ATTRIBUTE_KEY).cast(String.class).as(ATTRIBUTE_KEY),
                    fieldName(FIRST_LEVEL, ATTRIBUTE_VALUE).cast(String.class).as(ATTRIBUTE_VALUE)
            )
            .from(FIRST_LEVEL);
    SelectQuery<Record5<Long, String, Long, String, String>> query;
    if (attributes.size() == 2 && attributes.get(1) != null) {
        final SelectHavingStep<Record5<Long, String, Long, String, String>> SECOND_LEVEL_TABLE = dsl.select(max(LAUNCH.ID).as(ID),
                LAUNCH.NAME,
                max(fieldName(FIRST_LEVEL, ID)).cast(Long.class).as(FIRST_LEVEL_ID),
                ITEM_ATTRIBUTE.KEY.as(ATTRIBUTE_KEY),
                ITEM_ATTRIBUTE.VALUE.as(ATTRIBUTE_VALUE)
        )
                .from(FIRST_LEVEL)
                .join(LAUNCH)
                // Raw SQL join: launch id must be contained in the aggregated id array.
                .on(Suppliers.formattedSupplier("{} = any({})", LAUNCH.ID, AGGREGATED_LAUNCHES_IDS).get())
                .join(ITEM_ATTRIBUTE)
                .on(LAUNCH.ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID))
                .and(ITEM_ATTRIBUTE.KEY.eq(attributes.get(1)).and(ITEM_ATTRIBUTE.SYSTEM.isFalse()))
                .groupBy(LAUNCH.NAME,
                        fieldName(FIRST_LEVEL, ATTRIBUTE_KEY),
                        fieldName(FIRST_LEVEL, ATTRIBUTE_VALUE),
                        ITEM_ATTRIBUTE.KEY,
                        ITEM_ATTRIBUTE.VALUE
                );
        query = FIRST_LEVEL_TABLE.union(SECOND_LEVEL_TABLE).getQuery();
    } else {
        query = FIRST_LEVEL_TABLE.getQuery();
    }
    dsl.execute(DSL.sql(Suppliers.formattedSupplier("CREATE MATERIALIZED VIEW {} AS ({})", DSL.name(viewName), query).get()));
}
/**
 * Reads the Cumulative Trend Chart content from its materialized view.
 * <p>
 * Aggregates statistics counters per attribute value at the requested level,
 * optionally scoped to a parent attribute ("key:value"); attribute values that
 * look like versions are ordered numerically. When a sub-attribute key is
 * given, tooltip data for each entry's launches is fetched in a follow-up query.
 */
@Override
public List<CumulativeTrendChartEntry> cumulativeTrendChart(String viewName, String levelAttributeKey, @Nullable String subAttributeKey,
        @Nullable String parentAttribute) {
    final SelectOnConditionStep<? extends Record4> baseQuery = dsl.select(DSL.arrayAgg(fieldName(viewName, ID)).as(LAUNCHES),
            fieldName(viewName, ATTRIBUTE_VALUE),
            STATISTICS_FIELD.NAME,
            sum(STATISTICS.S_COUNTER).as(STATISTICS_COUNTER)
    )
            .from(viewName)
            .join(STATISTICS)
            .on(fieldName(viewName, ID).cast(Long.class).eq(STATISTICS.LAUNCH_ID))
            .join(STATISTICS_FIELD)
            .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID));
    if (parentAttribute != null) {
        // parentAttribute is "key<sep>value"; restrict to rows whose first-level
        // parent matches that attribute pair.
        String[] split = parentAttribute.split(KEY_VALUE_SEPARATOR);
        final SelectConditionStep<Record1<Long>> subLevelLaunches = selectDistinct(fieldName(viewName, ID).cast(Long.class)).from(
                viewName)
                .where(fieldName(viewName, ATTRIBUTE_KEY).cast(String.class).eq(split[0]))
                .and(fieldName(viewName, ATTRIBUTE_VALUE).cast(String.class).eq(split[1]));
        baseQuery.where(fieldName(viewName, FIRST_LEVEL_ID).cast(Long.class).in(subLevelLaunches));
    }
    List<CumulativeTrendChartEntry> accumulatedLaunches = CUMULATIVE_TREND_CHART_FETCHER.apply(baseQuery.where(fieldName(ATTRIBUTE_KEY).cast(
            String.class).eq(levelAttributeKey)).groupBy(fieldName(viewName, ATTRIBUTE_VALUE), STATISTICS_FIELD.NAME).orderBy(when(
            fieldName(viewName, ATTRIBUTE_VALUE).likeRegex(VERSION_PATTERN),
            PostgresDSL.stringToArray(field(name(viewName, ATTRIBUTE_VALUE), String.class), VERSION_DELIMITER).cast(Integer[].class)
    ), fieldName(viewName, ATTRIBUTE_VALUE).sort(SortOrder.ASC)).fetch());
    if (!StringUtils.isEmpty(subAttributeKey)) {
        // Attach sub-level attribute tooltips per accumulated entry.
        accumulatedLaunches.forEach(attributeLaunches -> CUMULATIVE_TOOLTIP_FETCHER.accept(
                attributeLaunches,
                dsl.selectDistinct(fieldName(viewName, ATTRIBUTE_KEY), fieldName(viewName, ATTRIBUTE_VALUE))
                        .from(viewName)
                        .where(fieldName(viewName, ATTRIBUTE_KEY).cast(String.class)
                                .eq(subAttributeKey)
                                .and(fieldName(viewName, ID).in(attributeLaunches.getContent().getLaunchIds())))
                        .fetch()
        ));
    }
    return accumulatedLaunches;
}
/**
 * Creates (optionally recreating) the materialized view backing the Component
 * Health Check Table widget.
 * <p>
 * Selects leaf step items (with stats, no children, not retries, finished) of
 * the filtered launches together with their level attributes; when a custom
 * column key is configured, its distinct values are aggregated via an extra
 * left-joined attribute alias.
 */
@Override
public void generateComponentHealthCheckTable(boolean refresh, HealthCheckTableInitParams params, Filter launchFilter, Sort launchSort,
        int launchesLimit, boolean isLatest) {
    if (refresh) {
        removeWidgetView(params.getViewName());
    }
    Table<? extends Record> launchesTable = QueryUtils.createQueryBuilderWithLatestLaunchesOption(launchFilter, launchSort, isLatest)
            .with(launchesLimit)
            .with(launchSort)
            .build()
            .asTable(LAUNCHES);
    List<Field<?>> selectFields = Lists.newArrayList(TEST_ITEM.ITEM_ID, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE);
    // Custom column: aggregate the distinct non-null values of the custom attribute.
    ofNullable(params.getCustomKey()).ifPresent(key -> selectFields.add(DSL.arrayAggDistinct(fieldName(CUSTOM_ATTRIBUTE, VALUE))
            .filterWhere(fieldName(CUSTOM_ATTRIBUTE, VALUE).isNotNull())
            .as(CUSTOM_COLUMN)));
    SelectOnConditionStep<Record> baseQuery = select(selectFields).from(TEST_ITEM)
            .join(launchesTable)
            .on(TEST_ITEM.LAUNCH_ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
            .join(TEST_ITEM_RESULTS)
            .on(TEST_ITEM.ITEM_ID.eq(TEST_ITEM_RESULTS.RESULT_ID))
            .join(ITEM_ATTRIBUTE)
            // Attribute may belong to the item or to its launch.
            .on(and(TEST_ITEM.ITEM_ID.eq(ITEM_ATTRIBUTE.ITEM_ID).or(TEST_ITEM.LAUNCH_ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID))).and(
                    ITEM_ATTRIBUTE.KEY.in(params.getAttributeKeys())).and(ITEM_ATTRIBUTE.SYSTEM.isFalse()));
    dsl.execute(DSL.sql(Suppliers.formattedSupplier("CREATE MATERIALIZED VIEW {} AS ({})",
            DSL.name(params.getViewName()),
            ofNullable(params.getCustomKey()).map(key -> {
                JItemAttribute customAttribute = ITEM_ATTRIBUTE.as(CUSTOM_ATTRIBUTE);
                return baseQuery.leftJoin(customAttribute)
                        .on(DSL.condition(Operator.OR,
                                TEST_ITEM.ITEM_ID.eq(customAttribute.ITEM_ID),
                                TEST_ITEM.LAUNCH_ID.eq(customAttribute.LAUNCH_ID)
                        )
                                .and(customAttribute.KEY.eq(key)));
            })
                    .orElse(baseQuery)
                    // Only finished leaf STEP items that are not retries.
                    .where(TEST_ITEM.HAS_STATS.isTrue()
                            .and(TEST_ITEM.HAS_CHILDREN.isFalse())
                            .and(TEST_ITEM.TYPE.eq(JTestItemTypeEnum.STEP))
                            .and(TEST_ITEM.RETRY_OF.isNull())
                            .and(TEST_ITEM_RESULTS.STATUS.notEqual(JStatusEnum.IN_PROGRESS)))
                    .groupBy(TEST_ITEM.ITEM_ID, ITEM_ATTRIBUTE.KEY, ITEM_ATTRIBUTE.VALUE)
                    .getQuery()
    ).get()));
}
/**
 * Drops the materialized view backing a widget, silently succeeding when the
 * view does not exist.
 *
 * @param viewName name of the materialized view to remove
 */
@Override
public void removeWidgetView(String viewName) {
    String dropStatement = Suppliers.formattedSupplier("DROP MATERIALIZED VIEW IF EXISTS {}", DSL.name(viewName)).get();
    dsl.execute(DSL.sql(dropStatement));
}
/**
 * Reads Component Health Check Table content by delegating to the configured
 * fetching chain, which resolves the appropriate query for the given params.
 */
@Override
public List<HealthCheckTableContent> componentHealthCheckTable(HealthCheckTableGetParams params) {
    return healthCheckTableChain.apply(params);
}
/**
 * Builds the Launches Table select: filtered launches joined with the requested
 * statistics counters (left join, so launches without stats are kept) and the
 * owning user; launch attributes are left-joined only when requested.
 */
private SelectSeekStepN<? extends Record> buildLaunchesTableQuery(Collection<Field<?>> selectFields,
        Collection<String> statisticsFields, Filter filter, Sort sort, int limit, boolean isAttributePresent) {
    SelectOnConditionStep<? extends Record> select = dsl.with(LAUNCHES)
            .as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
            .select(selectFields)
            .from(LAUNCH)
            .join(LAUNCHES)
            .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
            .leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
                    .from(STATISTICS)
                    .join(STATISTICS_FIELD)
                    .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
                    .where(STATISTICS_FIELD.NAME.in(statisticsFields))
                    .asTable(STATISTICS_TABLE))
            .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)))
            .join(USERS)
            .on(LAUNCH.USER_ID.eq(USERS.ID));
    if (isAttributePresent) {
        select = select.leftJoin(ITEM_ATTRIBUTE).on(LAUNCH.ID.eq(ITEM_ATTRIBUTE.LAUNCH_ID));
    }
    return select.orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES));
}
/**
 * Builds the Passing Rate select over the filtered launches: sums the
 * executions$passed and executions$total counters (pivoted via CASE/SUM) and
 * keeps the max launch number.
 */
private SelectOnConditionStep<? extends Record> buildPassingRateSelect(Filter filter, Sort sort, int limit) {
    return dsl.with(LAUNCHES)
            .as(QueryBuilder.newBuilder(filter, collectJoinFields(filter, sort)).with(sort).with(limit).build())
            // Pivot: SUM(CASE WHEN name = passed/total THEN counter ELSE 0 END).
            .select(sum(when(fieldName(STATISTICS_TABLE, SF_NAME).cast(String.class).eq(EXECUTIONS_PASSED),
                    fieldName(STATISTICS_TABLE, STATISTICS_COUNTER).cast(Integer.class)
            ).otherwise(0)).as(PASSED), sum(when(fieldName(STATISTICS_TABLE, SF_NAME).cast(String.class).eq(EXECUTIONS_TOTAL),
                    fieldName(STATISTICS_TABLE, STATISTICS_COUNTER).cast(Integer.class)
            ).otherwise(0)).as(TOTAL), max(LAUNCH.NUMBER).as(NUMBER))
            .from(LAUNCH)
            .join(LAUNCHES)
            .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
            .leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
                    .from(STATISTICS)
                    .join(STATISTICS_FIELD)
                    .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
                    .where(STATISTICS_FIELD.NAME.in(EXECUTIONS_PASSED, EXECUTIONS_TOTAL))
                    .asTable(STATISTICS_TABLE))
            .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)));
}
/**
 * Builds one per-filter sub-query for the filter-grouped Product Status widget:
 * the common product-status fields plus the filter's name (scalar sub-select)
 * so rows can be grouped by filter after the UNION.
 */
private SelectSeekStepN<? extends Record> buildFilterGroupedQuery(Filter filter, boolean isLatest, Sort sort, int limit,
        Collection<String> contentFields, Map<String, String> customColumns) {
    List<Field<?>> fields = getCommonProductStatusFields(filter, contentFields);
    // Attach the filter name so the fetcher can group rows per source filter.
    fields.add(DSL.selectDistinct(FILTER.NAME).from(FILTER).where(FILTER.ID.eq(filter.getId())).asField(FILTER_NAME));
    return buildProductStatusQuery(filter,
            isLatest,
            sort,
            limit,
            fields,
            contentFields,
            customColumns
    ).orderBy(WidgetSortUtils.sortingTransformer(filter.getTarget()).apply(sort, LAUNCHES));
}
/**
 * Assembles the select fields shared by all Product Status queries: the
 * requested (non-statistics) content columns resolved through the filter
 * target's criteria, plus launch identity fields, statistics columns, the
 * computed passing rate (100 * passed / total, NULLIF-guarded) and duration.
 * Unknown content fields are silently skipped here (unlike the launches table).
 */
private List<Field<?>> getCommonProductStatusFields(Filter filter, Collection<String> contentFields) {
    Map<String, String> criteria = filter.getTarget()
            .getCriteriaHolders()
            .stream()
            .collect(Collectors.toMap(CriteriaHolder::getFilterCriteria, CriteriaHolder::getQueryCriteria));
    List<Field<?>> selectFields = contentFields.stream()
            .filter(cf -> !cf.startsWith(STATISTICS_KEY))
            .map(criteria::get)
            .filter(Objects::nonNull)
            .map(DSL::field)
            .collect(Collectors.toList());
    Collections.addAll(selectFields,
            LAUNCH.ID,
            LAUNCH.NAME,
            LAUNCH.NUMBER,
            fieldName(STATISTICS_TABLE, SF_NAME),
            fieldName(STATISTICS_TABLE, STATISTICS_COUNTER),
            // passing rate = round(100 * passed / total, 2); NULLIF avoids /0
            round(val(PERCENTAGE_MULTIPLIER).mul(dsl.select(sum(STATISTICS.S_COUNTER))
                    .from(STATISTICS)
                    .join(STATISTICS_FIELD)
                    .onKey()
                    .where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_PASSED).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
                    .asField()
                    .cast(Double.class))
                    .div(nullif(dsl.select(sum(STATISTICS.S_COUNTER))
                            .from(STATISTICS)
                            .join(STATISTICS_FIELD)
                            .onKey()
                            .where(STATISTICS_FIELD.NAME.eq(EXECUTIONS_TOTAL).and(STATISTICS.LAUNCH_ID.eq(LAUNCH.ID)))
                            .asField(), 0)), 2).as(PASSING_RATE),
            timestampDiff(LAUNCH.END_TIME, LAUNCH.START_TIME).as(DURATION)
    );
    return selectFields;
}
/**
 * Builds the core Product Status query. When custom columns are configured,
 * their attribute-key conditions are OR-ed together, the attribute columns are
 * added to the select list, and a filtered attribute sub-select is left-joined;
 * otherwise only the plain product-status select is returned.
 * <p>
 * Note: {@code fields} is mutated (attribute columns appended) when custom
 * columns are present — same behavior as before.
 */
private SelectOnConditionStep<? extends Record> buildProductStatusQuery(Filter filter, boolean isLatest, Sort sort, int limit,
        Collection<Field<?>> fields, Collection<String> contentFields, Map<String, String> customColumns) {
    List<Condition> attributesKeyConditions = customColumns.values()
            .stream()
            // A null custom column matches attributes with a NULL key.
            .map(customColumn -> ofNullable(customColumn).map(ITEM_ATTRIBUTE.KEY::eq).orElseGet(ITEM_ATTRIBUTE.KEY::isNull))
            .collect(Collectors.toList());
    // Combine all key conditions with OR. (The original assigned to the lambda
    // parameter inside reduce, which was a no-op and has been removed.)
    Optional<Condition> combinedAttributeKeyCondition = attributesKeyConditions.stream().reduce((prev, curr) -> prev.or(curr));
    List<String> statisticsFields = contentFields.stream().filter(cf -> cf.startsWith(STATISTICS_KEY)).collect(toList());
    return combinedAttributeKeyCondition.map(c -> {
        Collections.addAll(fields,
                fieldName(ATTR_TABLE, ATTR_ID),
                fieldName(ATTR_TABLE, ATTRIBUTE_VALUE),
                fieldName(ATTR_TABLE, ATTRIBUTE_KEY)
        );
        return getProductStatusSelect(filter, isLatest, sort, limit, fields, statisticsFields).leftJoin(DSL.select(ITEM_ATTRIBUTE.ID.as(
                ATTR_ID),
                ITEM_ATTRIBUTE.VALUE.as(ATTRIBUTE_VALUE),
                ITEM_ATTRIBUTE.KEY.as(ATTRIBUTE_KEY),
                ITEM_ATTRIBUTE.LAUNCH_ID.as(LAUNCH_ID)
        ).from(ITEM_ATTRIBUTE).where(c).asTable(ATTR_TABLE)).on(LAUNCH.ID.eq(fieldName(ATTR_TABLE, LAUNCH_ID).cast(Long.class)));
    }).orElseGet(() -> getProductStatusSelect(filter, isLatest, sort, limit, fields, statisticsFields));
}
/**
 * Base Product Status select: filtered launches (optionally latest-only)
 * left-joined with the requested statistics counters so launches without the
 * selected statistics are still returned.
 */
private SelectOnConditionStep<Record> getProductStatusSelect(Filter filter, boolean isLatest, Sort sort, int limit,
        Collection<Field<?>> fields, Collection<String> contentFields) {
    return dsl.with(LAUNCHES)
            .as(QueryUtils.createQueryBuilderWithLatestLaunchesOption(filter, sort, isLatest).with(sort).with(limit).build())
            .select(fields)
            .from(LAUNCH)
            .join(LAUNCHES)
            .on(LAUNCH.ID.eq(fieldName(LAUNCHES, ID).cast(Long.class)))
            .leftJoin(DSL.select(STATISTICS.LAUNCH_ID, STATISTICS.S_COUNTER.as(STATISTICS_COUNTER), STATISTICS_FIELD.NAME.as(SF_NAME))
                    .from(STATISTICS)
                    .join(STATISTICS_FIELD)
                    .on(STATISTICS.STATISTICS_FIELD_ID.eq(STATISTICS_FIELD.SF_ID))
                    .where(STATISTICS_FIELD.NAME.in(contentFields))
                    .asTable(STATISTICS_TABLE))
            .on(LAUNCH.ID.eq(fieldName(STATISTICS_TABLE, LAUNCH_ID).cast(Long.class)));
}
/**
 * Folds per-launch product status rows into a single "total" row: statistics
 * counters are summed per field name and the passing rate is averaged across
 * launches (null rates counted as 0), rounded half-up to 2 decimals.
 *
 * @param launchesStatisticsResult per-launch rows to aggregate
 * @return a synthetic row carrying the summed counters and average passing rate
 */
private ProductStatusStatisticsContent countLaunchTotalStatistics(List<ProductStatusStatisticsContent> launchesStatisticsResult) {
    Map<String, Integer> total = launchesStatisticsResult.stream()
            .flatMap(lsc -> lsc.getValues().entrySet().stream())
            .collect(Collectors.groupingBy(Map.Entry::getKey, summingInt(entry -> Integer.parseInt(entry.getValue()))));
    // Primitive double — the boxed Double locals were unnecessary.
    double averagePassingRate = launchesStatisticsResult.stream()
            .collect(averagingDouble(lsc -> ofNullable(lsc.getPassingRate()).orElse(0D)));
    ProductStatusStatisticsContent launchesStatisticsContent = new ProductStatusStatisticsContent();
    launchesStatisticsContent.setTotalStatistics(total);
    double roundedAveragePassingRate = BigDecimal.valueOf(averagePassingRate).setScale(2, RoundingMode.HALF_UP).doubleValue();
    launchesStatisticsContent.setAveragePassingRate(roundedAveragePassingRate);
    return launchesStatisticsContent;
}
/**
 * Folds all per-filter product status rows into one aggregated "total" row:
 * statistics counters summed per field name across every filter's rows and the
 * passing rate averaged (null rates counted as 0), rounded half-up to 2
 * decimals. Returned as a singleton list for the TOTAL map entry.
 */
private List<ProductStatusStatisticsContent> countFilterTotalStatistics(
        Map<String, List<ProductStatusStatisticsContent>> launchesStatisticsResult) {
    Map<String, Integer> total = launchesStatisticsResult.values()
            .stream()
            .flatMap(Collection::stream)
            .flatMap(lsc -> lsc.getValues().entrySet().stream())
            .collect(Collectors.groupingBy(Map.Entry::getKey, summingInt(entry -> Integer.parseInt(entry.getValue()))));
    // Primitive double — the boxed Double locals were unnecessary.
    double averagePassingRate = launchesStatisticsResult.values()
            .stream()
            .flatMap(Collection::stream)
            .collect(averagingDouble(lsc -> ofNullable(lsc.getPassingRate()).orElse(0D)));
    ProductStatusStatisticsContent launchesStatisticsContent = new ProductStatusStatisticsContent();
    launchesStatisticsContent.setTotalStatistics(total);
    double roundedAveragePassingRate = BigDecimal.valueOf(averagePassingRate).setScale(2, RoundingMode.HALF_UP).doubleValue();
    launchesStatisticsContent.setAveragePassingRate(roundedAveragePassingRate);
    return Lists.newArrayList(launchesStatisticsContent);
}
/**
 * Builds and runs the Top Pattern Templates query: per attribute value, counts
 * distinct matched items per pattern over that value's launches, keeping the
 * top {@code PATTERNS_COUNT} patterns; the per-value selects are combined with
 * UNION ALL. Returns an empty list when the mapping is empty.
 */
private List<TopPatternTemplatesContent> buildPatternTemplatesQuery(Map<String, List<Long>> attributeIdsMapping) {
    return attributeIdsMapping.entrySet()
            .stream()
            .map(entry -> (Select<? extends Record>) dsl.select(DSL.val(entry.getKey()).as(ATTRIBUTE_VALUE),
                    PATTERN_TEMPLATE.NAME,
                    DSL.countDistinct(PATTERN_TEMPLATE_TEST_ITEM.ITEM_ID).as(TOTAL)
            )
                    .from(PATTERN_TEMPLATE)
                    .join(PATTERN_TEMPLATE_TEST_ITEM)
                    .on(PATTERN_TEMPLATE.ID.eq(PATTERN_TEMPLATE_TEST_ITEM.PATTERN_ID))
                    .join(TEST_ITEM)
                    .on(PATTERN_TEMPLATE_TEST_ITEM.ITEM_ID.eq(TEST_ITEM.ITEM_ID))
                    .join(LAUNCH)
                    .on(TEST_ITEM.LAUNCH_ID.eq(LAUNCH.ID))
                    .where(LAUNCH.ID.in(entry.getValue()))
                    .groupBy(PATTERN_TEMPLATE.NAME)
                    .orderBy(field(TOTAL).desc())
                    .limit(PATTERNS_COUNT))
            // Chain with UNION ALL. (The original assigned to the lambda parameter
            // inside reduce, which was a no-op and has been removed.)
            .reduce((prev, curr) -> prev.unionAll(curr))
            .map(select -> TOP_PATTERN_TEMPLATES_FETCHER.apply(select.fetch()))
            .orElseGet(Collections::emptyList);
}
/**
 * Builds and runs the Top Pattern Templates query for one named pattern,
 * grouped by launch: per attribute value, counts distinct matched items of that
 * pattern per launch (only launches with at least one match, via HAVING);
 * per-value selects are combined with UNION ALL. Returns an empty list when the
 * mapping is empty.
 */
private List<TopPatternTemplatesContent> buildPatternTemplatesQueryGroupedByPattern(Map<String, List<Long>> attributeIdsMapping,
        String patternTemplateName) {
    return attributeIdsMapping.entrySet()
            .stream()
            .map(entry -> (Select<? extends Record>) dsl.select(DSL.val(entry.getKey()).as(ATTRIBUTE_VALUE),
                    LAUNCH.ID,
                    LAUNCH.NAME,
                    LAUNCH.NUMBER,
                    DSL.countDistinct(PATTERN_TEMPLATE_TEST_ITEM.ITEM_ID).as(TOTAL)
            )
                    .from(PATTERN_TEMPLATE)
                    .join(PATTERN_TEMPLATE_TEST_ITEM)
                    .on(PATTERN_TEMPLATE.ID.eq(PATTERN_TEMPLATE_TEST_ITEM.PATTERN_ID))
                    .join(TEST_ITEM)
                    .on(PATTERN_TEMPLATE_TEST_ITEM.ITEM_ID.eq(TEST_ITEM.ITEM_ID))
                    .join(LAUNCH)
                    .on(TEST_ITEM.LAUNCH_ID.eq(LAUNCH.ID))
                    .where(LAUNCH.ID.in(entry.getValue()))
                    .and(PATTERN_TEMPLATE.NAME.eq(patternTemplateName))
                    .groupBy(LAUNCH.ID, LAUNCH.NAME, LAUNCH.NUMBER, PATTERN_TEMPLATE.NAME)
                    .having(DSL.countDistinct(PATTERN_TEMPLATE_TEST_ITEM.ITEM_ID).gt(BigDecimal.ZERO.intValue()))
                    .orderBy(field(TOTAL).desc()))
            // Chain with UNION ALL. (The original assigned to the lambda parameter
            // inside reduce, which was a no-op and has been removed.)
            .reduce((prev, curr) -> prev.unionAll(curr))
            .map(select -> TOP_PATTERN_TEMPLATES_GROUPED_FETCHER.apply(select.fetch()))
            .orElseGet(Collections::emptyList);
}
}
| Fix ctc widget based on array filter
| src/main/java/com/epam/ta/reportportal/dao/WidgetContentRepositoryImpl.java | Fix ctc widget based on array filter |
|
Java | apache-2.0 | fe05559c038d6f6a530ba8b2f4bf74c5072b8e84 | 0 | zongjingyao/AndroidActionSheet | package cn.zjy.actionsheet;
import android.app.DialogFragment;
import android.app.Fragment;
import android.app.FragmentManager;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.TextView;
/**
* Created by zongjingyao on 16/10/4.
*/
/**
 * A bottom action sheet implemented as a {@link DialogFragment}: an optional
 * title, a column of "other" buttons, and an optional cancel button. All
 * configuration is passed via fragment arguments (keys below); button clicks
 * are reported to an {@code ActionSheetListener} by zero-based index, with the
 * cancel button taking the index after the last "other" button.
 */
public class ActionSheet extends DialogFragment implements View.OnClickListener {
    // Fragment tag shared by every instance so only one sheet is shown at a time.
    public static final String TAG = "ActionSheet";
    // Argument keys understood by this fragment.
    public static final String TITLE = "title";
    public static final String TITLE_COLOR = "title_color";
    public static final String TITLE_TEXT_SIZE = "title_text_size";
    public static final String CANCEL_BTN_TITLE = "cancel_btn_title";
    public static final String CANCEL_BTN_TITLE_COLOR = "cancel_btn_title_color";
    public static final String CANCEL_BTN_TEXT_SIZE = "cancel_btn_text_size";
    public static final String OTHER_BTN_TITLES = "other_btn_titles";
    public static final String OTHER_BTN_TITLE_COLORS = "other_btn_title_colors";
    public static final String OTHER_BTN_TEXT_SIZE = "other_btn_text_size";
    public static final String CANCELABLE_ON_TOUCH_OUTSIDE = "cancelable_on_touch_outside";
    // Fallback appearance values used when the corresponding argument is absent.
    private static final float DEFAULT_TITLE_TEXT_SIZE = 18;
    private static final float DEFAULT_BTN_TEXT_SIZE = 20;
    private static final int DEFAULT_TITLE_TEXT_COLOR = Color.parseColor("#929292");
    private static final int DEFAULT_BTN_TEXT_COLOR = Color.BLACK;
    private ActionSheetListener mActionSheetListener;
    // Index of the last clicked button; -1 means dismissed without a click.
    private int mClickedBtnIdx = -1;
/**
 * Shows this action sheet, first dismissing any previously attached instance
 * with the shared {@link #TAG} so only one sheet is visible at a time.
 *
 * @param fragmentManager manager used to attach the dialog fragment
 */
public void show(FragmentManager fragmentManager) {
    Fragment fragment = fragmentManager.findFragmentByTag(TAG);
    // instanceof is null-safe, so the previous explicit null check was redundant.
    if (fragment instanceof ActionSheet) {
        ((ActionSheet) fragment).dismiss();
    }
    show(fragmentManager, TAG);
}
/**
 * Registers the listener that receives button-click and dismiss callbacks.
 *
 * @param listener callback target; may be null to clear
 */
public void setActionSheetListener(ActionSheetListener listener) {
    this.mActionSheetListener = listener;
}
/**
 * Notifies the listener when the sheet goes away. The second argument is true
 * when any button (including the cancel button) was tapped, false when the
 * sheet was dismissed some other way (e.g. touch outside).
 */
@Override
public void onDismiss(DialogInterface dialog) {
    super.onDismiss(dialog);
    if (mActionSheetListener != null) {
        mActionSheetListener.onDismiss(this, mClickedBtnIdx >= 0);
    }
}
/**
 * Standard fragment creation; applies the default dialog style with no
 * custom theme.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    final int noTheme = 0;
    setStyle(DialogFragment.STYLE_NORMAL, noTheme);
}
/**
 * Configures the dialog window once it is attached: transparent background,
 * bottom placement spanning the full width, slide-in/out animation, and
 * match-parent width.
 */
@Override
public void onStart() {
    super.onStart();
    Window window = getDialog().getWindow();
    if (window == null) return;
    // Transparent window so only the sheet's own drawables are visible.
    window.setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
    // FILL_HORIZONTAL is the same bit value as LEFT | RIGHT, but states the
    // intent (full-width at the bottom) directly.
    window.setGravity(Gravity.BOTTOM | Gravity.FILL_HORIZONTAL);
    WindowManager.LayoutParams params = window.getAttributes();
    params.windowAnimations = R.style.ActionSheetAnimation;
    params.width = WindowManager.LayoutParams.MATCH_PARENT;
    window.setAttributes(params);
}
/**
 * Builds the sheet's view hierarchy, requesting the NO_TITLE window feature
 * first so no default dialog title bar is drawn.
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    Window window = getDialog().getWindow();
    if (window != null) {
        // Use the null-checked local instead of re-querying the window.
        window.requestFeature(Window.FEATURE_NO_TITLE);
    }
    return createView();
}
/**
 * Builds the sheet's root layout: a vertical LinearLayout holding (in order)
 * the optional title, the "other" buttons, and the optional cancel button.
 * Also applies the canceled-on-touch-outside argument to the dialog.
 * NOTE(review): assumes {@code getArguments()} is non-null — the Builder
 * presumably always sets arguments; confirm before calling without them.
 */
private View createView() {
    Bundle args = getArguments();
    getDialog().setCanceledOnTouchOutside(args.getBoolean(CANCELABLE_ON_TOUCH_OUTSIDE, true));
    Context context = getActivity();
    LinearLayout layout = new LinearLayout(context);
    FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT,
            FrameLayout.LayoutParams.MATCH_PARENT);
    int padding = (int) context.getResources().getDimension(R.dimen.action_sheet_padding);
    layout.setLayoutParams(lp);
    layout.setOrientation(LinearLayout.VERTICAL);
    layout.setGravity(Gravity.CENTER_HORIZONTAL);
    // No top padding: the sheet hugs the bottom edge.
    layout.setPadding(padding, 0, padding, padding);
    addTitle(layout);
    addOtherBtns(layout);
    addCancelBtn(layout);
    return layout;
}
/**
 * Adds the title TextView to the sheet, if a title argument is present.
 * The title takes the top-rounded background when "other" buttons follow it,
 * or the fully-rounded single background when it stands alone.
 */
private void addTitle(LinearLayout layout) {
    Bundle args = getArguments();
    String title = args.getString(TITLE);
    if (TextUtils.isEmpty(title)) return;
    int titleColor = args.getInt(TITLE_COLOR, DEFAULT_TITLE_TEXT_COLOR);
    float textSize = args.getFloat(TITLE_TEXT_SIZE, DEFAULT_TITLE_TEXT_SIZE);
    int titleHeight = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_title_height);
    int bottomMargin = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_btn_gap);
    LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
            titleHeight);
    lp.bottomMargin = bottomMargin;
    // Title is visually part of the button group: top-rounded when buttons
    // follow, fully rounded when it is the only element.
    int background = R.drawable.action_sheet_other_btns_bg_top;
    String[] titles = args.getStringArray(OTHER_BTN_TITLES);
    if (titles == null || titles.length == 0) {
        background = R.drawable.action_sheet_other_btns_bg_single;
    }
    TextView tvTitle = new TextView(getActivity());
    tvTitle.setText(title);
    tvTitle.setTextSize(textSize);
    tvTitle.setTextColor(titleColor);
    tvTitle.setGravity(Gravity.CENTER);
    tvTitle.setBackgroundResource(background);
    layout.addView(tvTitle, lp);
}
/**
 * Adds one Button per "other" title. Each button's tag is its zero-based index
 * (reported back through onClick); per-button colors fall back to the default
 * when the colors array is missing or shorter than the titles array.
 */
private void addOtherBtns(LinearLayout layout) {
    Bundle args = getArguments();
    String[] titles = args.getStringArray(OTHER_BTN_TITLES);
    if (titles == null || titles.length == 0) return;
    int[] colors = args.getIntArray(OTHER_BTN_TITLE_COLORS);
    float textSize = args.getFloat(OTHER_BTN_TEXT_SIZE, DEFAULT_BTN_TEXT_SIZE);
    int btnHeight = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_btn_height);
    int bottomMargin = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_btn_gap);
    LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
            btnHeight);
    lp.bottomMargin = bottomMargin;
    int len = titles.length;
    for (int i = 0; i < len; i++) {
        Button btn = new Button(getActivity());
        btn.setText(titles[i]);
        btn.setAllCaps(false);
        btn.setTextSize(textSize);
        // Fall back to the default color when no color is supplied for index i.
        btn.setTextColor(colors != null && i < colors.length ? colors[i] : DEFAULT_BTN_TEXT_COLOR);
        // The tag carries the button index reported to the listener.
        btn.setTag(i);
        btn.setBackgroundResource(getOtherBtnBackground(i));
        btn.setOnClickListener(this);
        layout.addView(btn, lp);
    }
}
/**
 * Picks the background drawable for the "other" button at {@code index} so the
 * button group draws rounded corners only at its top and bottom edges. When a
 * title is present it occupies the first slot of the group, shifting every
 * button down by one position.
 *
 * @param index zero-based index among the "other" buttons
 * @return drawable resource id for the button background
 */
private int getOtherBtnBackground(int index) {
    Bundle args = getArguments();
    String[] btnTitles = args.getStringArray(OTHER_BTN_TITLES);
    if (btnTitles == null || btnTitles.length == 0) {
        return R.drawable.action_sheet_other_btns_bg_single;
    }
    int totalCount = btnTitles.length;
    int position = index;
    if (!TextUtils.isEmpty(args.getString(TITLE))) {
        // The title counts as the group's first element.
        totalCount++;
        position++;
    }
    if (totalCount == 1) {
        return R.drawable.action_sheet_other_btns_bg_single;
    }
    if (position == 0) {
        return R.drawable.action_sheet_other_btns_bg_top;
    }
    if (position == totalCount - 1) {
        return R.drawable.action_sheet_other_btns_bg_bottom;
    }
    return R.drawable.action_sheet_other_btns_bg_middle;
}
/**
 * Adds the cancel button, if a cancel title argument is present. It sits below
 * the button group with its own background, and its tag (click index) is the
 * count of "other" buttons — i.e. the index right after the last of them.
 */
private void addCancelBtn(LinearLayout layout) {
    Bundle args = getArguments();
    String cancelTitle = args.getString(CANCEL_BTN_TITLE);
    if (TextUtils.isEmpty(cancelTitle)) return;
    int cancelBtnColor = args.getInt(CANCEL_BTN_TITLE_COLOR, DEFAULT_BTN_TEXT_COLOR);
    float textSize = args.getFloat(CANCEL_BTN_TEXT_SIZE, DEFAULT_BTN_TEXT_SIZE);
    int btnHeight = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_btn_height);
    LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
            btnHeight);
    // Visual gap separating cancel from the main button group.
    lp.topMargin = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_padding);
    String[] otherBtnTitles = args.getStringArray(OTHER_BTN_TITLES);
    // Cancel gets the next index after the "other" buttons.
    int index = otherBtnTitles == null ? 0 : otherBtnTitles.length;
    Button btnCancel = new Button(getActivity());
    btnCancel.setText(cancelTitle);
    btnCancel.setAllCaps(false);
    btnCancel.setTextSize(textSize);
    btnCancel.setTextColor(cancelBtnColor);
    btnCancel.setOnClickListener(this);
    btnCancel.setBackgroundResource(R.drawable.action_sheet_cancel_btn_bg);
    btnCancel.setTag(index);
    layout.addView(btnCancel, lp);
}
/**
 * Click handler shared by every button of the sheet. Records the clicked
 * button's index (stored as the view tag), notifies the listener, then
 * dismisses the sheet.
 */
@Override
public void onClick(View v) {
    // Remember the index so onDismiss can report a button-triggered dismissal.
    mClickedBtnIdx = (int) v.getTag();
    boolean shouldNotify = mActionSheetListener != null && mClickedBtnIdx >= 0;
    if (shouldNotify) {
        mActionSheetListener.onButtonClicked(ActionSheet.this, mClickedBtnIdx);
    }
    dismiss();
}
/**
 * Fluent builder that assembles the argument {@link Bundle} for an
 * {@link ActionSheet}. Text sizes are only written to the bundle when a
 * positive value was supplied, so the sheet falls back to its built-in
 * defaults otherwise.
 */
public static class Builder {
    private String title;
    private int titleColor;
    private float titleTextSize = -1;
    private String cancelBtnTitle;
    private int cancelBtnTitleColor;
    private float cancelBtnTextSize = -1;
    private String[] otherBtnTitles;
    private int[] otherBtnTitleColors;
    private float otherBtnTextSize = -1;
    private boolean cancelableOnTouchOutside;
    private ActionSheetListener listener;

    /** Sets the sheet title and its text color. */
    public Builder setTitle(String title, int titleColor) {
        this.title = title;
        this.titleColor = titleColor;
        return this;
    }

    /** Sets the title text size; ignored unless positive. */
    public Builder setTitleTextSize(float textSize) {
        titleTextSize = textSize;
        return this;
    }

    /** Sets the cancel button label and its text color. */
    public Builder setCancelBtn(String title, int titleColor) {
        cancelBtnTitle = title;
        cancelBtnTitleColor = titleColor;
        return this;
    }

    /** Sets the cancel button text size; ignored unless positive. */
    public Builder setCancelBtnTextSize(float textSize) {
        cancelBtnTextSize = textSize;
        return this;
    }

    /** Sets the labels and (optional, per-button) text colors of the "other" buttons. */
    public Builder setOtherBtn(String[] titles, int[] titleColors) {
        otherBtnTitles = titles;
        otherBtnTitleColors = titleColors;
        return this;
    }

    /** Sets the "other" buttons' text size; ignored unless positive. */
    public Builder setOtherBtnTextSize(float textSize) {
        otherBtnTextSize = textSize;
        return this;
    }

    /** Controls whether touching outside the sheet dismisses it. */
    public Builder setCancelableOnTouchOutside(boolean cancelable) {
        cancelableOnTouchOutside = cancelable;
        return this;
    }

    /** Registers the callback notified about clicks and dismissal. */
    public Builder setActionSheetListener(ActionSheetListener listener) {
        this.listener = listener;
        return this;
    }

    /**
     * Creates the {@link ActionSheet} with all collected settings packed
     * into its argument bundle.
     */
    public ActionSheet build() {
        Bundle bundle = new Bundle();
        bundle.putString(TITLE, title);
        bundle.putInt(TITLE_COLOR, titleColor);
        if (titleTextSize > 0) {
            bundle.putFloat(TITLE_TEXT_SIZE, titleTextSize);
        }
        bundle.putString(CANCEL_BTN_TITLE, cancelBtnTitle);
        bundle.putInt(CANCEL_BTN_TITLE_COLOR, cancelBtnTitleColor);
        if (cancelBtnTextSize > 0) {
            bundle.putFloat(CANCEL_BTN_TEXT_SIZE, cancelBtnTextSize);
        }
        bundle.putStringArray(OTHER_BTN_TITLES, otherBtnTitles);
        bundle.putIntArray(OTHER_BTN_TITLE_COLORS, otherBtnTitleColors);
        if (otherBtnTextSize > 0) {
            bundle.putFloat(OTHER_BTN_TEXT_SIZE, otherBtnTextSize);
        }
        bundle.putBoolean(CANCELABLE_ON_TOUCH_OUTSIDE, cancelableOnTouchOutside);
        ActionSheet actionSheet = new ActionSheet();
        actionSheet.setActionSheetListener(listener);
        actionSheet.setArguments(bundle);
        return actionSheet;
    }
}
/**
 * Callback interface for {@link ActionSheet} events.
 */
public interface ActionSheetListener {
    /**
     * Invoked when the sheet is dismissed.
     *
     * @param actionSheet the dismissed sheet
     * @param isByBtn     {@code true} when the dismissal was triggered by a
     *                    button click (a button index >= 0 was recorded),
     *                    {@code false} otherwise (e.g. dismissed by a touch
     *                    outside the sheet)
     */
    void onDismiss(ActionSheet actionSheet, boolean isByBtn);
    /**
     * Invoked when one of the sheet's buttons is clicked, before the sheet is
     * dismissed. {@code index} is the clicked button's tag: "other" buttons
     * are numbered from 0, the cancel button uses the next index after them.
     */
    void onButtonClicked(ActionSheet actionSheet, int index);
}
}
| actionsheet/src/main/java/cn/zjy/actionsheet/ActionSheet.java | package cn.zjy.actionsheet;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.app.FragmentManager;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.TextView;
/**
* Created by zongjingyao on 16/10/4.
*/
public class ActionSheet extends DialogFragment implements View.OnClickListener {
public static final String TAG = "ActionSheet";
public static final String TITLE = "title";
public static final String TITLE_COLOR = "title_color";
public static final String TITLE_TEXT_SIZE = "title_text_size";
public static final String CANCEL_BTN_TITLE = "cancel_btn_title";
public static final String CANCEL_BTN_TITLE_COLOR = "cancel_btn_title_color";
public static final String CANCEL_BTN_TEXT_SIZE = "cancel_btn_text_size";
public static final String OTHER_BTN_TITLES = "other_btn_titles";
public static final String OTHER_BTN_TITLE_COLORS = "other_btn_title_colors";
public static final String OTHER_BTN_TEXT_SIZE = "other_btn_text_size";
public static final String CANCELABLE_ON_TOUCH_OUTSIDE = "cancelable_on_touch_outside";
private static final float DEFAULT_TITLE_TEXT_SIZE = 18;
private static final float DEFAULT_BTN_TEXT_SIZE = 20;
private static final int DEFAULT_TITLE_TEXT_COLOR = Color.parseColor("#929292");
private static final int DEFAULT_BTN_TEXT_COLOR = Color.BLACK;
private ActionSheetListener mActionSheetListener;
private int mClickedBtnIdx = -1;
public void show(FragmentManager fragmentManager) {
Fragment fragment = fragmentManager.findFragmentByTag(TAG);
if (fragment != null && fragment instanceof ActionSheet) {
((ActionSheet) fragment).dismiss();
}
show(fragmentManager, TAG);
}
public void setActionSheetListener(ActionSheetListener listener) {
mActionSheetListener = listener;
}
@Override
public void onDismiss(DialogInterface dialog) {
super.onDismiss(dialog);
if (mActionSheetListener != null) {
mActionSheetListener.onDismiss(this, mClickedBtnIdx >= 0);
}
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setStyle(DialogFragment.STYLE_NORMAL, 0);
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
Dialog dialog = super.onCreateDialog(savedInstanceState);
Window window = dialog.getWindow();
if (window != null) {
window.setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
window.setGravity(Gravity.BOTTOM | Gravity.LEFT | Gravity.RIGHT);
WindowManager.LayoutParams params = window.getAttributes();
params.windowAnimations = R.style.ActionSheetAnimation;
window.setAttributes(params);
}
return dialog;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
return createView();
}
private View createView() {
Context context = getActivity();
LinearLayout layout = new LinearLayout(context);
FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.MATCH_PARENT,
FrameLayout.LayoutParams.MATCH_PARENT);
int padding = (int) context.getResources().getDimension(R.dimen.action_sheet_padding);
layout.setLayoutParams(lp);
layout.setOrientation(LinearLayout.VERTICAL);
layout.setGravity(Gravity.CENTER_HORIZONTAL);
layout.setPadding(padding, 0, padding, padding);
addTitle(layout);
addOtherBtns(layout);
addCancelBtn(layout);
return layout;
}
private void addTitle(LinearLayout layout) {
Bundle args = getArguments();
String title = args.getString(TITLE);
if (TextUtils.isEmpty(title)) return;
int titleColor = args.getInt(TITLE_COLOR, DEFAULT_TITLE_TEXT_COLOR);
float textSize = args.getFloat(TITLE_TEXT_SIZE, DEFAULT_TITLE_TEXT_SIZE);
int titleHeight = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_title_height);
int bottomMargin = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_btn_gap);
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
titleHeight);
lp.bottomMargin = bottomMargin;
int background = R.drawable.action_sheet_other_btns_bg_top;
String[] titles = args.getStringArray(OTHER_BTN_TITLES);
if (titles == null || titles.length == 0) {
background = R.drawable.action_sheet_other_btns_bg_single;
}
TextView tvTitle = new TextView(getActivity());
tvTitle.setText(title);
tvTitle.setTextSize(textSize);
tvTitle.setTextColor(titleColor);
tvTitle.setGravity(Gravity.CENTER);
tvTitle.setBackgroundResource(background);
layout.addView(tvTitle, lp);
}
private void addOtherBtns(LinearLayout layout) {
Bundle args = getArguments();
String[] titles = args.getStringArray(OTHER_BTN_TITLES);
if (titles == null || titles.length == 0) return;
int[] colors = args.getIntArray(OTHER_BTN_TITLE_COLORS);
float textSize = args.getFloat(OTHER_BTN_TEXT_SIZE, DEFAULT_BTN_TEXT_SIZE);
int btnHeight = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_btn_height);
int bottomMargin = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_btn_gap);
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
btnHeight);
lp.bottomMargin = bottomMargin;
int len = titles.length;
for (int i = 0; i < len; i++) {
Button btn = new Button(getActivity());
btn.setText(titles[i]);
btn.setAllCaps(false);
btn.setTextSize(textSize);
btn.setTextColor(colors != null && i < colors.length ? colors[i] : DEFAULT_BTN_TEXT_COLOR);
btn.setTag(i);
btn.setBackgroundResource(getOtherBtnBackground(i));
btn.setOnClickListener(this);
layout.addView(btn, lp);
}
}
private int getOtherBtnBackground(int index) {
int background = R.drawable.action_sheet_other_btns_bg_single;
Bundle args = getArguments();
String[] btnTitles = args.getStringArray(OTHER_BTN_TITLES);
boolean hasTitle = !TextUtils.isEmpty(args.getString(TITLE));
if (btnTitles == null || btnTitles.length == 0)
return background;
int totalCount = btnTitles.length;
if (hasTitle) {
totalCount++;
index++;
}
if (totalCount == 1) {
background = R.drawable.action_sheet_other_btns_bg_single;
} else {
if (index == 0) {
background = R.drawable.action_sheet_other_btns_bg_top;
} else if (index == totalCount - 1) {
background = R.drawable.action_sheet_other_btns_bg_bottom;
} else {
background = R.drawable.action_sheet_other_btns_bg_middle;
}
}
return background;
}
private void addCancelBtn(LinearLayout layout) {
Bundle args = getArguments();
String cancelTitle = args.getString(CANCEL_BTN_TITLE);
if (TextUtils.isEmpty(cancelTitle)) return;
int cancelBtnColor = args.getInt(CANCEL_BTN_TITLE_COLOR, DEFAULT_BTN_TEXT_COLOR);
float textSize = args.getFloat(CANCEL_BTN_TEXT_SIZE, DEFAULT_BTN_TEXT_SIZE);
int btnHeight = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_btn_height);
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
btnHeight);
lp.topMargin = (int) getActivity().getResources().getDimension(R.dimen.action_sheet_padding);
String[] otherBtnTitles = args.getStringArray(OTHER_BTN_TITLES);
int index = otherBtnTitles == null ? 0 : otherBtnTitles.length;
Button btnCancel = new Button(getActivity());
btnCancel.setText(cancelTitle);
btnCancel.setAllCaps(false);
btnCancel.setTextSize(textSize);
btnCancel.setTextColor(cancelBtnColor);
btnCancel.setOnClickListener(this);
btnCancel.setBackgroundResource(R.drawable.action_sheet_cancel_btn_bg);
btnCancel.setTag(index);
layout.addView(btnCancel, lp);
}
@Override
public void onClick(View v) {
mClickedBtnIdx = (int) v.getTag();
if (mActionSheetListener != null && mClickedBtnIdx >= 0) {
mActionSheetListener.onButtonClicked(ActionSheet.this, mClickedBtnIdx);
}
dismiss();
}
public static class Builder {
private String mTitle;
private int mTitleColor;
private float mTitleTextSize = -1;
private String mCancelBtnTitle;
private int mCancelBtnTitleColor;
private float mCancelBtnTextSize = -1;
private String[] mOtherBtnTitles;
private int[] mOtherBtnTitleColors;
private float mOtherBtnTextSize = -1;
private boolean mCancelableOnTouchOutside;
private ActionSheetListener mActionSheetListener;
public Builder setTitle(String title, int titleColor) {
mTitle = title;
mTitleColor = titleColor;
return this;
}
public Builder setTitleTextSize(float textSize) {
mTitleTextSize = textSize;
return this;
}
public Builder setCancelBtn(String title, int titleColor) {
mCancelBtnTitle = title;
mCancelBtnTitleColor = titleColor;
return this;
}
public Builder setCancelBtnTextSize(float textSize) {
mCancelBtnTextSize = textSize;
return this;
}
public Builder setOtherBtn(String[] titles, int[] titleColors) {
mOtherBtnTitles = titles;
mOtherBtnTitleColors = titleColors;
return this;
}
public Builder setOtherBtnTextSize(float textSize) {
mOtherBtnTextSize = textSize;
return this;
}
public Builder setCancelableOnTouchOutside(boolean cancelable) {
mCancelableOnTouchOutside = cancelable;
return this;
}
public Builder setActionSheetListener(ActionSheetListener listener) {
mActionSheetListener = listener;
return this;
}
public ActionSheet build() {
Bundle bundle = new Bundle();
bundle.putString(TITLE, mTitle);
bundle.putInt(TITLE_COLOR, mTitleColor);
if (mTitleTextSize > 0) {
bundle.putFloat(TITLE_TEXT_SIZE, mTitleTextSize);
}
bundle.putString(CANCEL_BTN_TITLE, mCancelBtnTitle);
bundle.putInt(CANCEL_BTN_TITLE_COLOR, mCancelBtnTitleColor);
if (mCancelBtnTextSize > 0) {
bundle.putFloat(CANCEL_BTN_TEXT_SIZE, mCancelBtnTextSize);
}
bundle.putStringArray(OTHER_BTN_TITLES, mOtherBtnTitles);
bundle.putIntArray(OTHER_BTN_TITLE_COLORS, mOtherBtnTitleColors);
if (mOtherBtnTextSize > 0) {
bundle.putFloat(OTHER_BTN_TEXT_SIZE, mOtherBtnTextSize);
}
bundle.putBoolean(CANCELABLE_ON_TOUCH_OUTSIDE, mCancelableOnTouchOutside);
ActionSheet actionSheet = new ActionSheet();
actionSheet.setActionSheetListener(mActionSheetListener);
actionSheet.setArguments(bundle);
return actionSheet;
}
}
public interface ActionSheetListener {
void onDismiss(ActionSheet actionSheet, boolean isByBtn);
void onButtonClicked(ActionSheet actionSheet, int index);
}
}
| fix bugs
| actionsheet/src/main/java/cn/zjy/actionsheet/ActionSheet.java | fix bugs |
|
Java | apache-2.0 | 299d18a9044576610a6a657b08493f9a6d504908 | 0 | b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl | /*
* Copyright 2022 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.fhir.core.request.conceptmap;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import com.b2international.snowowl.core.ResourceURI;
import com.b2international.snowowl.core.ServiceProvider;
import com.b2international.snowowl.core.request.ResourceRequests;
import com.b2international.snowowl.fhir.core.exceptions.BadRequestException;
/**
* @since 8.7.1
*/
public interface FhirWriteSupport {

    /**
     * Verifies that a new resource version can be created from an incoming
     * FHIR resource snapshot, comparing it against the latest existing
     * version of the resource (by effective time).
     *
     * @param context the service context used to run version searches
     * @param resourceUri the resource the new version belongs to
     * @param newVersionToCreate the requested version identifier
     * @param newVersionEffectiveDate the requested effective date
     * @return {@code true} when the version must be recreated forcefully
     *         (same effective date and same version id as the latest version),
     *         {@code false} when the standard creation flow applies
     * @throws BadRequestException if the version cannot be created for any reason
     */
    default boolean ensureVersionCanBeCreated(
            final ServiceProvider context,
            final ResourceURI resourceUri,
            final String newVersionToCreate,
            final LocalDate newVersionEffectiveDate) {
        return ResourceRequests.prepareSearchVersion()
                .setLimit(1)
                .filterByResource(resourceUri)
                .sortBy("effectiveTime:desc")
                .buildAsync()
                .execute(context)
                .first()
                .map(latestVersion -> {
                    final int effectiveDateComparison = newVersionEffectiveDate.compareTo(latestVersion.getEffectiveTime());
                    if (effectiveDateComparison < 0) {
                        // Content older than the latest version may never be imported.
                        throw new BadRequestException(String.format("A version for effective time '%s' already exists, can't add content with effective time '%s'.",
                                latestVersion.getEffectiveTime().format(DateTimeFormatter.ISO_LOCAL_DATE),
                                newVersionEffectiveDate.format(DateTimeFormatter.ISO_LOCAL_DATE))
                        );
                    } else if (effectiveDateComparison == 0) {
                        // Reusing the latest effective date is only acceptable when it
                        // patches the very same version, in which case it is recreated
                        // forcefully.
                        if (newVersionToCreate.equals(latestVersion.getVersion())) {
                            return true;
                        }
                        throw new BadRequestException(String.format("A different version ('%s') is already using the given effective time '%s' value.",
                                latestVersion.getVersion(),
                                newVersionEffectiveDate.format(DateTimeFormatter.ISO_LOCAL_DATE))
                        );
                    }
                    // Strictly newer effective date; the requested version id must still be unused.
                    final boolean versionIdTaken = ResourceRequests.prepareSearchVersion()
                            .setLimit(0)
                            .filterByResource(resourceUri)
                            .filterByVersionId(newVersionToCreate)
                            .buildAsync()
                            .execute(context)
                            .getTotal() > 0;
                    if (versionIdTaken) {
                        throw new BadRequestException(String.format("A version with identifier '%s' already exists.", newVersionToCreate));
                    }
                    return false;
                })
                // No version exists yet for this resource; proceed without forcing.
                .orElse(false);
    }
}
| fhir/com.b2international.snowowl.fhir.core/src/com/b2international/snowowl/fhir/core/request/conceptmap/FhirWriteSupport.java | /*
* Copyright 2022 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.fhir.core.request.conceptmap;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Optional;
import com.b2international.snowowl.core.ResourceURI;
import com.b2international.snowowl.core.ServiceProvider;
import com.b2international.snowowl.core.request.ResourceRequests;
import com.b2international.snowowl.core.version.Version;
import com.b2international.snowowl.fhir.core.exceptions.BadRequestException;
/**
* @since 8.7.1
*/
public interface FhirWriteSupport {
    /**
     * Checks whether a concept map version with the given id and effective
     * date may be created, comparing against the latest existing version.
     *
     * @return {@code true} when the latest version should be forcefully
     *         re-created (the given id matches the latest version's id),
     *         {@code false} when the standard creation flow applies
     * @throws BadRequestException if the effective date is older than the
     *         latest version's, or the version id is already taken
     */
    default boolean ensureVersionCanBeCreated(
            final ServiceProvider context,
            final ResourceURI conceptMapUri,
            final String versionId,
            final LocalDate effectiveDate) {
        // Fetch only the most recent version (sorted by effective time, descending).
        final Optional<Version> latestVersion = ResourceRequests.prepareSearchVersion()
                .setLimit(1)
                .filterByResource(conceptMapUri)
                .sortBy("effectiveTime:desc")
                .buildAsync()
                .execute(context)
                .first();
        final Optional<String> latestVersionId = latestVersion.map(lv -> {
            // Allow importing with the same effective date as the latest version
            if (lv.getEffectiveTime().compareTo(effectiveDate) > 0) {
                throw new BadRequestException(String.format("A concept map version for effective time '%s' already exists, can't add content with effective time '%s'.",
                        lv.getEffectiveTime().format(DateTimeFormatter.ISO_LOCAL_DATE),
                        effectiveDate.format(DateTimeFormatter.ISO_LOCAL_DATE)));
            }
            return lv.getVersion();
        });
        // Force re-create latest version if the specified ID is equal to the ID of the version instance
        final boolean force = latestVersionId.map(versionId::equals)
                .orElse(Boolean.FALSE);
        if (!force) {
            // NOTE(review): a different version id carrying the SAME effective date as
            // the latest version passes this check as long as the id is unused, i.e.
            // the effective date can be reused across versions — confirm this is intended.
            final boolean versionAlreadyExists = ResourceRequests.prepareSearchVersion()
                    .setLimit(0)
                    .filterByResource(conceptMapUri)
                    .filterByVersionId(versionId)
                    .buildAsync()
                    .execute(context)
                    .getTotal() > 0;
            if (versionAlreadyExists) {
                throw new BadRequestException(String.format("A concept map version with identifier '%s' already exists.", versionId));
            }
        }
        return force;
    }
}
| fix(fhir): ensure that effective time cannot be reused...
...when creating a resource via FHIR | fhir/com.b2international.snowowl.fhir.core/src/com/b2international/snowowl/fhir/core/request/conceptmap/FhirWriteSupport.java | fix(fhir): ensure that effective time cannot be reused... |
|
Java | apache-2.0 | 7b3e37f48f58ef68fe30d29c1daa499203dd5839 | 0 | tkurz/sesame-vocab-builder,ja-fra/sesame-vocab-builder,ja-fra/sesame-vocab-builder,ansell/rdf4j-schema-generator,tkurz/sesame-vocab-builder,ansell/rdf4j-schema-generator | package com.github.tkurz.sesame.vocab;
import org.apache.commons.cli.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.openrdf.model.util.GraphUtilException;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParserRegistry;
import org.openrdf.rio.Rio;
import org.openrdf.rio.UnsupportedRDFormatException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
/**
* ...
* <p/>
* @author Thomas Kurz ([email protected])
* @author Jakob Frank ([email protected])
*/
public class Main {
/**
 * Command-line entry point. Parses the CLI options, optionally downloads a
 * remote vocabulary to a temp file, configures the {@link VocabBuilder} and
 * writes the generated class to the given output file or to StdOut.
 * All expected failures are reported on StdErr instead of propagating.
 */
public static void main(String [] args) {
    try {
        CommandLineParser parser = new PosixParser();
        CommandLine cli = parser.parse(getCliOpts(), args);
        if (cli.hasOption('h')) {
            printHelp();
            return;
        }
        // two args must be left over: <input-inputFile> <output-inputFile>
        String[] cliArgs = cli.getArgs();
        final String input, output;
        switch (cliArgs.length) {
            case 0:
                throw new ParseException("Missing input-file");
            case 1:
                input = cliArgs[0];
                output = null;
                break;
            case 2:
                input = cliArgs[0];
                output = cliArgs[1];
                break;
            default:
                throw new ParseException("too many arguments");
        }
        RDFFormat format = Rio.getParserFormatForMIMEType(cli.getOptionValue('f', null));
        Path tempFile = null;
        final VocabBuilder builder;
        // Remote vocabularies are downloaded to a temp file first; local paths are used directly.
        // FIX: https:// URLs were previously treated as local file paths.
        if (input.startsWith("http://") || input.startsWith("https://")) {
            tempFile = Files.createTempFile("vocab-builder", "."+(format!=null?format.getDefaultFileExtension():"cache"));
            URL url = new URL(input);
            try {
                fetchVocab(url, tempFile);
            } catch (URISyntaxException e) {
                throw new ParseException("Invalid input URL: " +e.getMessage());
            }
            builder = new VocabBuilder(tempFile.toString(), format);
        } else
            builder = new VocabBuilder(input, format);
        // Optional generator settings taken from the CLI options.
        if (cli.hasOption('p')) {
            builder.setPackageName(cli.getOptionValue('p'));
        }
        if (cli.hasOption('n')) {
            builder.setName(cli.getOptionValue('n'));
        }
        if (cli.hasOption('u')) {
            builder.setPrefix(cli.getOptionValue('u'));
        }
        if (cli.hasOption('l')) {
            builder.setPreferredLanguage(cli.getOptionValue('l'));
        }
        if (cli.hasOption('s')) {
            try {
                builder.setIndent(StringUtils.repeat(' ', Integer.parseInt(cli.getOptionValue('s', "4"))));
            } catch (NumberFormatException e) {
                throw new ParseException("indent must be numeric");
            }
        } else {
            builder.setIndent("\t");
        }
        if (output != null) {
            System.out.printf("Starting generation%n");
            Path outFile = Paths.get(output);
            builder.generate(outFile);
            System.out.printf("Generation finished, result available in '%s'%n", output);
        } else {
            builder.generate(System.out);
        }
        // Clean up the downloaded copy, if any.
        if (tempFile != null) {
            Files.deleteIfExists(tempFile);
        }
    } catch (UnsupportedRDFormatException e) {
        System.err.printf("%s%nTry setting the format explicitly%n", e.getMessage());
    } catch (ParseException e) {
        printHelp(e.getMessage());
    } catch (RDFParseException e) {
        System.err.println("Could not parse input file: " + e.getMessage());
    } catch (FileNotFoundException e) {
        System.err.println("Could not read input-file: " + e.getMessage());
    } catch (IOException e) {
        System.err.println("Error during file-access: " + e.getMessage());
    } catch (GraphUtilException e) {
        e.printStackTrace();
    } catch (GenerationException e) {
        System.err.println(e.getMessage());
    }
}
/** Prints usage information without a preceding error message. */
private static void printHelp() {
    printHelp(null);
}
/**
 * Prints usage information to StdOut, optionally preceded by an error message.
 *
 * @param error message shown above the usage block, or {@code null} for none
 */
private static void printHelp(String error) {
    final HelpFormatter formatter = new HelpFormatter();
    final PrintWriter writer = new PrintWriter(System.out);
    if (error != null) {
        formatter.printWrapped(writer, 80, error);
        writer.println();
    }
    formatter.printWrapped(writer, 80, "usage: Main [options...] <input-file> [<output-file>]");
    formatter.printWrapped(writer, 80, " <input-file> the input file to read from");
    formatter.printWrapped(writer, 80, " [<output-file>] the output file to write, StdOut if omitted");
    formatter.printOptions(writer, 80, getCliOpts(), 2, 2);
    writer.flush();
    writer.close();
}
/**
 * Builds the command-line option definitions.
 * <p>Fixes: "pint this help" and "for for indentation" typos, the unbalanced
 * parenthesis in the package description, and the duplicated
 * {@code withArgName("spaces")} call on the -s option (the first call was dead,
 * immediately overridden by {@code withArgName("indent")}; it is now
 * {@code withLongOpt("spaces")}, consistent with every other option).</p>
 *
 * @return the supported CLI options
 */
@SuppressWarnings({"AccessStaticViaInstance", "static-access"})
private static Options getCliOpts() {
    Options o = new Options();
    o.addOption(OptionBuilder
            .withLongOpt("format")
            .withDescription("mime-type of the input file (will try to guess if absent)")
            .hasArgs(1)
            .withArgName("input-format")
            .isRequired(false)
            .create('f'));
    o.addOption(OptionBuilder
            .withLongOpt("package")
            .withDescription("package declaration (will use default (empty) package if absent)")
            .hasArgs(1)
            .withArgName("package")
            .isRequired(false)
            .create('p'));
    o.addOption(OptionBuilder
            .withLongOpt("name")
            .withDescription("the name of the namespace (will try to guess from the input file if absent)")
            .hasArgs(1)
            .withArgName("ns")
            .isRequired(false)
            .create('n'));
    o.addOption(OptionBuilder
            .withLongOpt("uri")
            .withDescription("the prefix for the vocabulary (if not available in the input file)")
            .hasArgs(1)
            .withArgName("prefix")
            .isRequired(false)
            .create('u'));
    o.addOption(OptionBuilder
            .withLongOpt("spaces")
            .hasOptionalArgs(1)
            .withArgName("indent")
            .withDescription("use spaces for indentation (tabs if missing, 4 spaces if no number given)")
            .isRequired(false)
            .create('s'));
    o.addOption(OptionBuilder
            .withLongOpt("language")
            .withDescription("preferred language for vocabulary labels")
            .hasArgs(1)
            .withArgName("preferred-language")
            .isRequired(false)
            .create('l'));
    o.addOption(OptionBuilder
            .withLongOpt("help")
            .withDescription("print this help")
            .isRequired(false)
            .hasArg(false)
            .create('h'));
    return o;
}
/**
 * Downloads the vocabulary from the given URL into {@code tempFile}, sending
 * an Accept header negotiated from the locally available RDF parsers and a
 * User-Agent built from the project's build metadata.
 * <p>Fix: the previously computed {@code buildProperties} local was ignored
 * and {@code getBuildProperties()} was re-invoked four times; the local is
 * now used for all four property lookups.</p>
 *
 * @param url the vocabulary location
 * @param tempFile the file the response body is written to
 * @return the written file (i.e. {@code tempFile} as a {@link File})
 * @throws URISyntaxException if the URL cannot be converted to a URI
 * @throws IOException on any transfer or file error
 */
private static File fetchVocab(URL url, final Path tempFile) throws URISyntaxException, IOException {
    final Properties buildProperties = getBuildProperties();
    final HttpClientBuilder clientBuilder = HttpClientBuilder.create()
            .setUserAgent(
                    String.format("%s:%s/%s (%s)",
                            buildProperties.getProperty("groupId", "unknown"),
                            buildProperties.getProperty("artifactId", "unknown"),
                            buildProperties.getProperty("version", "unknown"),
                            buildProperties.getProperty("name", "unknown"))
            );
    try(CloseableHttpClient client = clientBuilder.build()) {
        final HttpUriRequest request = RequestBuilder.get()
                .setUri(url.toURI())
                .setHeader(HttpHeaders.ACCEPT, getAcceptHeaderValue())
                .build();
        return client.execute(request, new ResponseHandler<File>() {
            @Override
            public File handleResponse(HttpResponse response) throws ClientProtocolException, IOException {
                final File cf = tempFile.toFile();
                FileUtils.copyInputStreamToFile(response.getEntity().getContent(), cf);
                return cf;
            }
        });
    }
}
/**
 * Loads the project's build metadata from the classpath resource
 * {@code /build.properties}, best-effort: a missing or unreadable resource
 * yields an empty Properties object (callers fall back to "unknown" defaults).
 * <p>Fixes: the resource stream was never closed (leak), and a missing
 * resource made {@code getResourceAsStream} return {@code null}, so
 * {@code Properties.load(null)} threw a NullPointerException that escaped
 * the IOException-only catch.</p>
 *
 * @return the build properties, possibly empty, never {@code null}
 */
private static Properties getBuildProperties() {
    Properties p = new Properties();
    try (InputStream in = Main.class.getResourceAsStream("/build.properties")) {
        if (in != null) {
            p.load(in);
        }
    } catch (IOException e) {
        // ignore - best effort, callers use "unknown" defaults
    }
    return p;
}
/**
 * Builds the HTTP Accept header value from all registered RDF parser
 * formats, preferring Turtle.
 *
 * @return the comma-separated Accept header value, or {@code null} when no
 *         parser format is available
 */
private static String getAcceptHeaderValue() {
    final Set<RDFFormat> rdfFormats = RDFParserRegistry.getInstance().getKeys();
    final Iterator<String> acceptParams = RDFFormat.getAcceptParams(rdfFormats, false, RDFFormat.TURTLE).iterator();
    if (!acceptParams.hasNext()) {
        return null;
    }
    final StringBuilder header = new StringBuilder(acceptParams.next());
    while (acceptParams.hasNext()) {
        header.append(", ").append(acceptParams.next());
    }
    return header.toString();
}
}
| sesame-vocab-builder-cli/src/main/java/com/github/tkurz/sesame/vocab/Main.java | package com.github.tkurz.sesame.vocab;
import org.apache.commons.cli.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.openrdf.model.util.GraphUtilException;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParserRegistry;
import org.openrdf.rio.Rio;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
/**
* ...
* <p/>
* @author Thomas Kurz ([email protected])
* @author Jakob Frank ([email protected])
*/
public class Main {
/**
 * Command-line entry point. Parses the CLI options, optionally downloads a
 * remote vocabulary to a temp file, configures the {@link VocabBuilder} and
 * writes the generated class to the given output file or to StdOut.
 * <p>Fixes: {@code System.err.printf(e.getMessage())} passed an arbitrary
 * exception message as a printf FORMAT string (any '%' in the message would
 * throw); it is now {@code println}. Also accepts https:// input URLs, which
 * were previously treated as local file paths.</p>
 */
public static void main(String [] args) {
    try {
        CommandLineParser parser = new PosixParser();
        CommandLine cli = parser.parse(getCliOpts(), args);
        if (cli.hasOption('h')) {
            printHelp();
            return;
        }
        // two args must be left over: <input-inputFile> <output-inputFile>
        String[] cliArgs = cli.getArgs();
        final String input, output;
        switch (cliArgs.length) {
            case 0:
                throw new ParseException("Missing input-file");
            case 1:
                input = cliArgs[0];
                output = null;
                break;
            case 2:
                input = cliArgs[0];
                output = cliArgs[1];
                break;
            default:
                throw new ParseException("too many arguments");
        }
        RDFFormat format = Rio.getParserFormatForMIMEType(cli.getOptionValue('f', null));
        Path tempFile = null;
        final VocabBuilder builder;
        // Remote vocabularies are downloaded to a temp file first; local paths are used directly.
        if (input.startsWith("http://") || input.startsWith("https://")) {
            tempFile = Files.createTempFile("vocab-builder", "."+(format!=null?format.getDefaultFileExtension():"cache"));
            URL url = new URL(input);
            try {
                fetchVocab(url, tempFile);
            } catch (URISyntaxException e) {
                throw new ParseException("Invalid input URL: " +e.getMessage());
            }
            builder = new VocabBuilder(tempFile.toString(), format);
        } else
            builder = new VocabBuilder(input, format);
        // Optional generator settings taken from the CLI options.
        if (cli.hasOption('p')) {
            builder.setPackageName(cli.getOptionValue('p'));
        }
        if (cli.hasOption('n')) {
            builder.setName(cli.getOptionValue('n'));
        }
        if (cli.hasOption('u')) {
            builder.setPrefix(cli.getOptionValue('u'));
        }
        if (cli.hasOption('l')) {
            builder.setPreferredLanguage(cli.getOptionValue('l'));
        }
        if (cli.hasOption('s')) {
            try {
                builder.setIndent(StringUtils.repeat(' ', Integer.parseInt(cli.getOptionValue('s', "4"))));
            } catch (NumberFormatException e) {
                throw new ParseException("indent must be numeric");
            }
        } else {
            builder.setIndent("\t");
        }
        if (output != null) {
            System.out.printf("Starting generation%n");
            Path outFile = Paths.get(output);
            builder.generate(outFile);
            System.out.printf("Generation finished, result available in '%s'%n", output);
        } else {
            builder.generate(System.out);
        }
        // Clean up the downloaded copy, if any.
        if (tempFile != null) {
            Files.deleteIfExists(tempFile);
        }
    } catch (ParseException e) {
        printHelp(e.getMessage());
    } catch (RDFParseException e) {
        System.err.println("Could not parse input file: " + e.getMessage());
    } catch (FileNotFoundException e) {
        System.err.println("Could not read input-file: " + e.getMessage());
    } catch (IOException e) {
        System.err.println("Error during file-access: " + e.getMessage());
    } catch (GraphUtilException e) {
        e.printStackTrace();
    } catch (GenerationException e) {
        System.err.println(e.getMessage());
    }
}
private static void printHelp() {
printHelp(null);
}
private static void printHelp(String error) {
HelpFormatter hf = new HelpFormatter();
PrintWriter w = new PrintWriter(System.out);
if (error != null) {
hf.printWrapped(w, 80, error);
w.println();
}
hf.printWrapped(w, 80, "usage: Main [options...] <input-file> [<output-file>]");
hf.printWrapped(w, 80, " <input-file> the input file to read from");
hf.printWrapped(w, 80, " [<output-file>] the output file to write, StdOut if omitted");
hf.printOptions(w, 80, getCliOpts(), 2, 2);
w.flush();
w.close();
}
@SuppressWarnings({"AccessStaticViaInstance", "static-access"})
private static Options getCliOpts() {
Options o = new Options();
o.addOption(OptionBuilder
.withLongOpt("format")
.withDescription("mime-type of the input file (will try to guess if absent)")
.hasArgs(1)
.withArgName("input-format")
.isRequired(false)
.create('f'));
o.addOption(OptionBuilder
.withLongOpt("package")
.withDescription("package declaration (will use default (empty) package if absent")
.hasArgs(1)
.withArgName("package")
.isRequired(false)
.create('p'));
o.addOption(OptionBuilder
.withLongOpt("name")
.withDescription("the name of the namespace (will try to guess from the input file if absent)")
.hasArgs(1)
.withArgName("ns")
.isRequired(false)
.create('n'));
o.addOption(OptionBuilder
.withLongOpt("uri")
.withDescription("the prefix for the vocabulary (if not available in the input file)")
.hasArgs(1)
.withArgName("prefix")
.isRequired(false)
.create('u'));
o.addOption(OptionBuilder
.withArgName("spaces")
.hasOptionalArgs(1)
.withArgName("indent")
.withDescription("use spaces for for indentation (tabs if missing, 4 spaces if no number given)")
.isRequired(false)
.create('s'));
o.addOption(OptionBuilder
.withLongOpt("language")
.withDescription("preferred language for vocabulary labels")
.hasArgs(1)
.withArgName("preferred-language")
.isRequired(false)
.create('l'));
o.addOption(OptionBuilder
.withLongOpt("help")
.withDescription("pint this help")
.isRequired(false)
.hasArg(false)
.create('h'));
return o;
}
private static File fetchVocab(URL url, final Path tempFile) throws URISyntaxException, IOException {
final Properties buildProperties = getBuildProperties();
final HttpClientBuilder clientBuilder = HttpClientBuilder.create()
.setUserAgent(
String.format("%s:%s/%s (%s)",
getBuildProperties().getProperty("groupId", "unknown"),
getBuildProperties().getProperty("artifactId", "unknown"),
getBuildProperties().getProperty("version", "unknown"),
getBuildProperties().getProperty("name", "unknown"))
);
try(CloseableHttpClient client = clientBuilder.build()) {
final HttpUriRequest request = RequestBuilder.get()
.setUri(url.toURI())
.setHeader(HttpHeaders.ACCEPT, getAcceptHeaderValue())
.build();
return client.execute(request, new ResponseHandler<File>() {
@Override
public File handleResponse(HttpResponse response) throws ClientProtocolException, IOException {
final File cf = tempFile.toFile();
FileUtils.copyInputStreamToFile(response.getEntity().getContent(), cf);
return cf;
}
});
}
}
private static Properties getBuildProperties() {
Properties p = new Properties();
try {
p.load(Main.class.getResourceAsStream("/build.properties"));
} catch (IOException e) {
// ignore
}
return p;
}
private static String getAcceptHeaderValue() {
final Set<RDFFormat> rdfFormats = RDFParserRegistry.getInstance().getKeys();
final Iterator<String> acceptParams = RDFFormat.getAcceptParams(rdfFormats, false, RDFFormat.TURTLE).iterator();
if (acceptParams.hasNext()) {
final StringBuilder sb = new StringBuilder();
while (acceptParams.hasNext()) {
sb.append(acceptParams.next());
if (acceptParams.hasNext()) {
sb.append(", ");
}
}
return sb.toString();
} else {
return null;
}
}
}
| Improved error message on unknown formats
| sesame-vocab-builder-cli/src/main/java/com/github/tkurz/sesame/vocab/Main.java | Improved error message on unknown formats |
|
Java | apache-2.0 | 798bb1405d822ca37ebdc55d0ac0322bdc7524e9 | 0 | mkarneim/luamod | package net.wizardsoflua.lua.extension;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Objects.requireNonNull;
import java.util.ServiceConfigurationError;
import java.util.function.Consumer;
import net.sandius.rembulan.Table;
import net.wizardsoflua.lua.Converters;
import net.wizardsoflua.lua.extension.api.InitializationContext;
import net.wizardsoflua.lua.extension.spi.ConverterExtension;
import net.wizardsoflua.lua.extension.spi.LuaExtension;
public class LuaExtensionLoader implements net.wizardsoflua.lua.extension.api.LuaExtensionLoader {
private final ClassIndex extensions = new ClassIndex();
private final Table env;
private final InitializationContext initializationContext;
private final Converters converters;
public LuaExtensionLoader(Table env, InitializationContext initializationContext,
Converters converters) {
this.env = checkNotNull(env, "env == null!");
this.initializationContext =
checkNotNull(initializationContext, "initializationContext == null!");
this.converters = requireNonNull(converters, "converters == null!");
}
public void installExtensions() {
ServiceLoader.load(LuaExtension.class).forEach(this::getLuaExtension);
ServiceLoader.load(ConverterExtension.class).forEach(this::getConverterExtension);
}
@Override
public <E extends LuaExtension> E getLuaExtension(Class<E> extensionClass) {
return getExtension(extensionClass, extension -> {
extension.initialize(initializationContext);
extension.installInto(env);
});
}
public <E extends ConverterExtension<?, ?>> E getConverterExtension(Class<E> extensionClass) {
return getExtension(extensionClass, extension -> {
extension.initialize(initializationContext);
converters.addConverterExtension(extension);
});
}
private <E> E getExtension(Class<E> extensionClass, Consumer<E> initializer)
throws ServiceConfigurationError {
E extension = extensions.get(extensionClass);
if (extension == null) {
extension = newInstance(extensionClass);
extensions.add(extension);
initializer.accept(extension);
}
return extension;
}
private static <P> P newInstance(Class<P> cls) throws ServiceConfigurationError {
try {
return cls.newInstance();
} catch (InstantiationException | IllegalAccessException ex) {
String message = "Provider " + cls + " could not be instantiated";
throw new ServiceConfigurationError(LuaExtension.class.getName() + ": " + message, ex);
}
}
}
| src/main/java/net/wizardsoflua/lua/extension/LuaExtensionLoader.java | package net.wizardsoflua.lua.extension;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Objects.requireNonNull;
import java.util.ServiceConfigurationError;
import java.util.function.Consumer;
import net.sandius.rembulan.Table;
import net.wizardsoflua.lua.Converters;
import net.wizardsoflua.lua.extension.api.InitializationContext;
import net.wizardsoflua.lua.extension.spi.ConverterExtension;
import net.wizardsoflua.lua.extension.spi.LuaExtension;
public class LuaExtensionLoader implements net.wizardsoflua.lua.extension.api.LuaExtensionLoader {
private final ClassIndex extensions = new ClassIndex();
private final Table env;
private final InitializationContext initializationContext;
private final Converters converters;
public LuaExtensionLoader(Table env, InitializationContext initializationContext,
Converters converters) {
this.env = checkNotNull(env, "env == null!");
this.initializationContext =
checkNotNull(initializationContext, "initializationContext == null!");
this.converters = requireNonNull(converters, "converters == null!");
}
public void installExtensions() {
ServiceLoader.load(LuaExtension.class).forEach(this::getLuaExtension);
ServiceLoader.load(ConverterExtension.class).forEach(this::getConverterExtension);
}
@Override
public <E extends LuaExtension> E getLuaExtension(Class<E> extensionClass) {
return getExtension(extensionClass, extension -> {
extension.initialize(initializationContext);
extension.installInto(env);
});
}
public <E extends ConverterExtension<?, ?>> E getConverterExtension(Class<E> extensionClass) {
return getExtension(extensionClass, extension -> {
extension.initialize(initializationContext);
converters.addConverterExtension(extension);
});
}
private <E> E getExtension(Class<E> extensionClass, Consumer<E> initializer)
throws ServiceConfigurationError {
E extension = extensions.get(extensionClass);
if (extensionClass == null) {
extension = newInstance(extensionClass);
extensions.add(extension);
initializer.accept(extension);
}
return extension;
}
private static <P> P newInstance(Class<P> cls) throws ServiceConfigurationError {
try {
return cls.newInstance();
} catch (InstantiationException | IllegalAccessException ex) {
String message = "Provider " + cls + " could not be instantiated";
throw new ServiceConfigurationError(LuaExtension.class.getName() + ": " + message, ex);
}
}
}
| Fix error in LuaExtensionLoader
| src/main/java/net/wizardsoflua/lua/extension/LuaExtensionLoader.java | Fix error in LuaExtensionLoader |
|
Java | apache-2.0 | 5ddb72f3befc22d69c98c0b002de5740efcabd75 | 0 | suhand/carbon-platform-integration,suhand/carbon-platform-integration,dimuthud/carbon-platform-integration,dimuthud/carbon-platform-integration,wso2/carbon-platform-integration,suhand/carbon-platform-integration,wso2/carbon-platform-integration,dimuthud/carbon-platform-integration,isurusuranga/carbon-platform-integration,wso2/carbon-platform-integration,wso2/carbon-platform-integration,bmlct/carbon-platform-integration,isurusuranga/carbon-platform-integration,isurusuranga/carbon-platform-integration,bmlct/carbon-platform-integration,isurusuranga/carbon-platform-integration,dimuthud/carbon-platform-integration,bmlct/carbon-platform-integration,bmlct/carbon-platform-integration,suhand/carbon-platform-integration,malithie/carbon-platform-integration | /*
*Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
*WSO2 Inc. licenses this file to you under the Apache License,
*Version 2.0 (the "License"); you may not use this file except
*in compliance with the License.
*You may obtain a copy of the License at
*
*http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing,
*software distributed under the License is distributed on an
*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
*KIND, either express or implied. See the License for the
*specific language governing permissions and limitations
*under the License.
*/
package org.wso2.carbon.automation.extensions.servers.axis2server;
import org.apache.axis2.AxisFault;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.ConfigurationContextFactory;
import org.apache.axis2.deployment.DeploymentEngine;
import org.apache.axis2.description.AxisServiceGroup;
import org.apache.axis2.engine.ListenerManager;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.automation.engine.FrameworkConstants;
import org.wso2.carbon.automation.engine.frameworkutils.FrameworkPathUtil;
import org.wso2.carbon.automation.extensions.ExtensionUtils;
import java.io.*;
import java.nio.charset.Charset;
public class Axis2ServerManager implements BackendServer {
private static final Log log = LogFactory.getLog(Axis2ServerManager.class);
private ConfigurationContext cfgCtx;
private ListenerManager listenerManager;
private boolean started;
String repositoryPath = null;
String MODIFIED_RESOURCE_NAME = "";
public Axis2ServerManager() {
this("test_axis2_server_9000.xml");
repositoryPath = System.getProperty(FrameworkConstants.CARBON_HOME) + File.separator +
"samples" + File.separator + "axis2Server" + File.separator + "repository";
}
public Axis2ServerManager(String axis2xmlFile) {
String newFile = axis2xmlFile + "_bk";
repositoryPath = System.getProperty(FrameworkConstants.CARBON_HOME) + File.separator +
"samples" + File.separator + "axis2Server" + File.separator + "repository";
File repository = new File(repositoryPath);
log.info("Using the Axis2 repository path: " + repository.getAbsolutePath());
try {
changeConfiguration(axis2xmlFile, newFile);
File axis2xml = copyResourceToFileSystem(newFile, "axis2.xml");
if (!axis2xml.exists()) {
log.error("Error while copying the test axis2.xml to the file system");
return;
}
log.info("Loading axis2.xml from: " + axis2xml.getAbsolutePath());
cfgCtx = ConfigurationContextFactory.createConfigurationContextFromFileSystem(
repository.getAbsolutePath(), axis2xml.getAbsolutePath());
} catch (Exception e) {
log.error("Error while initializing the configuration context", e);
}
}
public void start() throws IOException {
log.info("Starting sample Axis2 server");
listenerManager = new ListenerManager();
listenerManager.init(cfgCtx);
listenerManager.start();
try {
Thread.sleep(2000);
} catch (InterruptedException ignored) {
}
started = true;
}
public void stop() {
log.info("Stopping sample Axis2 server");
try {
listenerManager.stop();
listenerManager.destroy();
cfgCtx.cleanupContexts();
} catch (AxisFault axisFault) {
log.error("Error while shutting down the listener managers", axisFault);
}
started = false;
}
public boolean isStarted() {
return !listenerManager.isStopped();
}
public void hotDeployArtifact(String artifact) throws IOException {
File fOrig = new File(artifact);
File fDest = new File(repositoryPath + File.separator + "services" + File.separator);
FileUtils.copyFile(fOrig, fDest);
}
public void hotUndeployArtifact(String artifact) {
File fOrig = new File(artifact);
FileUtils.deleteQuietly(fOrig);
}
public void deployService(Object service) throws IOException {
String artifactName = service + ".aar";
File file = copyResourceToFileSystem(artifactName, artifactName);
AxisServiceGroup serviceGroup = DeploymentEngine.loadServiceGroup(file, cfgCtx);
cfgCtx.getAxisConfiguration().addServiceGroup(serviceGroup);
}
private void changeConfiguration(String file, String newFile) throws IOException {
StringBuilder sb = new StringBuilder();
File config =
new File(FrameworkPathUtil.getSystemResourceLocation() + File.separator +
"artifacts" + File.separator + "AXIS2" + File.separator + "config" +
File.separator + file);
BufferedReader br = null;
OutputStream os = null;
try {
if (config != null) {
String currentLine;
br = new BufferedReader(new InputStreamReader(new FileInputStream(config),
Charset.defaultCharset()));
while ((currentLine = br.readLine()) != null) {
if (currentLine.contains("REPLACE_CK")) {
currentLine = currentLine.replace("REPLACE_CK",
System.getProperty(FrameworkConstants.CARBON_HOME) +
File.separator + "repository" + File.separator +
"resources" + File.separator + "security" +
File.separator + "wso2carbon.jks");
} else if (currentLine.contains("REPLACE_TS")) {
currentLine = currentLine.replace("REPLACE_TS",
System.getProperty(FrameworkConstants.CARBON_HOME) +
File.separator + "repository" + File.separator +
"resources" + File.separator + "security" +
File.separator + "client-truststore.jks");
}
sb.append(currentLine);
}
br.close();
}
File newConfig =
new File(ExtensionUtils.getSystemResourceLocation() + File.separator +
"artifacts" + File.separator + "AXIS2" + File.separator + "config" +
File.separator + newFile);
if (newConfig.exists()) {
FileUtils.deleteQuietly(newConfig);
}
FileUtils.touch(newConfig);
os = FileUtils.openOutputStream(newConfig);
os.write(sb.toString().getBytes("UTF-8"));
} finally {
if (os != null)
os.close();
if (br != null)
br.close();
}
}
private File copyResourceToFileSystem(String resourceName, String fileName) throws IOException {
File file = new File(System.getProperty("basedir") + File.separator + "target" +
File.separator + fileName);
if (file.exists()) {
FileUtils.deleteQuietly(file);
}
FileUtils.touch(file);
OutputStream os = FileUtils.openOutputStream(file);
InputStream is = null;
try {
if (resourceName.contains(".aar")) {
is = new FileInputStream(ExtensionUtils.getSystemResourceLocation() +
File.separator + "artifacts" + File.separator + "AXIS2" +
File.separator + "aar" +
File.separator + resourceName);
} else {
is = new FileInputStream(ExtensionUtils.getSystemResourceLocation() +
File.separator + "artifacts" + File.separator + "AXIS2" +
File.separator + "config" +
File.separator + resourceName);
}
if (is != null) {
byte[] data = new byte[1024];
int len;
while ((len = is.read(data)) != -1) {
os.write(data, 0, len);
}
}
} finally {
os.flush();
os.close();
if (is != null)
is.close();
}
return file;
}
private File copyServiceToFileSystem(String resourceName, String fileName) throws IOException {
File file = new File(System.getProperty("basedir") + File.separator + "target" +
File.separator + fileName);
if (file.exists()) {
FileUtils.deleteQuietly(file);
}
FileUtils.touch(file);
OutputStream os = FileUtils.openOutputStream(file);
InputStream is = null;
try {
is = new FileInputStream(ExtensionUtils.getSystemResourceLocation() +
File.separator + "artifacts" + File.separator + "AXIS2" +
File.separator + "config" +
File.separator + resourceName);
if (is != null) {
byte[] data = new byte[1024];
int len;
while ((len = is.read(data)) != -1) {
os.write(data, 0, len);
}
os.flush();
}
} finally {
os.close();
is.close();
}
return file;
}
}
| test-automation-framework/org.wso2.carbon.automation.extensions/src/main/java/org/wso2/carbon/automation/extensions/servers/axis2server/Axis2ServerManager.java | /*
*Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
*WSO2 Inc. licenses this file to you under the Apache License,
*Version 2.0 (the "License"); you may not use this file except
*in compliance with the License.
*You may obtain a copy of the License at
*
*http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing,
*software distributed under the License is distributed on an
*"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
*KIND, either express or implied. See the License for the
*specific language governing permissions and limitations
*under the License.
*/
package org.wso2.carbon.automation.extensions.servers.axis2server;
import org.apache.axis2.AxisFault;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.ConfigurationContextFactory;
import org.apache.axis2.deployment.DeploymentEngine;
import org.apache.axis2.description.AxisServiceGroup;
import org.apache.axis2.engine.ListenerManager;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.automation.engine.FrameworkConstants;
import org.wso2.carbon.automation.engine.frameworkutils.FrameworkPathUtil;
import org.wso2.carbon.automation.extensions.ExtensionUtils;
import java.io.*;
public class Axis2ServerManager implements BackendServer {
private static final Log log = LogFactory.getLog(Axis2ServerManager.class);
private ConfigurationContext cfgCtx;
private ListenerManager listenerManager;
private boolean started;
String repositoryPath = null;
String MODIFIED_RESOURCE_NAME = "";
public Axis2ServerManager() {
this("test_axis2_server_9000.xml");
repositoryPath = System.getProperty(FrameworkConstants.CARBON_HOME) + File.separator +
"samples" + File.separator + "axis2Server" + File.separator + "repository";
}
public Axis2ServerManager(String axis2xmlFile) {
String newFile = axis2xmlFile + "_bk";
repositoryPath = System.getProperty(FrameworkConstants.CARBON_HOME) + File.separator +
"samples" + File.separator + "axis2Server" + File.separator + "repository";
File repository = new File(repositoryPath);
log.info("Using the Axis2 repository path: " + repository.getAbsolutePath());
try {
changeConfiguration(axis2xmlFile, newFile);
File axis2xml = copyResourceToFileSystem(newFile, "axis2.xml");
if (!axis2xml.exists()) {
log.error("Error while copying the test axis2.xml to the file system");
return;
}
log.info("Loading axis2.xml from: " + axis2xml.getAbsolutePath());
cfgCtx = ConfigurationContextFactory.createConfigurationContextFromFileSystem(
repository.getAbsolutePath(), axis2xml.getAbsolutePath());
} catch (Exception e) {
log.error("Error while initializing the configuration context", e);
}
}
public void start() throws IOException {
log.info("Starting sample Axis2 server");
listenerManager = new ListenerManager();
listenerManager.init(cfgCtx);
listenerManager.start();
try {
Thread.sleep(2000);
} catch (InterruptedException ignored) {
}
started = true;
}
public void stop() {
log.info("Stopping sample Axis2 server");
try {
listenerManager.stop();
listenerManager.destroy();
cfgCtx.cleanupContexts();
} catch (AxisFault axisFault) {
log.error("Error while shutting down the listener managers", axisFault);
}
started = false;
}
public boolean isStarted() {
return !listenerManager.isStopped();
}
public void hotDeployArtifact(String artifact) throws IOException {
File fOrig = new File(artifact);
File fDest = new File(repositoryPath + File.separator + "services" + File.separator);
FileUtils.copyFile(fOrig, fDest);
}
public void hotUndeployArtifact(String artifact) {
File fOrig = new File(artifact);
FileUtils.deleteQuietly(fOrig);
}
public void deployService(Object service) throws IOException {
String artifactName = service + ".aar";
File file = copyResourceToFileSystem(artifactName, artifactName);
AxisServiceGroup serviceGroup = DeploymentEngine.loadServiceGroup(file, cfgCtx);
cfgCtx.getAxisConfiguration().addServiceGroup(serviceGroup);
}
private void changeConfiguration(String file, String newFile) throws IOException {
StringBuilder sb = new StringBuilder();
File config =
new File(FrameworkPathUtil.getSystemResourceLocation() + File.separator +
"artifacts" + File.separator + "AXIS2" + File.separator + "config" +
File.separator + file);
if (config != null) {
String currentLine;
BufferedReader br = new BufferedReader(new FileReader(config));
while ((currentLine = br.readLine()) != null) {
if (currentLine.contains("REPLACE_CK")) {
currentLine = currentLine.replace("REPLACE_CK",
System.getProperty(FrameworkConstants.CARBON_HOME) +
File.separator + "repository" + File.separator +
"resources" + File.separator + "security" +
File.separator + "wso2carbon.jks");
} else if (currentLine.contains("REPLACE_TS")) {
currentLine = currentLine.replace("REPLACE_TS",
System.getProperty(FrameworkConstants.CARBON_HOME) +
File.separator + "repository" + File.separator +
"resources" + File.separator + "security" +
File.separator + "client-truststore.jks");
}
sb.append(currentLine);
}
br.close();
}
File newConfig =
new File(ExtensionUtils.getSystemResourceLocation() + File.separator +
"artifacts" + File.separator + "AXIS2" + File.separator + "config" +
File.separator + newFile);
if (newConfig.exists()) {
FileUtils.deleteQuietly(newConfig);
}
FileUtils.touch(newConfig);
OutputStream os = FileUtils.openOutputStream(newConfig);
os.write(sb.toString().getBytes("UTF-8"));
os.close();
}
private File copyResourceToFileSystem(String resourceName, String fileName) throws IOException {
File file = new File(System.getProperty("basedir") + File.separator + "target" +
File.separator + fileName);
if (file.exists()) {
FileUtils.deleteQuietly(file);
}
FileUtils.touch(file);
OutputStream os = FileUtils.openOutputStream(file);
InputStream is;
if (resourceName.contains(".aar")) {
is = new FileInputStream(ExtensionUtils.getSystemResourceLocation() +
File.separator + "artifacts" + File.separator + "AXIS2" +
File.separator + "aar" +
File.separator + resourceName);
} else {
is = new FileInputStream(ExtensionUtils.getSystemResourceLocation() +
File.separator + "artifacts" + File.separator + "AXIS2" +
File.separator + "config" +
File.separator + resourceName);
}
if (is != null) {
byte[] data = new byte[1024];
int len;
while ((len = is.read(data)) != -1) {
os.write(data, 0, len);
}
}
os.flush();
os.close();
is.close();
return file;
}
private File copyServiceToFileSystem(String resourceName, String fileName) throws IOException {
File file = new File(System.getProperty("basedir") + File.separator + "target" +
File.separator + fileName);
if (file.exists()) {
FileUtils.deleteQuietly(file);
}
FileUtils.touch(file);
OutputStream os = FileUtils.openOutputStream(file);
InputStream is = new FileInputStream(ExtensionUtils.getSystemResourceLocation() +
File.separator + "artifacts" + File.separator + "AXIS2" +
File.separator + "config" +
File.separator + resourceName);
if (is != null) {
byte[] data = new byte[1024];
int len;
while ((len = is.read(data)) != -1) {
os.write(data, 0, len);
}
os.flush();
os.close();
is.close();
}
return file;
}
}
| stream closed properly
| test-automation-framework/org.wso2.carbon.automation.extensions/src/main/java/org/wso2/carbon/automation/extensions/servers/axis2server/Axis2ServerManager.java | stream closed properly |
|
Java | apache-2.0 | 646c5f3d864e1de9499409e26bc1b0456d7b943d | 0 | eFaps/eFaps-Kernel,ov3rflow/eFaps-Kernel | /*
* Copyright 2003 - 2009 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Revision: $Rev$
* Last Changed: $Date$
* Last Changed By: $Author$
*/
package org.efaps.esjp.earchive.node;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.efaps.admin.datamodel.Type;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.event.Return;
import org.efaps.admin.event.Parameter.ParameterValues;
import org.efaps.admin.event.Return.ReturnValues;
import org.efaps.admin.program.esjp.EFapsRevision;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.db.Checkin;
import org.efaps.db.Checkout;
import org.efaps.db.Context;
import org.efaps.db.Instance;
import org.efaps.db.SearchQuery;
import org.efaps.esjp.earchive.NamesInterface;
import org.efaps.esjp.earchive.repository.Repository;
import org.efaps.util.EFapsException;
/**
* TODO comment!
*
* @author jmox
* @version $Id$
*/
@EFapsUUID("d6cb382d-a8e8-4b35-9d28-6b344c53f676")
@EFapsRevision("$Rev$")
public class NodeUI implements NamesInterface {
public Return getTableUI(final Parameter _parameter) throws EFapsException {
final Return ret = new Return();
final Instance instance = _parameter.getInstance();
Node node = null;
if ("eArchive_Repository".equals(instance.getType().getName())) {
node = Node.getRootNodeFromDB(new Repository(instance));
}
final String parentInstanceKey;
final long parentId;
String revision = null;
if (node == null) {
parentId = instance.getId();
final String keyTmp = instance.getKey();
if (keyTmp.contains(SEPERATOR_REVISION)) {
final int pos = keyTmp.indexOf(SEPERATOR_REVISION);
revision = keyTmp.substring(pos);
parentInstanceKey = keyTmp.substring(0, pos);
} else {
parentInstanceKey = keyTmp;
}
} else {
parentId = node.getId();
parentInstanceKey = node.getHistoryId() + SEPERATOR_IDS + node.getCopyId();
}
final SearchQuery query = new SearchQuery();
query.setQueryTypes("eArchive_Node2NodeView");
query.setExpandChildTypes(true);
query.addWhereExprEqValue("Parent", parentId);
query.addSelect("NodeType");
query.addSelect("Child");
query.addSelect("HistoryId");
query.addSelect("CopyId");
query.execute();
final List<List<Instance>> list = new ArrayList<List<Instance>>();
while (query.next()) {
final List<Instance> instances = new ArrayList<Instance>(1);
final StringBuilder instanceKey = new StringBuilder()
.append(parentInstanceKey).append(SEPERATOR_INSTANCE)
.append(query.get("HistoryId")).append(SEPERATOR_IDS).append(query.get("CopyId"))
.append(revision != null ? revision : "");
Type type = Type.get((Long) query.get("NodeType"));
if (revision != null) {
if (type.getName().equals(TYPE_NODEDIRECTORY)) {
type = Type.get(TYPE_NODEDIRECTORYREV);
} else {
type = Type.get(TYPE_NODEFILEREV);
}
}
instances.add(Instance.get(type ,
(Long) query.get("Child"),
instanceKey.toString()));
list.add(instances);
}
ret.put(ReturnValues.VALUES, list);
return ret;
}
public Return getRevisionTableUI(final Parameter _parameter)
throws EFapsException {
final Return ret = new Return();
final Instance instance = _parameter.getInstance();
final Node node;
final String instanceKey;
if ("eArchive_Repository".equals(instance.getType().getName())) {
node = Node.getRootNodeFromDB(new Repository(instance));
instanceKey = node.getHistoryId() + SEPERATOR_IDS + node.getCopyId();
} else {
node = Node.getNodeFromDB(instance.getId(), instance.getKey());
instanceKey = instance.getKey();
}
final SearchQuery query = new SearchQuery();
query.setQueryTypes(TYPE_NODEABSTRACTREV);
query.setExpandChildTypes(true);
query.addWhereExprEqValue(TYPE_NODEABSTRACTREV_A_HISTORYID,
node.getHistoryId());
query.addWhereExprEqValue(TYPE_NODEABSTRACTREV_A_COPYID, node.getCopyId());
query.addSelect(TYPE_NODEABSTRACTREV_A_TYPE);
query.addSelect(TYPE_NODEABSTRACTREV_A_REVISION);
query.addSelect(TYPE_NODEABSTRACTREV_A_ID);
query.execute();
final List<List<Instance>> list = new ArrayList<List<Instance>>();
while (query.next()) {
final List<Instance> instances = new ArrayList<Instance>(1);
final StringBuilder keyBldr = new StringBuilder()
.append(instanceKey).append(SEPERATOR_REVISION)
.append(query.get(TYPE_NODEABSTRACTREV_A_REVISION));
instances.add(Instance.get((Type) query.get(TYPE_NODEABSTRACTREV_A_TYPE),
(Long) query.get(TYPE_NODEABSTRACTREV_A_ID),
keyBldr.toString()));
list.add(instances);
}
ret.put(ReturnValues.VALUES, list);
return ret;
}
public Return createDirectory(final Parameter _parameter)
throws EFapsException {
final String name = _parameter.getParameterValue("name");
final Instance instance = _parameter.getInstance();
final Node parentNode;
if ("eArchive_Repository".equals(instance.getType().getName())) {
parentNode = Node.getRootNodeFromDB(new Repository(instance));
} else {
parentNode = Node.getNodeFromDB(instance.getId(), instance.getKey());
}
final Node newDir = Node.createNewNode(name, Node.TYPE_NODEDIRECTORY);
newDir.connect2Parent(parentNode);
return new Return();
}
public Return createFile(final Parameter _parameter)
throws EFapsException {
final String name = _parameter.getParameterValue("name");
final Instance instance = _parameter.getInstance();
final Node node;
if ("eArchive_Repository".equals(instance.getType().getName())) {
node = Node.getRootNodeFromDB(new Repository(instance));
} else {
node = Node.getNodeFromDB(instance.getId(), instance.getKey());
}
final Node newFile = Node.createNewNode(name, Node.TYPE_NODEFILE);
newFile.connect2Parent(node);
final Instance fileInstance = Instance.get(Type.get(TYPE_FILE),
newFile.getFileId());
final Context.FileParameter fileItem =
Context.getThreadContext().getFileParameters().get("upload");
final Checkin checkin = new Checkin(fileInstance);
try {
checkin.execute(fileItem.getName(), fileItem.getInputStream(),
(int) fileItem.getSize());
} catch (final IOException e) {
throw new EFapsException(this.getClass(), "execute", e, _parameter);
}
return new Return();
}
public Return rename(final Parameter _parameter)
throws EFapsException {
final String name = _parameter.getParameterValue("name");
final Instance instance = _parameter.getInstance();
final Node node = Node.getNodeFromDB(instance.getId(), instance.getKey());
node.rename(name);
return new Return();
}
public Return getInstance(final Parameter _parameter) throws EFapsException {
final String instanceKey = (String) _parameter.get(ParameterValues.OTHERS);
Instance instance = null;
if (instanceKey != null) {
final boolean revision = instanceKey.contains(SEPERATOR_REVISION);
if (instanceKey.indexOf(SEPERATOR_INSTANCE) < 0 && !revision) {
instance = Instance.get(instanceKey);
} else {
final List<Node> nodes = Node.getNodeHirachy(instanceKey);
final Node node = nodes.get(nodes.size() - 1);
Type type = node.getType();
if (revision) {
if (type.getName().equals(TYPE_NODEDIRECTORY)) {
type = Type.get(TYPE_NODEDIRECTORYREV);
} else {
type = Type.get(TYPE_NODEFILEREV);
}
}
instance = Instance.get(type, node.getId(), instanceKey);
}
}
final Return ret = new Return();
ret.put(ReturnValues.VALUES, instance);
return ret;
}
public Return removeNode(final Parameter _parameter) throws EFapsException {
final Instance instance = _parameter.getInstance();
final String[] instanceKeys
= (String[]) _parameter.get(ParameterValues.OTHERS);
if (instanceKeys != null) {
final Node node;
if ("eArchive_Repository".equals(instance.getType().getName())) {
node = Node.getRootNodeFromDB(new Repository(instance));
} else {
node = Node.getNodeFromDB(instance.getId(), instance.getKey());
}
node.deleteChildren(instanceKeys);
}
return new Return();
}
public Return checkout(final Parameter _parameter) throws EFapsException {
final Return ret = new Return();
final Instance instance = _parameter.getInstance();
final Node node = Node.getNodeFromDB(instance.getId(), instance.getKey());
final Checkout checkout = new Checkout(Instance.get(Type.get(TYPE_FILE),
node.getFileId()));
File file = null;
try {
checkout.preprocess();
file = File.createTempFile(checkout.getFileName(), ".txt");
final FileOutputStream stream = new FileOutputStream(file);
checkout.execute(stream);
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
ret.put(ReturnValues.VALUES, file);
return ret;
}
}
| module/earchive/src/main/efaps/ESJP/org/efaps/esjp/earchive/node/NodeUI.java | /*
* Copyright 2003 - 2009 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Revision: $Rev$
* Last Changed: $Date$
* Last Changed By: $Author$
*/
package org.efaps.esjp.earchive.node;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.efaps.admin.datamodel.Type;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.event.Return;
import org.efaps.admin.event.Parameter.ParameterValues;
import org.efaps.admin.event.Return.ReturnValues;
import org.efaps.admin.program.esjp.EFapsRevision;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.db.Checkin;
import org.efaps.db.Checkout;
import org.efaps.db.Context;
import org.efaps.db.Instance;
import org.efaps.db.SearchQuery;
import org.efaps.esjp.earchive.NamesInterface;
import org.efaps.esjp.earchive.repository.Repository;
import org.efaps.util.EFapsException;
/**
* TODO comment!
*
* @author jmox
* @version $Id$
*/
@EFapsUUID("d6cb382d-a8e8-4b35-9d28-6b344c53f676")
@EFapsRevision("$Rev$")
public class NodeUI implements NamesInterface {

  /**
   * Builds the row instances for the node table: one row per child of the
   * current node. For a repository instance the repository root node is used
   * as parent; otherwise the parent is taken from the calling instance,
   * stripping a possible revision suffix from its key. Each row instance key
   * is "parentKey" + SEPERATOR_INSTANCE + "historyId" + SEPERATOR_IDS +
   * "copyId" (+ revision suffix, if any).
   *
   * @param _parameter parameter as defined by the eFaps API
   * @return Return with a List of one-element instance lists in VALUES
   * @throws EFapsException on any error from the eFaps kernel
   */
  public Return getTableUI(final Parameter _parameter) throws EFapsException {
    final Return ret = new Return();
    final Instance instance = _parameter.getInstance();
    Node node = null;
    if ("eArchive_Repository".equals(instance.getType().getName())) {
      // a repository itself has no node; resolve its root node
      node = Node.getRootNodeFromDB(new Repository(instance));
    }
    final String parentInstanceKey;
    final long parentId;
    String revision = null;
    if (node == null) {
      // called on a node: split a possible revision suffix off the key
      parentId = instance.getId();
      final String keyTmp = instance.getKey();
      if (keyTmp.contains(SEPERATOR_REVISION)) {
        final int pos = keyTmp.indexOf(SEPERATOR_REVISION);
        revision = keyTmp.substring(pos);
        parentInstanceKey = keyTmp.substring(0, pos);
      } else {
        parentInstanceKey = keyTmp;
      }
    } else {
      // called on a repository: key of the root node is historyId/copyId
      parentId = node.getId();
      parentInstanceKey = node.getHistoryId() + SEPERATOR_IDS + node.getCopyId();
    }
    // query all children of the parent node
    final SearchQuery query = new SearchQuery();
    query.setQueryTypes("eArchive_Node2NodeView");
    query.setExpandChildTypes(true);
    query.addWhereExprEqValue("Parent", parentId);
    query.addSelect("NodeType");
    query.addSelect("Child");
    query.addSelect("HistoryId");
    query.addSelect("CopyId");
    query.execute();
    final List<List<Instance>> list = new ArrayList<List<Instance>>();
    while (query.next()) {
      final List<Instance> instances = new ArrayList<Instance>(1);
      // child key: parent key + node part; revision suffix is propagated
      final StringBuilder instanceKey = new StringBuilder()
        .append(parentInstanceKey).append(SEPERATOR_INSTANCE)
        .append(query.get("HistoryId")).append(SEPERATOR_IDS).append(query.get("CopyId"))
        .append(revision != null ? revision : "");
      Type type = Type.get((Long) query.get("NodeType"));
      if (revision != null) {
        // inside a revision view the revision types must be shown
        if (type.getName().equals(TYPE_NODEDIRECTORY)) {
          type = Type.get(TYPE_NODEDIRECTORYREV);
        } else {
          type = Type.get(TYPE_NODEFILEREV);
        }
      }
      instances.add(Instance.get(type ,
                                 (Long) query.get("Child"),
                                 instanceKey.toString()));
      list.add(instances);
    }
    ret.put(ReturnValues.VALUES, list);
    return ret;
  }
public Return getRevisionTableUI(final Parameter _parameter)
throws EFapsException {
final Return ret = new Return();
final Instance instance = _parameter.getInstance();
final Node node;
final String instanceKey;
if ("eArchive_Repository".equals(instance.getType().getName())) {
node = Node.getRootNodeFromDB(new Repository(instance));
instanceKey = node.getHistoryId() + SEPERATOR_INSTANCE + node.getCopyId();
} else {
node = Node.getNodeFromDB(instance.getId(), instance.getKey());
instanceKey = instance.getKey();
}
final SearchQuery query = new SearchQuery();
query.setQueryTypes(TYPE_NODEABSTRACTREV);
query.setExpandChildTypes(true);
query.addWhereExprEqValue(TYPE_NODEABSTRACTREV_A_HISTORYID,
node.getHistoryId());
query.addWhereExprEqValue(TYPE_NODEABSTRACTREV_A_COPYID, node.getCopyId());
query.addSelect(TYPE_NODEABSTRACTREV_A_TYPE);
query.addSelect(TYPE_NODEABSTRACTREV_A_REVISION);
query.addSelect(TYPE_NODEABSTRACTREV_A_ID);
query.execute();
final List<List<Instance>> list = new ArrayList<List<Instance>>();
while (query.next()) {
final List<Instance> instances = new ArrayList<Instance>(1);
final StringBuilder keyBldr = new StringBuilder()
.append(instanceKey).append(SEPERATOR_REVISION)
.append(query.get(TYPE_NODEABSTRACTREV_A_REVISION));
instances.add(Instance.get((Type) query.get(TYPE_NODEABSTRACTREV_A_TYPE),
(Long) query.get(TYPE_NODEABSTRACTREV_A_ID),
keyBldr.toString()));
list.add(instances);
}
ret.put(ReturnValues.VALUES, list);
return ret;
}
public Return createDirectory(final Parameter _parameter)
throws EFapsException {
final String name = _parameter.getParameterValue("name");
final Instance instance = _parameter.getInstance();
final Node parentNode;
if ("eArchive_Repository".equals(instance.getType().getName())) {
parentNode = Node.getRootNodeFromDB(new Repository(instance));
} else {
parentNode = Node.getNodeFromDB(instance.getId(), instance.getKey());
}
final Node newDir = Node.createNewNode(name, Node.TYPE_NODEDIRECTORY);
newDir.connect2Parent(parentNode);
return new Return();
}
  /**
   * Creates a new file node below the node (or repository root) identified by
   * the calling instance and checks in the content of the uploaded file.
   *
   * @param _parameter parameter as defined by the eFaps API; must contain the
   *                   form value "name" and a file parameter "upload"
   * @return new empty Return
   * @throws EFapsException on any error from the eFaps kernel, or wrapping an
   *                        IOException thrown while reading the upload stream
   */
  public Return createFile(final Parameter _parameter)
      throws EFapsException {
    final String name = _parameter.getParameterValue("name");
    final Instance instance = _parameter.getInstance();
    final Node node;
    if ("eArchive_Repository".equals(instance.getType().getName())) {
      // a repository itself has no node; resolve its root node
      node = Node.getRootNodeFromDB(new Repository(instance));
    } else {
      node = Node.getNodeFromDB(instance.getId(), instance.getKey());
    }
    final Node newFile = Node.createNewNode(name, Node.TYPE_NODEFILE);
    newFile.connect2Parent(node);
    // check the uploaded content in against the file instance of the new node
    final Instance fileInstance = Instance.get(Type.get(TYPE_FILE),
                                               newFile.getFileId());
    final Context.FileParameter fileItem =
                 Context.getThreadContext().getFileParameters().get("upload");
    final Checkin checkin = new Checkin(fileInstance);
    try {
      checkin.execute(fileItem.getName(), fileItem.getInputStream(),
                      (int) fileItem.getSize());
    } catch (final IOException e) {
      throw new EFapsException(this.getClass(), "execute", e, _parameter);
    }
    return new Return();
  }
  /**
   * Renames the node belonging to the calling instance to the submitted name.
   *
   * @param _parameter parameter as defined by the eFaps API; must contain the
   *                   form value "name"
   * @return new empty Return
   * @throws EFapsException on any error from the eFaps kernel
   */
  public Return rename(final Parameter _parameter)
      throws EFapsException {
    final String name = _parameter.getParameterValue("name");
    final Instance instance = _parameter.getInstance();
    final Node node = Node.getNodeFromDB(instance.getId(), instance.getKey());
    node.rename(name);
    return new Return();
  }
  /**
   * Resolves the Instance belonging to an instance key handed over via
   * {@link ParameterValues#OTHERS}. A plain key (containing neither node nor
   * revision separator) is resolved directly; otherwise the node hierarchy
   * encoded in the key is walked and its deepest node is used. For revision
   * keys the type is mapped to the corresponding revision type.
   *
   * @param _parameter parameter as defined by the eFaps API
   * @return Return containing the resolved Instance (or null) in VALUES
   * @throws EFapsException on any error from the eFaps kernel
   */
  public Return getInstance(final Parameter _parameter) throws EFapsException {
    final String instanceKey = (String) _parameter.get(ParameterValues.OTHERS);
    Instance instance = null;
    if (instanceKey != null) {
      final boolean revision = instanceKey.contains(SEPERATOR_REVISION);
      if (instanceKey.indexOf(SEPERATOR_INSTANCE) < 0 && !revision) {
        // plain eFaps instance key, e.g. the repository itself
        instance = Instance.get(instanceKey);
      } else {
        // walk the encoded node hierarchy and take its deepest node
        final List<Node> nodes = Node.getNodeHirachy(instanceKey);
        final Node node = nodes.get(nodes.size() - 1);
        Type type = node.getType();
        if (revision) {
          // map to the matching revision type
          if (type.getName().equals(TYPE_NODEDIRECTORY)) {
            type = Type.get(TYPE_NODEDIRECTORYREV);
          } else {
            type = Type.get(TYPE_NODEFILEREV);
          }
        }
        instance = Instance.get(type, node.getId(), instanceKey);
      }
    }
    final Return ret = new Return();
    ret.put(ReturnValues.VALUES, instance);
    return ret;
  }
  /**
   * Deletes the child nodes whose instance keys were handed over via
   * {@link ParameterValues#OTHERS} from the node (or repository root)
   * identified by the calling instance. Does nothing when no keys are given.
   *
   * @param _parameter parameter as defined by the eFaps API
   * @return new empty Return
   * @throws EFapsException on any error from the eFaps kernel
   */
  public Return removeNode(final Parameter _parameter) throws EFapsException {
    final Instance instance = _parameter.getInstance();
    final String[] instanceKeys
                            = (String[]) _parameter.get(ParameterValues.OTHERS);
    if (instanceKeys != null) {
      final Node node;
      if ("eArchive_Repository".equals(instance.getType().getName())) {
        // a repository itself has no node; resolve its root node
        node = Node.getRootNodeFromDB(new Repository(instance));
      } else {
        node = Node.getNodeFromDB(instance.getId(), instance.getKey());
      }
      node.deleteChildren(instanceKeys);
    }
    return new Return();
  }
public Return checkout(final Parameter _parameter) throws EFapsException {
final Return ret = new Return();
final Instance instance = _parameter.getInstance();
final Node node = Node.getNodeFromDB(instance.getId(), instance.getKey());
final Checkout checkout = new Checkout(Instance.get(Type.get(TYPE_FILE),
node.getFileId()));
File file = null;
try {
checkout.preprocess();
file = File.createTempFile(checkout.getFileName(), ".txt");
final FileOutputStream stream = new FileOutputStream(file);
checkout.execute(stream);
} catch (final Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
ret.put(ReturnValues.VALUES, file);
return ret;
}
}
| - BugFix: wrong separator was used
git-svn-id: 4b3b87045ec33e1a2f7ddff44705baa56df11711@2258 fee104cc-1dfa-8c0f-632d-d3b7e6b59fb0
| module/earchive/src/main/efaps/ESJP/org/efaps/esjp/earchive/node/NodeUI.java | - BugFix: wrong separator was used
|
Java | apache-2.0 | 5fcfbdd54fe8987ad2af86dcec09ddcacd5038f9 | 0 | bhecquet/seleniumRobot,bhecquet/seleniumRobot,bhecquet/seleniumRobot | package com.seleniumtests.core.runner;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.testng.IInvokedMethod;
import org.testng.IInvokedMethodListener2;
import org.testng.IResultMap;
import org.testng.ISuite;
import org.testng.ISuiteListener;
import org.testng.ITestContext;
import org.testng.ITestListener;
import org.testng.ITestNGMethod;
import org.testng.ITestResult;
import org.testng.Reporter;
import org.testng.internal.ResultMap;
import org.testng.internal.TestResult;
import com.mashape.unirest.http.Unirest;
import com.seleniumtests.core.SeleniumTestsContextManager;
import com.seleniumtests.core.TearDownService;
import com.seleniumtests.core.testretry.TestRetryAnalyzer;
import com.seleniumtests.driver.WebUIDriver;
import com.seleniumtests.driver.screenshots.ScreenShot;
import com.seleniumtests.driver.screenshots.ScreenshotUtil;
import com.seleniumtests.reporter.CommonReporter;
import com.seleniumtests.reporter.TestLogging;
import com.seleniumtests.reporter.TestStep;
import com.seleniumtests.util.logging.SeleniumRobotLogger;
/**
 * Central TestNG listener of seleniumRobot: initializes the test contexts,
 * wires the retry analyzer, logs test start/end markers and keeps per-context
 * maps of failed/skipped/passed results so that retried tests are reported
 * only once.
 */
public class SeleniumRobotTestListener implements ITestListener, IInvokedMethodListener2, ISuiteListener {

    protected static final Logger logger = SeleniumRobotLogger.getLogger(SeleniumRobotTestListener.class);

    // NOTE(review): not referenced anywhere in this class body — presumably
    // kept for external access; confirm before removing
    private static Map<Thread, Boolean> cucumberTest = Collections.synchronizedMap(new HashMap<>());

    // suite start timestamp, used to log the total execution time
    private Date start;

    // per test-context bookkeeping, keyed by ITestContext name
    private Map<String, Boolean> isRetryHandleNeeded = new HashMap<>();
    private Map<String, IResultMap> failedTests = new HashMap<>();
    private Map<String, IResultMap> skippedTests = new HashMap<>();
    private Map<String, IResultMap> passedTests = new HashMap<>();

    // last constructed listener instance (written in the constructor)
    private static SeleniumRobotTestListener currentListener;

    public SeleniumRobotTestListener() {
        currentListener = this;
    }

    /** Returns the map telling, per test context, whether retry post-processing is needed. */
    public Map<String, Boolean> getIsRetryHandleNeeded() {
        return isRetryHandleNeeded;
    }

    /** Builds a readable signature "package.Class.method()" for logging. */
    protected String buildMethodSignature(final Method method) {
        return method.getDeclaringClass().getCanonicalName() + "." + method.getName() + "()";
    }
    @Override
    public void onTestStart(ITestResult result) {
        // no-op: per-test initialization is done in beforeInvocation(...)
    }

    @Override
    public void onTestSuccess(ITestResult result) {
        // no-op: the final snapshot/step is logged in afterInvocation(...)
    }
    /**
     * Handles a failed test with regard to the retry analyzer: when the test
     * will be retried its status is downgraded to SKIP; otherwise the result
     * is recorded in the per-context failed-tests map. In both cases the
     * context is flagged so that onFinish performs retry post-processing.
     */
    @Override
    public synchronized void onTestFailure(ITestResult testResult) {
        if (testResult.getMethod().getRetryAnalyzer() != null) {
            TestRetryAnalyzer testRetryAnalyzer = (TestRetryAnalyzer) testResult.getMethod().getRetryAnalyzer();

            // test will be retried
            if (testRetryAnalyzer.retryPeek(testResult)) {
                testResult.setStatus(ITestResult.SKIP);
                Reporter.setCurrentTestResult(null);
            } else {
                // retries exhausted: record the definitive failure
                IResultMap rMap = failedTests.get(testResult.getTestContext().getName());
                rMap.addResult(testResult, testResult.getMethod());
                failedTests.put(testResult.getTestContext().getName(), rMap);
            }

            logger.info(testResult.getMethod() + " Failed in " + testRetryAnalyzer.getCount() + " times");
            isRetryHandleNeeded.put(testResult.getTestContext().getName(), true);
        }
    }
    @Override
    public void onTestSkipped(ITestResult result) {
        // no-op: skipped results are post-processed in onFinish(ITestContext)
    }

    @Override
    public void onTestFailedButWithinSuccessPercentage(ITestResult result) {
        // no-op: success-percentage handling is not used
    }
@Override
public void onStart(ITestContext context) {
start = new Date();
SeleniumTestsContextManager.initGlobalContext(context);
SeleniumTestsContextManager.initThreadContext(context, null);
isRetryHandleNeeded.put(context.getName(), false);
failedTests.put(context.getName(), new ResultMap());
skippedTests.put(context.getName(), new ResultMap());
passedTests.put(context.getName(), new ResultMap());
}
@Override
public void onFinish(ITestContext context) {
if (isRetryHandleNeeded.get(context.getName())) {
removeIncorrectlySkippedTests(context, failedTests.get(context.getName()));
removeFailedTestsInTestNG(context);
} else {
failedTests.put(context.getName(), context.getFailedTests());
skippedTests.put(context.getName(), context.getSkippedTests());
passedTests.put(context.getName(), context.getPassedTests());
}
}
    @Override
    public void beforeInvocation(IInvokedMethod method, ITestResult testResult) {
        // no-op: the 3-arg IInvokedMethodListener2 variant is used instead
    }

    @Override
    public void afterInvocation(IInvokedMethod method, ITestResult testResult) {
        // no-op: the 3-arg IInvokedMethodListener2 variant is used instead
    }
    /**
     * Before every test-method invocation: stores the method name (or the
     * cucumber scenario name taken from the first parameter), logs the start
     * marker, initializes the thread context and installs a retry analyzer
     * when none is configured.
     */
    @Override
    public void beforeInvocation(IInvokedMethod method, ITestResult testResult, ITestContext context) {
        TestLogging.setCurrentTestResult(testResult);

        if (method.isTestMethod()) {
            if (SeleniumRobotTestPlan.isCucumberTest()) {
                // cucumber: the scenario name is the first test parameter
                testResult.setAttribute(SeleniumRobotLogger.METHOD_NAME, testResult.getParameters()[0].toString());
                logger.info(SeleniumRobotLogger.START_TEST_PATTERN + testResult.getParameters()[0].toString());
                SeleniumTestsContextManager.initThreadContext(context, testResult.getParameters()[0].toString());
                SeleniumTestsContextManager.getThreadContext().setTestMethodSignature(testResult.getParameters()[0].toString());
            } else {
                testResult.setAttribute(SeleniumRobotLogger.METHOD_NAME, method.getTestMethod().getMethodName());
                logger.info(SeleniumRobotLogger.START_TEST_PATTERN + method.getTestMethod().getMethodName());
                SeleniumTestsContextManager.initThreadContext(context, method.getTestMethod().getMethodName());
                SeleniumTestsContextManager.getThreadContext().setTestMethodSignature(
                        buildMethodSignature(method.getTestMethod().getConstructorOrMethod().getMethod()));
            }

            // make sure every test method gets retried on failure
            if (testResult.getMethod().getRetryAnalyzer() == null) {
                testResult.getMethod().setRetryAnalyzer(new TestRetryAnalyzer());
            }
        }
    }
    /**
     * After every test-method invocation: runs the registered tear-down
     * services, logs the end marker, merges soft-assertion failures into the
     * result and logs the final "Test end" step (snapshot + driver cleanup).
     */
    @Override
    public void afterInvocation(IInvokedMethod method, ITestResult testResult, ITestContext context) {
        Reporter.setCurrentTestResult(testResult);

        if (method.isTestMethod()) {
            // run user registered tear down services, if any
            List<TearDownService> serviceList = SeleniumTestsContextManager.getThreadContext().getTearDownServices();
            if (serviceList != null && !serviceList.isEmpty()) {
                for (TearDownService service : serviceList) {
                    service.tearDown();
                }
            }

            logger.info(SeleniumRobotLogger.END_TEST_PATTERN + testResult.getAttribute(SeleniumRobotLogger.METHOD_NAME));
            Reporter.setCurrentTestResult(testResult);

            // Handle Soft CustomAssertion
            // (inner check is redundant with the guard above, kept as-is)
            if (method.isTestMethod()) {
                changeTestResult(testResult);
            }

            if (testResult.getThrowable() != null) {
                logger.error(testResult.getThrowable().getMessage());
            }

            // capture snap shot at the end of the test
            logLastStep(testResult);
        }
    }
    /**
     * Before the suite runs: points the logger at the configured output
     * directories, computes the application path from the suite file and logs
     * the application and core versions.
     */
    @Override
    public void onStart(ISuite suite) {
        SeleniumRobotLogger.updateLogger(SeleniumTestsContextManager.getGlobalContext().getOutputDirectory(),
                SeleniumTestsContextManager.getGlobalContext().getDefaultOutputDirectory());
        SeleniumTestsContextManager.generateApplicationPath(suite.getXmlSuite());
        logger.info(String.format("Application %s version: %s", SeleniumTestsContextManager.getApplicationName(), SeleniumTestsContextManager.getApplicationVersion()));
        logger.info("Core version: " + SeleniumTestsContextManager.getCoreVersion());
    }
@Override
public void onFinish(ISuite suite) {
if (start != null) {
logger.info("Test Suite Execution Time: " + (new Date().getTime() - start.getTime()) / 1000 / 60 + " minutes.");
} else {
logger.warn("No test executed");
}
try {
SeleniumRobotLogger.parseLogFile();
} catch (IOException e) {
logger.error("cannot read log file", e);
}
try {
Unirest.shutdown();
} catch (IOException e) {
logger.error("Cannot stop unirest", e);
}
}
/**
* On test end, will take a snap shot and store it
*/
private void logLastStep(ITestResult testResult) {
TestStep tearDownStep = new TestStep("Test end");
TestLogging.setCurrentRootTestStep(tearDownStep);
TestLogging.log(String.format("Test is %s", testResult.isSuccess() ? "OK": "KO with error: " + testResult.getThrowable().getMessage()));
if (WebUIDriver.getWebDriver(false) != null) {
for (ScreenShot screenshot: new ScreenshotUtil().captureWebPageSnapshots(true)) {
TestLogging.logScreenshot(screenshot);
}
}
TestLogging.logTestStep(tearDownStep);
WebUIDriver.cleanUp();
}
    /**
     * In case the test result is SUCCESS but soft assertions were raised,
     * changes the result to FAILURE, merging multiple assertion errors into a
     * single throwable, and moves the result from the passed to the failed
     * map of the test context.
     *
     * @param result result of the test method to post-process
     */
    public void changeTestResult(final ITestResult result) {
        List<Throwable> verificationFailures = SeleniumTestsContextManager.getThreadContext().getVerificationFailures(Reporter.getCurrentTestResult());

        int size = verificationFailures.size();
        // nothing to do without soft failures, or when already failed
        if (size == 0 || result.getStatus() == TestResult.FAILURE) {
            return;
        }

        result.setStatus(TestResult.FAILURE);

        if (size == 1) {
            result.setThrowable(verificationFailures.get(0));
        } else {
            // merge all soft failures into one AssertionError
            StringBuilder stackString = new StringBuilder("!!! Many Test Failures (").append(size).append(")\n\n");
            for (int i = 0; i < size - 1; i++) {
                CommonReporter.generateTheStackTrace(verificationFailures.get(i), String.format("Failure %d of %d%n", i + 1, size), stackString);
            }

            Throwable last = verificationFailures.get(size - 1);
            stackString.append(String.format("%n.%nFailure %d of %d%n", size, size));
            stackString.append(last.toString());

            // set merged throwable; keep the stack trace of the last failure
            Throwable merged = new AssertionError(stackString.toString());
            merged.setStackTrace(last.getStackTrace());
            result.setThrowable(merged);
        }

        // move test for passedTests to failedTests if test is not already in failed tests
        if (result.getTestContext().getPassedTests().getAllMethods().contains(result.getMethod())) {
            result.getTestContext().getPassedTests().removeResult(result);
            result.getTestContext().getFailedTests().addResult(result, result.getMethod());
        }
    }
    /**
     * Removes from the context's skipped tests those methods that eventually
     * failed or passed after a retry (the retry mechanism marks intermediate
     * attempts as SKIP, which would otherwise be reported as real skips).
     *
     * @param tc  test context whose skipped-tests map is cleaned
     * @param map definitive failed results collected for this context
     */
    private void removeIncorrectlySkippedTests(final ITestContext tc, final IResultMap map) {
        List<ITestNGMethod> failsToRemove = new ArrayList<>();
        IResultMap returnValue = tc.getSkippedTests();

        for (ITestResult result : returnValue.getAllResults()) {
            // skipped method that finally failed -> not a real skip
            for (ITestResult resultToCheck : map.getAllResults()) {
                if (resultToCheck.getMethod().equals(result.getMethod())) {
                    failsToRemove.add(resultToCheck.getMethod());
                    break;
                }
            }

            // skipped method that finally passed -> not a real skip
            for (ITestResult resultToCheck : tc.getPassedTests().getAllResults()) {
                if (resultToCheck.getMethod().equals(result.getMethod())) {
                    failsToRemove.add(resultToCheck.getMethod());
                    break;
                }
            }
        }

        for (ITestNGMethod method : failsToRemove) {
            returnValue.removeResult(method);
        }
        skippedTests.put(tc.getName(), tc.getSkippedTests());
    }
    /**
     * Removes from TestNG's failed tests those results that do not match the
     * definitive failures recorded by this listener (i.e. intermediate
     * failures of retried tests). Matching is done on method identity plus
     * end timestamp to distinguish individual attempts.
     *
     * @param tc test context whose failed-tests map is cleaned
     */
    private void removeFailedTestsInTestNG(final ITestContext tc) {
        IResultMap returnValue = tc.getFailedTests();
        ResultMap removeMap = new ResultMap();

        // collect every TestNG failure that is not a recorded definitive failure
        for (ITestResult result : returnValue.getAllResults()) {
            boolean isFailed = false;
            for (ITestResult resultToCheck : failedTests.get(tc.getName()).getAllResults()) {
                if (result.getMethod().equals(resultToCheck.getMethod())
                        && result.getEndMillis() == resultToCheck.getEndMillis()) {
                    isFailed = true;
                    break;
                }
            }
            if (!isFailed) {
                logger.info("Removed failed cases:" + result.getMethod().getMethodName());
                removeMap.addResult(result, result.getMethod());
            }
        }

        // remove the collected results from TestNG's failed-tests map
        for (ITestResult result : removeMap.getAllResults()) {
            ITestResult removeResult = null;
            for (ITestResult resultToCheck : returnValue.getAllResults()) {
                if (result.getMethod().equals(resultToCheck.getMethod())
                        && result.getEndMillis() == resultToCheck.getEndMillis()) {
                    removeResult = resultToCheck;
                    break;
                }
            }
            if (removeResult != null) {
                returnValue.getAllResults().remove(removeResult);
            }
        }
    }
}
| core/src/main/java/com/seleniumtests/core/runner/SeleniumRobotTestListener.java | package com.seleniumtests.core.runner;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.testng.IInvokedMethod;
import org.testng.IInvokedMethodListener2;
import org.testng.IResultMap;
import org.testng.ISuite;
import org.testng.ISuiteListener;
import org.testng.ITestContext;
import org.testng.ITestListener;
import org.testng.ITestNGMethod;
import org.testng.ITestResult;
import org.testng.Reporter;
import org.testng.internal.ResultMap;
import org.testng.internal.TestResult;
import com.mashape.unirest.http.Unirest;
import com.seleniumtests.core.SeleniumTestsContextManager;
import com.seleniumtests.core.TearDownService;
import com.seleniumtests.core.testretry.TestRetryAnalyzer;
import com.seleniumtests.driver.WebUIDriver;
import com.seleniumtests.driver.screenshots.ScreenShot;
import com.seleniumtests.driver.screenshots.ScreenshotUtil;
import com.seleniumtests.reporter.CommonReporter;
import com.seleniumtests.reporter.TestLogging;
import com.seleniumtests.reporter.TestStep;
import com.seleniumtests.util.logging.SeleniumRobotLogger;
/**
 * Central TestNG listener of seleniumRobot: initializes the test contexts,
 * wires the retry analyzer, logs test start/end markers and keeps per-context
 * maps of failed/skipped/passed results so that retried tests are reported
 * only once.
 */
public class SeleniumRobotTestListener implements ITestListener, IInvokedMethodListener2, ISuiteListener {

    protected static final Logger logger = SeleniumRobotLogger.getLogger(SeleniumRobotTestListener.class);

    // NOTE(review): not referenced anywhere in this class body — presumably
    // kept for external access; confirm before removing
    private static Map<Thread, Boolean> cucumberTest = Collections.synchronizedMap(new HashMap<>());

    // suite start timestamp, used to log the total execution time
    private Date start;

    // per test-context bookkeeping, keyed by ITestContext name
    private Map<String, Boolean> isRetryHandleNeeded = new HashMap<>();
    private Map<String, IResultMap> failedTests = new HashMap<>();
    private Map<String, IResultMap> skippedTests = new HashMap<>();
    private Map<String, IResultMap> passedTests = new HashMap<>();

    // last constructed listener instance (written in the constructor)
    private static SeleniumRobotTestListener currentListener;

    public SeleniumRobotTestListener() {
        currentListener = this;
    }

    /** Returns the map telling, per test context, whether retry post-processing is needed. */
    public Map<String, Boolean> getIsRetryHandleNeeded() {
        return isRetryHandleNeeded;
    }

    /** Builds a readable signature "package.Class.method()" for logging. */
    protected String buildMethodSignature(final Method method) {
        return method.getDeclaringClass().getCanonicalName() + "." + method.getName() + "()";
    }
    @Override
    public void onTestStart(ITestResult result) {
        // no-op: per-test initialization is done in beforeInvocation(...)
    }

    @Override
    public void onTestSuccess(ITestResult result) {
        // capture snap shot at the end of the test
        logLastStep(result);
    }

    /**
     * Handles a failed test with regard to the retry analyzer: when the test
     * will be retried its status is downgraded to SKIP; otherwise the result
     * is recorded in the per-context failed-tests map. In both cases the
     * context is flagged for retry post-processing and the final snapshot is
     * taken.
     */
    @Override
    public synchronized void onTestFailure(ITestResult testResult) {
        if (testResult.getMethod().getRetryAnalyzer() != null) {
            TestRetryAnalyzer testRetryAnalyzer = (TestRetryAnalyzer) testResult.getMethod().getRetryAnalyzer();

            // test will be retried
            if (testRetryAnalyzer.retryPeek(testResult)) {
                testResult.setStatus(ITestResult.SKIP);
                Reporter.setCurrentTestResult(null);
            } else {
                // retries exhausted: record the definitive failure
                IResultMap rMap = failedTests.get(testResult.getTestContext().getName());
                rMap.addResult(testResult, testResult.getMethod());
                failedTests.put(testResult.getTestContext().getName(), rMap);
            }

            logger.info(testResult.getMethod() + " Failed in " + testRetryAnalyzer.getCount() + " times");
            isRetryHandleNeeded.put(testResult.getTestContext().getName(), true);
        }
        // capture snap shot
        logLastStep(testResult);
    }
    @Override
    public void onTestSkipped(ITestResult result) {
        // no-op: skipped results are post-processed in onFinish(ITestContext)
    }

    @Override
    public void onTestFailedButWithinSuccessPercentage(ITestResult result) {
        // no-op: success-percentage handling is not used
    }
    /**
     * Initializes the global and thread contexts and resets the per-context
     * bookkeeping maps before any test of this context runs.
     */
    @Override
    public void onStart(ITestContext context) {
        start = new Date();
        SeleniumTestsContextManager.initGlobalContext(context);
        SeleniumTestsContextManager.initThreadContext(context, null);
        isRetryHandleNeeded.put(context.getName(), false);
        failedTests.put(context.getName(), new ResultMap());
        skippedTests.put(context.getName(), new ResultMap());
        passedTests.put(context.getName(), new ResultMap());
    }

    /**
     * After all tests of a context ran: when retries happened, cleans the
     * skipped/failed result maps; otherwise snapshots the context's result
     * maps as-is.
     */
    @Override
    public void onFinish(ITestContext context) {
        if (isRetryHandleNeeded.get(context.getName())) {
            // retries occurred: drop phantom skips and stale failures
            removeIncorrectlySkippedTests(context, failedTests.get(context.getName()));
            removeFailedTestsInTestNG(context);
        } else {
            failedTests.put(context.getName(), context.getFailedTests());
            skippedTests.put(context.getName(), context.getSkippedTests());
            passedTests.put(context.getName(), context.getPassedTests());
        }
    }
    @Override
    public void beforeInvocation(IInvokedMethod method, ITestResult testResult) {
        // no-op: the 3-arg IInvokedMethodListener2 variant is used instead
    }

    @Override
    public void afterInvocation(IInvokedMethod method, ITestResult testResult) {
        // no-op: the 3-arg IInvokedMethodListener2 variant is used instead
    }
    /**
     * Before every test-method invocation: stores the method name (or the
     * cucumber scenario name taken from the first parameter), logs the start
     * marker, initializes the thread context and installs a retry analyzer
     * when none is configured.
     */
    @Override
    public void beforeInvocation(IInvokedMethod method, ITestResult testResult, ITestContext context) {
        TestLogging.setCurrentTestResult(testResult);

        if (method.isTestMethod()) {
            if (SeleniumRobotTestPlan.isCucumberTest()) {
                // cucumber: the scenario name is the first test parameter
                testResult.setAttribute(SeleniumRobotLogger.METHOD_NAME, testResult.getParameters()[0].toString());
                logger.info(SeleniumRobotLogger.START_TEST_PATTERN + testResult.getParameters()[0].toString());
                SeleniumTestsContextManager.initThreadContext(context, testResult.getParameters()[0].toString());
                SeleniumTestsContextManager.getThreadContext().setTestMethodSignature(testResult.getParameters()[0].toString());
            } else {
                testResult.setAttribute(SeleniumRobotLogger.METHOD_NAME, method.getTestMethod().getMethodName());
                logger.info(SeleniumRobotLogger.START_TEST_PATTERN + method.getTestMethod().getMethodName());
                SeleniumTestsContextManager.initThreadContext(context, method.getTestMethod().getMethodName());
                SeleniumTestsContextManager.getThreadContext().setTestMethodSignature(
                        buildMethodSignature(method.getTestMethod().getConstructorOrMethod().getMethod()));
            }

            // make sure every test method gets retried on failure
            if (testResult.getMethod().getRetryAnalyzer() == null) {
                testResult.getMethod().setRetryAnalyzer(new TestRetryAnalyzer());
            }
        }
    }
    /**
     * After every test-method invocation: runs the registered tear-down
     * services, logs the end marker and merges soft-assertion failures into
     * the result.
     */
    @Override
    public void afterInvocation(IInvokedMethod method, ITestResult testResult, ITestContext context) {
        Reporter.setCurrentTestResult(testResult);

        if (method.isTestMethod()) {
            // run user registered tear down services, if any
            List<TearDownService> serviceList = SeleniumTestsContextManager.getThreadContext().getTearDownServices();
            if (serviceList != null && !serviceList.isEmpty()) {
                for (TearDownService service : serviceList) {
                    service.tearDown();
                }
            }

            logger.info(SeleniumRobotLogger.END_TEST_PATTERN + testResult.getAttribute(SeleniumRobotLogger.METHOD_NAME));
            Reporter.setCurrentTestResult(testResult);

            // Handle Soft CustomAssertion
            // (inner check is redundant with the guard above, kept as-is)
            if (method.isTestMethod()) {
                changeTestResult(testResult);
            }

            if (testResult.getThrowable() != null) {
                logger.error(testResult.getThrowable().getMessage());
            }
        }
    }
    /**
     * Before the suite runs: points the logger at the configured output
     * directories, computes the application path from the suite file and logs
     * the application and core versions.
     */
    @Override
    public void onStart(ISuite suite) {
        SeleniumRobotLogger.updateLogger(SeleniumTestsContextManager.getGlobalContext().getOutputDirectory(),
                SeleniumTestsContextManager.getGlobalContext().getDefaultOutputDirectory());
        SeleniumTestsContextManager.generateApplicationPath(suite.getXmlSuite());
        logger.info(String.format("Application %s version: %s", SeleniumTestsContextManager.getApplicationName(), SeleniumTestsContextManager.getApplicationVersion()));
        logger.info("Core version: " + SeleniumTestsContextManager.getCoreVersion());
    }

    /**
     * After the suite ran: logs the total execution time, parses the log file
     * for the reporters and shuts the Unirest HTTP client down.
     */
    @Override
    public void onFinish(ISuite suite) {
        if (start != null) {
            logger.info("Test Suite Execution Time: " + (new Date().getTime() - start.getTime()) / 1000 / 60 + " minutes.");
        } else {
            logger.warn("No test executed");
        }

        try {
            SeleniumRobotLogger.parseLogFile();
        } catch (IOException e) {
            logger.error("cannot read log file", e);
        }

        try {
            Unirest.shutdown();
        } catch (IOException e) {
            logger.error("Cannot stop unirest", e);
        }
    }
/**
* On test end, will take a snap shot and store it
*/
private void logLastStep(ITestResult testResult) {
TestStep tearDownStep = new TestStep("Test end");
TestLogging.setCurrentRootTestStep(tearDownStep);
TestLogging.log(String.format("Test is %s", testResult.isSuccess() ? "OK": "KO with error: " + testResult.getThrowable().getMessage()));
if (WebUIDriver.getWebDriver(false) != null) {
for (ScreenShot screenshot: new ScreenshotUtil().captureWebPageSnapshots(true)) {
TestLogging.logScreenshot(screenshot);
}
}
TestLogging.logTestStep(tearDownStep);
WebUIDriver.cleanUp();
}
    /**
     * In case the test result is SUCCESS but soft assertions were raised,
     * changes the result to FAILURE, merging multiple assertion errors into a
     * single throwable, and moves the result from the passed to the failed
     * map of the test context.
     *
     * @param result result of the test method to post-process
     */
    public void changeTestResult(final ITestResult result) {
        List<Throwable> verificationFailures = SeleniumTestsContextManager.getThreadContext().getVerificationFailures(Reporter.getCurrentTestResult());

        int size = verificationFailures.size();
        // nothing to do without soft failures, or when already failed
        if (size == 0 || result.getStatus() == TestResult.FAILURE) {
            return;
        }

        result.setStatus(TestResult.FAILURE);

        if (size == 1) {
            result.setThrowable(verificationFailures.get(0));
        } else {
            // merge all soft failures into one AssertionError
            StringBuilder stackString = new StringBuilder("!!! Many Test Failures (").append(size).append(")\n\n");
            for (int i = 0; i < size - 1; i++) {
                CommonReporter.generateTheStackTrace(verificationFailures.get(i), String.format("Failure %d of %d%n", i + 1, size), stackString);
            }

            Throwable last = verificationFailures.get(size - 1);
            stackString.append(String.format("%n.%nFailure %d of %d%n", size, size));
            stackString.append(last.toString());

            // set merged throwable; keep the stack trace of the last failure
            Throwable merged = new AssertionError(stackString.toString());
            merged.setStackTrace(last.getStackTrace());
            result.setThrowable(merged);
        }

        // move test for passedTests to failedTests if test is not already in failed tests
        if (result.getTestContext().getPassedTests().getAllMethods().contains(result.getMethod())) {
            result.getTestContext().getPassedTests().removeResult(result);
            result.getTestContext().getFailedTests().addResult(result, result.getMethod());
        }
    }
    /**
     * Removes from the context's skipped tests those methods that eventually
     * failed or passed after a retry (the retry mechanism marks intermediate
     * attempts as SKIP, which would otherwise be reported as real skips).
     *
     * @param tc  test context whose skipped-tests map is cleaned
     * @param map definitive failed results collected for this context
     */
    private void removeIncorrectlySkippedTests(final ITestContext tc, final IResultMap map) {
        List<ITestNGMethod> failsToRemove = new ArrayList<>();
        IResultMap returnValue = tc.getSkippedTests();

        for (ITestResult result : returnValue.getAllResults()) {
            // skipped method that finally failed -> not a real skip
            for (ITestResult resultToCheck : map.getAllResults()) {
                if (resultToCheck.getMethod().equals(result.getMethod())) {
                    failsToRemove.add(resultToCheck.getMethod());
                    break;
                }
            }

            // skipped method that finally passed -> not a real skip
            for (ITestResult resultToCheck : tc.getPassedTests().getAllResults()) {
                if (resultToCheck.getMethod().equals(result.getMethod())) {
                    failsToRemove.add(resultToCheck.getMethod());
                    break;
                }
            }
        }

        for (ITestNGMethod method : failsToRemove) {
            returnValue.removeResult(method);
        }
        skippedTests.put(tc.getName(), tc.getSkippedTests());
    }
/**
 * Remove failed test cases in TestNG.
 *
 * <p>TestNG's own failed-test map can contain stale results for tests that were
 * retried. Any failed result that is not present in this listener's tracked
 * {@code failedTests} map (matched by method AND end timestamp, so that distinct
 * invocations of the same method are told apart) is removed from the context.
 *
 * @param tc the test context whose TestNG failed-test map is pruned
 */
private void removeFailedTestsInTestNG(final ITestContext tc) {
    IResultMap returnValue = tc.getFailedTests();
    ResultMap removeMap = new ResultMap();

    // Pass 1: collect every TestNG failure that is NOT in our tracked failures.
    for (ITestResult result : returnValue.getAllResults()) {
        boolean isFailed = false;
        for (ITestResult resultToCheck : failedTests.get(tc.getName()).getAllResults()) {
            // End time disambiguates multiple runs (retries) of the same method.
            if (result.getMethod().equals(resultToCheck.getMethod())
                    && result.getEndMillis() == resultToCheck.getEndMillis()) {
                isFailed = true;
                break;
            }
        }
        if (!isFailed) {
            logger.info("Removed failed cases:" + result.getMethod().getMethodName());
            removeMap.addResult(result, result.getMethod());
        }
    }

    // Pass 2: locate and remove each collected result from the TestNG map.
    for (ITestResult result : removeMap.getAllResults()) {
        ITestResult removeResult = null;
        for (ITestResult resultToCheck : returnValue.getAllResults()) {
            if (result.getMethod().equals(resultToCheck.getMethod())
                    && result.getEndMillis() == resultToCheck.getEndMillis()) {
                removeResult = resultToCheck;
                break;
            }
        }
        if (removeResult != null) {
            // NOTE(review): relies on getAllResults() returning a live, mutable view
            // of the underlying result map — confirm against the TestNG version in use.
            returnValue.getAllResults().remove(removeResult);
        }
    }
}
}
| issue #73: quit and restart browser when retrying test | core/src/main/java/com/seleniumtests/core/runner/SeleniumRobotTestListener.java | issue #73: quit and restart browser when retrying test |
|
Java | apache-2.0 | 2569c1ef2aa9d71b0a88bdb9d51f1bf3a143d58f | 0 | MatthewTamlin/SlidingIntroScreen | /*
* Copyright 2016 Matthew Tamlin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.matthewtamlin.testapp;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.LinearLayout;
import com.matthewtamlin.sliding_intro_screen_library.MultiviewParallaxTransformer;
/**
* Test the activity when a parallax page transformer is set. The activity should show three
* parallax pages, each with front and back images. When scrolled, the front image should scroll
* faster than the other page elements.
*/
public class TestTransformer extends ThreePageTestBase {
private final MultiviewParallaxTransformer transformer = new MultiviewParallaxTransformer();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setPageTransformer(false, transformer);
LinearLayout layout = new LinearLayout(this);
layout.setOrientation(LinearLayout.VERTICAL);
getRootView().addView(layout);
Button addFrontImage = new Button(this);
layout.addView(addFrontImage);
addFrontImage.setText("Add front image parallax");
addFrontImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
transformer.withParallaxView(R.id.page_fragment_imageHolderFront, 0.5f);
}
});
Button addBackImage = new Button(this);
layout.addView(addBackImage);
addBackImage.setText("Add back image parallax");
addBackImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
transformer.withParallaxView(R.id.page_fragment_imageHolderBack, 1f);
}
});
Button removeFrontImage = new Button(this);
layout.addView(removeFrontImage);
removeFrontImage.setText("Remove front image parallax");
removeFrontImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
transformer.withoutParallaxView(R.id.page_fragment_imageHolderFront);
}
});
Button removeBackImage = new Button(this);
layout.addView(removeBackImage);
removeBackImage.setText("Remove back image parallax");
removeBackImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
transformer.withoutParallaxView(R.id.page_fragment_imageHolderBack);
}
});
}
} | testapp/src/main/java/com/matthewtamlin/testapp/TestTransformer.java | /*
* Copyright 2016 Matthew Tamlin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.matthewtamlin.testapp;
import android.os.Bundle;
import com.matthewtamlin.sliding_intro_screen_library.MultilayerParallaxTransformer;
/**
 * Test the activity when a parallax page transformer is set. The activity should show three
 * parallax pages, each with front and back images. When scrolled, the front image should scroll
 * faster than the other page elements.
 */
public class TestTransformer extends ThreePageTestBase {
	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		// Apply a parallax transformer where the front image scrolls at half the
		// speed of the rest of the page content.
		setPageTransformer(false, MultilayerParallaxTransformer.newInstance()
				.withLayer(R.id.page_fragment_imageHolderFront, 0.5f));
	}
}
- Now tests new transformer class
| testapp/src/main/java/com/matthewtamlin/testapp/TestTransformer.java | Updated TestTransformer.java |
|
Java | apache-2.0 | 05119f62e544d2b14978a3f6443535bb67305c1a | 0 | wasperf/acmeair,wasperf/acmeair,wasperf/acmeair,wasperf/acmeair | package com.acmeair.mongo;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Logger;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoClientURI;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import com.mongodb.client.MongoDatabase;
//import com.mongodb.async.client.*;
/**
 * Process-wide owner of the MongoDB connection for Acme Air.
 *
 * <p>Configuration is resolved in layers: defaults (localhost:27017, db "acmeair"),
 * then an optional properties file named by the ACMEAIR_PROPERTIES environment
 * variable (host/port/credentials plus {@link MongoClientOptions} tuning), then —
 * when running on Cloud Foundry — VCAP_SERVICES credentials, which take precedence
 * over the host/port configuration.
 */
public class ConnectionManager implements MongoConstants{

	private static AtomicReference<ConnectionManager> connectionManager = new AtomicReference<ConnectionManager>();

	private final static Logger logger = Logger.getLogger(ConnectionManager.class.getName());

	protected MongoClient mongoClient;
	protected MongoDatabase db;

	/**
	 * Returns the singleton ConnectionManager, creating it on first use.
	 * Uses double-checked locking around the AtomicReference so only one
	 * instance is ever constructed.
	 */
	public static ConnectionManager getConnectionManager() {
		if (connectionManager.get() == null) {
			synchronized (connectionManager) {
				if (connectionManager.get() == null) {
					connectionManager.set(new ConnectionManager());
				}
			}
		}
		return connectionManager.get();
	}

	private ConnectionManager (){

		// Set default client options, and then check if there is a properties file.
		String hostname = "localhost";
		int port = 27017;
		String dbname = "acmeair";
		String username = null;
		String password = null;

		Properties prop = new Properties();
		String acmeairProps = System.getenv("ACMEAIR_PROPERTIES");
		ServerAddress dbAddress = null;
		MongoClientOptions.Builder options = new MongoClientOptions.Builder();

		if(acmeairProps != null){
			// FIX: the FileInputStream was previously never closed; try-with-resources
			// releases it even when Properties.load() throws.
			try (FileInputStream propStream = new FileInputStream(acmeairProps)) {
				logger.info("Reading mongo.properties file");
				prop.load(propStream);

				if (prop.containsKey("hostname")){
					hostname = prop.getProperty("hostname");
				}
				if (prop.containsKey("port")){
					port = Integer.parseInt(prop.getProperty("port"));
				}
				if (prop.containsKey("dbname")){
					dbname = prop.getProperty("dbname");
				}
				if (prop.containsKey("username")){
					username = prop.getProperty("username");
				}
				if (prop.containsKey("password")){
					password = prop.getProperty("password");
				}
				// Connection-pool and socket tuning, all optional.
				if (prop.containsKey("connectionsPerHost")){
					options.connectionsPerHost(Integer.parseInt(prop.getProperty("connectionsPerHost")));
				}
				if (prop.containsKey("minConnectionsPerHost")){
					options.minConnectionsPerHost(Integer.parseInt(prop.getProperty("minConnectionsPerHost")));
				}
				if (prop.containsKey("maxWaitTime")){
					options.maxWaitTime(Integer.parseInt(prop.getProperty("maxWaitTime")));
				}
				if (prop.containsKey("connectTimeout")){
					options.connectTimeout(Integer.parseInt(prop.getProperty("connectTimeout")));
				}
				if (prop.containsKey("socketTimeout")){
					options.socketTimeout(Integer.parseInt(prop.getProperty("socketTimeout")));
				}
				if (prop.containsKey("socketKeepAlive")){
					options.socketKeepAlive(Boolean.parseBoolean(prop.getProperty("socketKeepAlive")));
				}
				if (prop.containsKey("sslEnabled")){
					options.sslEnabled(Boolean.parseBoolean(prop.getProperty("sslEnabled")));
				}
				if (prop.containsKey("threadsAllowedToBlockForConnectionMultiplier")){
					options.threadsAllowedToBlockForConnectionMultiplier(Integer.parseInt(prop.getProperty("threadsAllowedToBlockForConnectionMultiplier")));
				}
			}catch (IOException ioe){
				logger.severe("Exception when trying to read from the mongo.properties file" + ioe.getMessage());
			}
		}

		MongoClientOptions builtOptions = options.build();

		try {
			//Check if VCAP_SERVICES exist, and if it does, look up the url from the credentials.
			String vcapJSONString = System.getenv("VCAP_SERVICES");
			if (vcapJSONString != null) {
				logger.info("Reading VCAP_SERVICES");
				Object jsonObject = JSONValue.parse(vcapJSONString);
				JSONObject vcapServices = (JSONObject)jsonObject;
				JSONArray mongoServiceArray =null;
				// First matching service key wins; both MongoLab-style ("mongo*") and
				// user-provided (e.g. Compose) bindings are recognized.
				for (Object key : vcapServices.keySet()){
					if (key.toString().startsWith("mongo")){
						mongoServiceArray = (JSONArray) vcapServices.get(key);
						logger.info("Service Type : MongoLAB - " + key.toString());
						break;
					}
					if (key.toString().startsWith("user-provided")){
						mongoServiceArray = (JSONArray) vcapServices.get(key);
						logger.info("Service Type : MongoDB by Compose - " + key.toString());
						break;
					}
				}

				if (mongoServiceArray == null) {
					logger.severe("VCAP_SERVICES existed, but a mongo service was not defined.");
				} else {
					// Connect with the bound service's URL; its database name overrides dbname.
					JSONObject mongoService = (JSONObject)mongoServiceArray.get(0);
					JSONObject credentials = (JSONObject)mongoService.get("credentials");
					String url = (String) credentials.get("url");
					logger.fine("service url = " + url);
					MongoClientURI mongoURI = new MongoClientURI(url, options);
					mongoClient = new MongoClient(mongoURI);
					dbname = mongoURI.getDatabase();
				}
			}else {
				//VCAP_SERVICES don't exist, so use the DB resource
				dbAddress = new ServerAddress (hostname, port);

				// If username & password exists, connect DB with username & password
				if ((username == null)||(password == null)){
					mongoClient = new MongoClient(dbAddress, builtOptions);
				}else {
					List<MongoCredential> credentials = new ArrayList<>();
					credentials.add(MongoCredential.createCredential(username, dbname, password.toCharArray()));
					mongoClient = new MongoClient(dbAddress,credentials, builtOptions);
				}
			}

			db = mongoClient.getDatabase(dbname);

			logger.info("#### Mongo DB is created with DB name " + dbname + " ####");
			logger.info("#### MongoClient Options ####");
			logger.info("maxConnectionsPerHost : "+ builtOptions.getConnectionsPerHost());
			logger.info("minConnectionsPerHost : "+ builtOptions.getMinConnectionsPerHost());
			logger.info("maxWaitTime : "+ builtOptions.getMaxWaitTime());
			logger.info("connectTimeout : "+ builtOptions.getConnectTimeout());
			logger.info("socketTimeout : "+ builtOptions.getSocketTimeout());
			logger.info("socketKeepAlive : "+ builtOptions.isSocketKeepAlive());
			logger.info("sslEnabled : "+ builtOptions.isSslEnabled());
			logger.info("threadsAllowedToBlockForConnectionMultiplier : "+ builtOptions.getThreadsAllowedToBlockForConnectionMultiplier());
			logger.info("Complete List : "+ builtOptions.toString());

		}catch (Exception e) {
			logger.severe("Caught Exception : " + e.getMessage() );
		}
	}

	/** Returns the shared database handle (null if the connection attempt failed). */
	public MongoDatabase getDB(){
		return db;
	}
}
| acmeair-services-mongo/src/main/java/com/acmeair/mongo/ConnectionManager.java | package com.acmeair.mongo;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Logger;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoClientURI;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import com.mongodb.client.MongoDatabase;
//import com.mongodb.async.client.*;
/**
 * Process-wide owner of the MongoDB connection for Acme Air.
 *
 * <p>Configuration is resolved in layers: defaults (localhost:27017, db "acmeair"),
 * then an optional properties file named by the ACMEAIR_PROPERTIES environment
 * variable, then — when running on Cloud Foundry — VCAP_SERVICES credentials.
 */
public class ConnectionManager implements MongoConstants{

	private static AtomicReference<ConnectionManager> connectionManager = new AtomicReference<ConnectionManager>();

	private final static Logger logger = Logger.getLogger(ConnectionManager.class.getName());

	protected MongoClient mongoClient;
	protected MongoDatabase db;

	/**
	 * Returns the singleton ConnectionManager, creating it on first use
	 * (double-checked locking around the AtomicReference).
	 */
	public static ConnectionManager getConnectionManager() {
		if (connectionManager.get() == null) {
			synchronized (connectionManager) {
				if (connectionManager.get() == null) {
					connectionManager.set(new ConnectionManager());
				}
			}
		}
		return connectionManager.get();
	}

	private ConnectionManager (){

		// Set default client options, and then check if there is a properties file.
		String hostname = "localhost";
		int port = 27017;
		String dbname = "acmeair";
		String username = null;
		String password = null;

		Properties prop = new Properties();
		String acmeairProps = System.getenv("ACMEAIR_PROPERTIES");
		ServerAddress dbAddress = null;
		MongoClientOptions.Builder options = new MongoClientOptions.Builder();

		if(acmeairProps != null){
			try {
				logger.info("Reading mongo.properties file");
				// NOTE(review): this FileInputStream is never closed (resource leak);
				// consider try-with-resources.
				prop.load(new FileInputStream(acmeairProps));

				if (prop.containsKey("hostname")){
					hostname = prop.getProperty("hostname");
				}
				if (prop.containsKey("port")){
					port = Integer.parseInt(prop.getProperty("port"));
				}
				if (prop.containsKey("dbname")){
					dbname = prop.getProperty("dbname");
				}
				if (prop.containsKey("username")){
					username = prop.getProperty("username");
				}
				if (prop.containsKey("password")){
					password = prop.getProperty("password");
				}
				// Connection-pool and socket tuning, all optional.
				if (prop.containsKey("connectionsPerHost")){
					options.connectionsPerHost(Integer.parseInt(prop.getProperty("connectionsPerHost")));
				}
				if (prop.containsKey("minConnectionsPerHost")){
					options.minConnectionsPerHost(Integer.parseInt(prop.getProperty("minConnectionsPerHost")));
				}
				if (prop.containsKey("maxWaitTime")){
					options.maxWaitTime(Integer.parseInt(prop.getProperty("maxWaitTime")));
				}
				if (prop.containsKey("connectTimeout")){
					options.connectTimeout(Integer.parseInt(prop.getProperty("connectTimeout")));
				}
				if (prop.containsKey("socketTimeout")){
					options.socketTimeout(Integer.parseInt(prop.getProperty("socketTimeout")));
				}
				if (prop.containsKey("socketKeepAlive")){
					options.socketKeepAlive(Boolean.parseBoolean(prop.getProperty("socketKeepAlive")));
				}
				if (prop.containsKey("sslEnabled")){
					options.sslEnabled(Boolean.parseBoolean(prop.getProperty("sslEnabled")));
				}
				if (prop.containsKey("threadsAllowedToBlockForConnectionMultiplier")){
					options.threadsAllowedToBlockForConnectionMultiplier(Integer.parseInt(prop.getProperty("threadsAllowedToBlockForConnectionMultiplier")));
				}
			}catch (IOException ioe){
				logger.severe("Exception when trying to read from the mongo.properties file" + ioe.getMessage());
			}
		}

		MongoClientOptions builtOptions = options.build();

		try {
			//Check if VCAP_SERVICES exist, and if it does, look up the url from the credentials.
			String vcapJSONString = System.getenv("VCAP_SERVICES");
			if (vcapJSONString != null) {
				logger.info("Reading VCAP_SERVICES");
				Object jsonObject = JSONValue.parse(vcapJSONString);
				JSONObject vcapServices = (JSONObject)jsonObject;
				JSONArray mongoServiceArray =null;
				// Only service keys prefixed with "mongo" are recognized; first match wins.
				for (Object key : vcapServices.keySet()){
					if (key.toString().startsWith("mongo")){
						mongoServiceArray = (JSONArray) vcapServices.get(key);
						break;
					}
				}

				if (mongoServiceArray == null) {
					logger.severe("VCAP_SERVICES existed, but a mongo service was not definied.");
				} else {
					// Connect with the bound service's URL; its database name overrides dbname.
					JSONObject mongoService = (JSONObject)mongoServiceArray.get(0);
					JSONObject credentials = (JSONObject)mongoService.get("credentials");
					String url = (String) credentials.get("url");
					logger.fine("service url = " + url);
					MongoClientURI mongoURI = new MongoClientURI(url, options);
					mongoClient = new MongoClient(mongoURI);
					dbname = mongoURI.getDatabase();
				}
			}else {
				//VCAP_SERVICES don't exist, so use the DB resource
				dbAddress = new ServerAddress (hostname, port);

				// If username & password exists, connect DB with username & password
				if ((username == null)||(password == null)){
					mongoClient = new MongoClient(dbAddress, builtOptions);
				}else {
					List<MongoCredential> credentials = new ArrayList<>();
					credentials.add(MongoCredential.createCredential(username, dbname, password.toCharArray()));
					mongoClient = new MongoClient(dbAddress,credentials, builtOptions);
				}
			}

			db = mongoClient.getDatabase(dbname);

			logger.info("#### Mongo DB is created with DB name " + dbname + " ####");
			logger.info("#### MongoClient Options ####");
			logger.info("maxConnectionsPerHost : "+ builtOptions.getConnectionsPerHost());
			logger.info("minConnectionsPerHost : "+ builtOptions.getMinConnectionsPerHost());
			logger.info("maxWaitTime : "+ builtOptions.getMaxWaitTime());
			logger.info("connectTimeout : "+ builtOptions.getConnectTimeout());
			logger.info("socketTimeout : "+ builtOptions.getSocketTimeout());
			logger.info("socketKeepAlive : "+ builtOptions.isSocketKeepAlive());
			logger.info("sslEnabled : "+ builtOptions.isSslEnabled());
			logger.info("threadsAllowedToBlockForConnectionMultiplier : "+ builtOptions.getThreadsAllowedToBlockForConnectionMultiplier());
			logger.info("Complete List : "+ builtOptions.toString());

		}catch (Exception e) {
			logger.severe("Caught Exception : " + e.getMessage() );
		}
	}

	/** Returns the shared database handle (null if the connection attempt failed). */
	public MongoDatabase getDB(){
		return db;
	}
}
| Updating to support Compose Service (user-provided type) | acmeair-services-mongo/src/main/java/com/acmeair/mongo/ConnectionManager.java | Updating to support Compose Service (user-provided type) |
|
Java | apache-2.0 | 09a31d398f3ce35961efdf53240a482f76f766b0 | 0 | tombolaltd/cordova-plugin-inappbrowser,tombolaltd/cordova-plugin-inappbrowser,tombolaltd/cordova-plugin-inappbrowser | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.cordova.inappbrowser;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.provider.Browser;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.text.InputType;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.webkit.CookieManager;
import android.webkit.CookieSyncManager;
import android.webkit.HttpAuthHandler;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.Config;
import org.apache.cordova.CordovaArgs;
import org.apache.cordova.CordovaHttpAuthHandler;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.LOG;
import org.apache.cordova.PluginManager;
import org.apache.cordova.PluginResult;
import org.json.JSONException;
import org.json.JSONObject;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.StringTokenizer;
@SuppressLint("SetJavaScriptEnabled")
public class InAppBrowser extends CordovaPlugin {
private static final String NULL = "null";
protected static final String LOG_TAG = "InAppBrowser";
private static final String SELF = "_self";
private static final String SYSTEM = "_system";
private static final String EXIT_EVENT = "exit";
private static final String LOCATION = "location";
private static final String ZOOM = "zoom";
private static final String HIDDEN = "hidden";
private static final String LOAD_START_EVENT = "loadstart";
private static final String LOAD_STOP_EVENT = "loadstop";
private static final String LOAD_ERROR_EVENT = "loaderror";
private static final String CLEAR_ALL_CACHE = "clearcache";
private static final String CLEAR_SESSION_CACHE = "clearsessioncache";
private static final String HARDWARE_BACK_BUTTON = "hardwareback";
private static final String MEDIA_PLAYBACK_REQUIRES_USER_ACTION = "mediaPlaybackRequiresUserAction";
private static final String SHOULD_PAUSE = "shouldPauseOnSuspend";
private InAppBrowserDialog dialog;
private WebView inAppWebView;
private EditText edittext;
private CallbackContext callbackContext;
private boolean showLocationBar = true;
private boolean showZoomControls = true;
private boolean openWindowHidden = false;
private boolean clearAllCache = false;
private boolean clearSessionCache = false;
private boolean hadwareBackButton = true;
private boolean mediaPlaybackRequiresUserGesture = false;
private boolean shouldPauseInAppBrowser = false;
boolean reOpenOnNextPageFinished = false;
    /**
     * Executes the request and returns PluginResult.
     *
     * <p>Supported actions: "open" (dispatch a URL to the Cordova webview, the system
     * browser, or the InAppBrowser dialog based on target), "close", the four
     * injectScript/Style Code/File actions, "show", "hide" and "reveal".
     *
     * @param action the action to execute.
     * @param args JSONArray of arguments for the plugin.
     * @param callbackContext the callbackContext used when calling back into JavaScript.
     * @return true when the action was recognized; false tells Cordova the action is invalid.
     */
    public boolean execute(String action, CordovaArgs args, final CallbackContext callbackContext) throws JSONException {
        if (action.equals("open")) {
            this.callbackContext = callbackContext;
            final String url = args.getString(0);
            String t = args.optString(1);
            // Missing/empty/"null" target defaults to _self.
            if (t == null || t.equals("") || t.equals(NULL)) {
                t = SELF;
            }
            final String target = t;
            final HashMap<String, Boolean> features = parseFeature(args.optString(2));

            LOG.d(LOG_TAG, "target = " + target);

            // All UI work (webview loads, dialogs) must happen on the UI thread.
            this.cordova.getActivity().runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    String result = "";
                    // SELF
                    if (SELF.equals(target)) {
                        LOG.d(LOG_TAG, "in self");
                        /* This code exists for compatibility between 3.x and 4.x versions of Cordova.
                         * Previously the Config class had a static method, isUrlWhitelisted(). That
                         * responsibility has been moved to the plugins, with an aggregating method in
                         * PluginManager.
                         */
                        Boolean shouldAllowNavigation = shouldAllowNavigation(url);
                        // load in webview
                        if (Boolean.TRUE.equals(shouldAllowNavigation)) {
                            LOG.d(LOG_TAG, "loading in webview");
                            webView.loadUrl(url);
                        }
                        //Load the dialer
                        else if (url.startsWith(WebView.SCHEME_TEL))
                        {
                            try {
                                LOG.d(LOG_TAG, "loading in dialer");
                                Intent intent = new Intent(Intent.ACTION_DIAL);
                                intent.setData(Uri.parse(url));
                                cordova.getActivity().startActivity(intent);
                            } catch (android.content.ActivityNotFoundException e) {
                                LOG.e(LOG_TAG, "Error dialing " + url + ": " + e.toString());
                            }
                        }
                        // load in InAppBrowser
                        else {
                            LOG.d(LOG_TAG, "loading in InAppBrowser");
                            result = showWebPage(url, features);
                        }
                    }
                    // SYSTEM: hand the URL off to the platform's default handler.
                    else if (SYSTEM.equals(target)) {
                        LOG.d(LOG_TAG, "in system");
                        result = openExternal(url);
                    }
                    // BLANK - or anything else: open the InAppBrowser dialog.
                    else {
                        LOG.d(LOG_TAG, "in blank");
                        result = showWebPage(url, features);
                    }

                    PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, result);
                    // Keep the callback alive so later load/exit events can reuse it.
                    pluginResult.setKeepCallback(true);
                    callbackContext.sendPluginResult(pluginResult);
                }
            });
        }
        else if (action.equals("close")) {
            closeDialog();
        }
        else if (action.equals("injectScriptCode")) {
            // When a JS callback was requested, wrap the code so its eval result is
            // reported back through the gap-iab:// prompt bridge.
            String jsWrapper = null;
            if (args.getBoolean(1)) {
                jsWrapper = String.format("(function(){prompt(JSON.stringify([eval(%%s)]), 'gap-iab://%s')})()", callbackContext.getCallbackId());
            }
            injectDeferredObject(args.getString(0), jsWrapper);
        }
        else if (action.equals("injectScriptFile")) {
            String jsWrapper;
            if (args.getBoolean(1)) {
                jsWrapper = String.format("(function(d) { var c = d.createElement('script'); c.src = %%s; c.onload = function() { prompt('', 'gap-iab://%s'); }; d.body.appendChild(c); })(document)", callbackContext.getCallbackId());
            } else {
                jsWrapper = "(function(d) { var c = d.createElement('script'); c.src = %s; d.body.appendChild(c); })(document)";
            }
            injectDeferredObject(args.getString(0), jsWrapper);
        }
        else if (action.equals("injectStyleCode")) {
            String jsWrapper;
            if (args.getBoolean(1)) {
                jsWrapper = String.format("(function(d) { var c = d.createElement('style'); c.innerHTML = %%s; d.body.appendChild(c); prompt('', 'gap-iab://%s');})(document)", callbackContext.getCallbackId());
            } else {
                jsWrapper = "(function(d) { var c = d.createElement('style'); c.innerHTML = %s; d.body.appendChild(c); })(document)";
            }
            injectDeferredObject(args.getString(0), jsWrapper);
        }
        else if (action.equals("injectStyleFile")) {
            String jsWrapper;
            if (args.getBoolean(1)) {
                jsWrapper = String.format("(function(d) { var c = d.createElement('link'); c.rel='stylesheet'; c.type='text/css'; c.href = %%s; d.head.appendChild(c); prompt('', 'gap-iab://%s');})(document)", callbackContext.getCallbackId());
            } else {
                jsWrapper = "(function(d) { var c = d.createElement('link'); c.rel='stylesheet'; c.type='text/css'; c.href = %s; d.head.appendChild(c); })(document)";
            }
            injectDeferredObject(args.getString(0), jsWrapper);
        }
        else if (action.equals("show")) {
            showDialogue();
        }
        else if (action.equals("hide")) {
            this.cordova.getActivity().runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    if(dialog != null) {
                        dialog.hide();
                    }
                }
            });
            PluginResult pluginResult = new PluginResult(PluginResult.Status.OK);
            pluginResult.setKeepCallback(true);
            this.callbackContext.sendPluginResult(pluginResult);
        }
        else if (action.equals("reveal")) {
            revealDialog(args);
        }
        else {
            return false;
        }
        return true;
    }
    /**
     * Reveals a hidden InAppBrowser dialog, optionally navigating to a URL first.
     *
     * <p>With no URL argument (or an empty/"null" one) the dialog is simply shown.
     * Otherwise the URL is whitelist-checked; if the webview is already on that URL
     * the dialog is shown immediately, else the webview navigates there first and
     * the dialog is shown later (presumably from onPageFinished, which consumes the
     * reOpenOnNextPageFinished flag — TODO confirm against the WebViewClient code).
     *
     * @param args position 0 optionally holds the URL to reveal on
     * @throws JSONException if the URL argument cannot be read
     */
    public void revealDialog(CordovaArgs args) throws JSONException {
        if(args.isNull(0)) {
            showDialogue();
            return;
        }

        final String url = args.getString(0);
        if (url == null || url.equals("") || url.equals(NULL)) {
            showDialogue();
            return;
        }

        // Silently refuse to reveal on a URL the whitelist rejects.
        if(!shouldAllowNavigation(url, "shouldAllowRequest") ) {
            return;
        }

        this.cordova.getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                // No browser open (or nothing loaded yet): nothing to reveal.
                if(null == inAppWebView || null == inAppWebView.getUrl()){
                    return;
                }

                if(inAppWebView.getUrl().equals(url)){
                    showDialogue();
                }
                else {
                    reOpenOnNextPageFinished = true;
                    navigate(url);
                }
            }
        });
    }
public void showDialogue() {
this.cordova.getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
if(dialog != null) {
dialog.show();
}
}
});
PluginResult pluginResult = new PluginResult(PluginResult.Status.OK);
pluginResult.setKeepCallback(true);
this.callbackContext.sendPluginResult(pluginResult);
}
    /**
     * Whitelist check using the default "shouldAllowNavigation" policy.
     *
     * @param url the URL to check
     * @return TRUE/FALSE from the whitelist, or null when no check could be performed
     */
    public Boolean shouldAllowNavigation(String url) {
        return shouldAllowNavigation(url, "shouldAllowNavigation");
    }
    /**
     * Whitelist check that works across Cordova versions via reflection.
     *
     * <p>javascript: URLs are always allowed. Otherwise it first tries the Cordova 3.x
     * static Config.isUrlWhiteListed(url); if that method does not exist, it falls back
     * to the Cordova 4.x PluginManager method named by {@code pluginManagerMethod}.
     *
     * @param url the URL to check
     * @param pluginManagerMethod PluginManager method name to invoke (e.g. "shouldAllowNavigation")
     * @return TRUE/FALSE from whichever check succeeded, or null when neither API was found
     */
    public Boolean shouldAllowNavigation(String url, String pluginManagerMethod) {
        Boolean shouldAllowNavigation = null;
        if (url.startsWith("javascript:")) {
            shouldAllowNavigation = true;
        }
        if (shouldAllowNavigation == null) {
            // Cordova 3.x path: static Config.isUrlWhiteListed(String).
            try {
                Method iuw = Config.class.getMethod("isUrlWhiteListed", String.class);
                shouldAllowNavigation = (Boolean)iuw.invoke(null, url);
            } catch (NoSuchMethodException e) {
                LOG.d(LOG_TAG, e.getLocalizedMessage());
            } catch (IllegalAccessException e) {
                LOG.d(LOG_TAG, e.getLocalizedMessage());
            } catch (InvocationTargetException e) {
                LOG.d(LOG_TAG, e.getLocalizedMessage());
            }
        }
        if (shouldAllowNavigation == null) {
            // Cordova 4.x path: webView.getPluginManager().<pluginManagerMethod>(url).
            try {
                Method gpm = webView.getClass().getMethod("getPluginManager");
                PluginManager pm = (PluginManager)gpm.invoke(webView);
                Method san = pm.getClass().getMethod(pluginManagerMethod, String.class);
                shouldAllowNavigation = (Boolean)san.invoke(pm, url);
            } catch (NoSuchMethodException e) {
                LOG.d(LOG_TAG, e.getLocalizedMessage());
            } catch (IllegalAccessException e) {
                LOG.d(LOG_TAG, e.getLocalizedMessage());
            } catch (InvocationTargetException e) {
                LOG.d(LOG_TAG, e.getLocalizedMessage());
            }
        }
        return shouldAllowNavigation;
    }
    /**
     * Called when the view navigates. Closes any open InAppBrowser dialog so it does
     * not outlive the page that opened it.
     */
    @Override
    public void onReset() {
        closeDialog();
    }
/**
* Called when the system is about to start resuming a previous activity.
*/
@Override
public void onPause(boolean multitasking) {
if (shouldPauseInAppBrowser) {
inAppWebView.onPause();
}
}
/**
* Called when the activity will start interacting with the user.
*/
@Override
public void onResume(boolean multitasking) {
if (shouldPauseInAppBrowser) {
inAppWebView.onResume();
}
}
    /**
     * Called by AccelBroker when listener is to be shut down.
     * Stop listener. Ensures the dialog and webview are torn down with the plugin.
     */
    public void onDestroy() {
        closeDialog();
    }
/**
* Inject an object (script or style) into the InAppBrowser WebView.
*
* This is a helper method for the inject{Script|Style}{Code|File} API calls, which
* provides a consistent method for injecting JavaScript code into the document.
*
* If a wrapper string is supplied, then the source string will be JSON-encoded (adding
* quotes) and wrapped using string formatting. (The wrapper string should have a single
* '%s' marker)
*
* @param source The source object (filename or script/style text) to inject into
* the document.
* @param jsWrapper A JavaScript string to wrap the source string in, so that the object
* is properly injected, or null if the source string is JavaScript text
* which should be executed directly.
*/
private void injectDeferredObject(String source, String jsWrapper) {
String scriptToInject;
if (jsWrapper != null) {
org.json.JSONArray jsonEsc = new org.json.JSONArray();
jsonEsc.put(source);
String jsonRepr = jsonEsc.toString();
String jsonSourceString = jsonRepr.substring(1, jsonRepr.length()-1);
scriptToInject = String.format(jsWrapper, jsonSourceString);
} else {
scriptToInject = source;
}
final String finalScriptToInject = scriptToInject;
this.cordova.getActivity().runOnUiThread(new Runnable() {
@SuppressLint("NewApi")
@Override
public void run() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
// This action will have the side-effect of blurring the currently focused element
inAppWebView.loadUrl("javascript:" + finalScriptToInject);
} else {
inAppWebView.evaluateJavascript(finalScriptToInject, null);
}
}
});
}
/**
 * Parses the comma-separated feature string ("key=value,key2=value2") into
 * a map of feature name to boolean: "no" maps to FALSE, anything else
 * (including a missing value) maps to TRUE.
 *
 * @param optString raw feature list, or the literal string "null"
 * @return the parsed map, or null when optString is "null"
 */
private HashMap<String, Boolean> parseFeature(String optString) {
    if (optString.equals(NULL)) {
        return null;
    }
    HashMap<String, Boolean> map = new HashMap<String, Boolean>();
    StringTokenizer features = new StringTokenizer(optString, ",");
    while (features.hasMoreElements()) {
        StringTokenizer option = new StringTokenizer(features.nextToken(), "=");
        if (option.hasMoreElements()) {
            String key = option.nextToken();
            // Guard against malformed entries with no '=value' part; the
            // previous code called nextToken() unconditionally and threw
            // NoSuchElementException for inputs like "location".
            Boolean value = option.hasMoreElements() && option.nextToken().equals("no")
                    ? Boolean.FALSE : Boolean.TRUE;
            map.put(key, value);
        }
    }
    return map;
}
/**
 * Opens the given URL in an external activity (typically the system
 * browser).
 *
 * @param url the url to load.
 * @return "" if ok, or the error message when no activity can handle it.
 */
public String openExternal(String url) {
    try {
        // Direct initialization; the old "Intent intent = null;" followed by
        // reassignment was redundant.
        Intent intent = new Intent(Intent.ACTION_VIEW);
        // Omitting the MIME type for file: URLs causes "No Activity found to handle Intent".
        // Adding the MIME type to http: URLs causes them to not be handled by the downloader.
        Uri uri = Uri.parse(url);
        if ("file".equals(uri.getScheme())) {
            intent.setDataAndType(uri, webView.getResourceApi().getMimeType(uri));
        } else {
            intent.setData(uri);
        }
        // Tag the intent with our package so the external app knows who
        // launched it.
        intent.putExtra(Browser.EXTRA_APPLICATION_ID, cordova.getActivity().getPackageName());
        this.cordova.getActivity().startActivity(intent);
        return "";
    } catch (android.content.ActivityNotFoundException e) {
        LOG.d(LOG_TAG, "InAppBrowser: Error loading url "+url+":"+ e.toString());
        return e.toString();
    }
}
/**
 * Closes the dialog.
 *
 * Runs on the UI thread: installs a WebViewClient that dismisses the
 * dialog once "about:blank" finishes loading, navigates the child WebView
 * to "about:blank", destroys it, and fires the "exit" event to JS.
 */
public void closeDialog() {
    this.cordova.getActivity().runOnUiThread(new Runnable() {
        @Override
        public void run() {
            final WebView childView = inAppWebView;
            // The JS protects against multiple calls, so this should happen only when
            // closeDialog() is called by other native code.
            if (childView == null) {
                return;
            }
            childView.setWebViewClient(new WebViewClient() {
                // NB: wait for about:blank before dismissing
                public void onPageFinished(WebView view, String url) {
                    if (dialog != null) {
                        dialog.dismiss();
                        dialog = null;
                    }
                }
            });
            // NB: From SDK 19: "If you call methods on WebView from any thread
            // other than your app's UI thread, it can cause unexpected results."
            // http://developer.android.com/guide/webapps/migrating.html#Threads
            childView.loadUrl("about:blank");
            // NOTE(review): destroy() is called immediately after loadUrl();
            // presumably onPageFinished still fires before teardown completes —
            // confirm before reordering these statements.
            childView.destroy();
            try {
                JSONObject obj = new JSONObject();
                obj.put("type", EXIT_EVENT);
                sendUpdate(obj, false);
            } catch (JSONException ex) {
                LOG.d(LOG_TAG, "Should never happen");
            }
        }
    });
}
/**
 * Navigates one step back in the in-app WebView's history when possible;
 * otherwise does nothing.
 */
public void goBack() {
    final WebView browser = this.inAppWebView;
    if (browser.canGoBack()) {
        browser.goBack();
    }
}
/**
 * Reports whether the in-app WebView has history to navigate back to.
 *
 * @return boolean
 */
public boolean canGoBack() {
    return inAppWebView.canGoBack();
}
/**
 * Whether the hardware back button has been configured (via the
 * "hardwareback" feature) to navigate in-app history instead of closing
 * the dialog.
 * NOTE(review): the backing field name "hadwareBackButton" is a typo for
 * "hardwareBackButton"; left as-is because it is referenced elsewhere in
 * this file.
 * @return boolean
 */
public boolean hardwareBack() {
    return hadwareBackButton;
}
/**
 * Navigates one step forward in the in-app WebView's history when
 * possible; otherwise does nothing.
 */
private void goForward() {
    final WebView browser = this.inAppWebView;
    if (browser.canGoForward()) {
        browser.goForward();
    }
}
/**
 * Loads a new page in the in-app WebView, hiding the soft keyboard first
 * and prefixing "http://" when the input carries no recognized scheme.
 *
 * @param url to load
 */
private void navigate(String url) {
    // Dismiss the soft keyboard that may be open from the location bar.
    InputMethodManager imm = (InputMethodManager) this.cordova.getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
    imm.hideSoftInputFromWindow(edittext.getWindowToken(), 0);
    boolean hasScheme = url.startsWith("http") || url.startsWith("file:");
    this.inAppWebView.loadUrl(hasScheme ? url : "http://" + url);
    this.inAppWebView.requestFocus();
}
/**
 * Whether the location (URL) bar should be shown in the browser dialog.
 *
 * @return boolean
 */
private boolean getShowLocationBar() {
    return showLocationBar;
}
/**
 * Returns this plugin instance; used by inner classes and the dialog
 * wiring that need a reference to the enclosing InAppBrowser.
 */
private InAppBrowser getInAppBrowser(){
    return this;
}
/**
 * Display a new browser with the specified URL.
 *
 * Parses the feature map into instance flags, then builds the dialog
 * (toolbar with back/forward/close buttons and a read-only location bar,
 * plus the child WebView) on the UI thread.
 *
 * @param url      the url to load.
 * @param features feature name -> enabled map from parseFeature(); may be null.
 * @return "" — the dialog is created asynchronously on the UI thread.
 */
public String showWebPage(final String url, HashMap<String, Boolean> features) {
    // Determine if we should hide the location bar.
    showLocationBar = true;
    showZoomControls = true;
    openWindowHidden = false;
    mediaPlaybackRequiresUserGesture = false;
    if (features != null) {
        Boolean show = features.get(LOCATION);
        if (show != null) {
            showLocationBar = show.booleanValue();
        }
        Boolean zoom = features.get(ZOOM);
        if (zoom != null) {
            showZoomControls = zoom.booleanValue();
        }
        Boolean hidden = features.get(HIDDEN);
        if (hidden != null) {
            openWindowHidden = hidden.booleanValue();
        }
        Boolean hardwareBack = features.get(HARDWARE_BACK_BUTTON);
        if (hardwareBack != null) {
            hadwareBackButton = hardwareBack.booleanValue();
        }
        Boolean mediaPlayback = features.get(MEDIA_PLAYBACK_REQUIRES_USER_ACTION);
        if (mediaPlayback != null) {
            mediaPlaybackRequiresUserGesture = mediaPlayback.booleanValue();
        }
        // clearcache takes precedence over clearsessioncache when both are set.
        Boolean cache = features.get(CLEAR_ALL_CACHE);
        if (cache != null) {
            clearAllCache = cache.booleanValue();
        } else {
            cache = features.get(CLEAR_SESSION_CACHE);
            if (cache != null) {
                clearSessionCache = cache.booleanValue();
            }
        }
        Boolean shouldPause = features.get(SHOULD_PAUSE);
        if (shouldPause != null) {
            shouldPauseInAppBrowser = shouldPause.booleanValue();
        }
    }
    final CordovaWebView thatWebView = this.webView;
    // Create dialog in new thread
    Runnable runnable = new Runnable() {
        /**
         * Convert our DIP units to Pixels
         *
         * @return int
         */
        private int dpToPixels(int dipValue) {
            int value = (int) TypedValue.applyDimension( TypedValue.COMPLEX_UNIT_DIP,
                    (float) dipValue,
                    cordova.getActivity().getResources().getDisplayMetrics()
            );
            return value;
        }
        @SuppressLint("NewApi")
        public void run() {
            // CB-6702 InAppBrowser hangs when opening more than one instance
            if (dialog != null) {
                dialog.dismiss();
            }
            // Let's create the main dialog
            dialog = new InAppBrowserDialog(cordova.getActivity(), android.R.style.Theme_NoTitleBar);
            dialog.getWindow().getAttributes().windowAnimations = android.R.style.Animation_Dialog;
            dialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
            dialog.setCancelable(true);
            dialog.setInAppBroswer(getInAppBrowser());
            // Main container layout
            LinearLayout main = new LinearLayout(cordova.getActivity());
            main.setOrientation(LinearLayout.VERTICAL);
            // Toolbar layout
            RelativeLayout toolbar = new RelativeLayout(cordova.getActivity());
            //Please, no more black!
            toolbar.setBackgroundColor(android.graphics.Color.LTGRAY);
            toolbar.setLayoutParams(new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT, this.dpToPixels(44)));
            toolbar.setPadding(this.dpToPixels(2), this.dpToPixels(2), this.dpToPixels(2), this.dpToPixels(2));
            toolbar.setHorizontalGravity(Gravity.LEFT);
            toolbar.setVerticalGravity(Gravity.TOP);
            // Action Button Container layout
            RelativeLayout actionButtonContainer = new RelativeLayout(cordova.getActivity());
            actionButtonContainer.setLayoutParams(new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
            actionButtonContainer.setHorizontalGravity(Gravity.LEFT);
            actionButtonContainer.setVerticalGravity(Gravity.CENTER_VERTICAL);
            actionButtonContainer.setId(Integer.valueOf(1));
            // Back button
            ImageButton back = new ImageButton(cordova.getActivity());
            RelativeLayout.LayoutParams backLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
            backLayoutParams.addRule(RelativeLayout.ALIGN_LEFT);
            back.setLayoutParams(backLayoutParams);
            back.setContentDescription("Back Button");
            back.setId(Integer.valueOf(2));
            Resources activityRes = cordova.getActivity().getResources();
            int backResId = activityRes.getIdentifier("ic_action_previous_item", "drawable", cordova.getActivity().getPackageName());
            Drawable backIcon = activityRes.getDrawable(backResId);
            if (Build.VERSION.SDK_INT >= 16)
                back.setBackground(null);
            else
                back.setBackgroundDrawable(null);
            back.setImageDrawable(backIcon);
            back.setScaleType(ImageView.ScaleType.FIT_CENTER);
            back.setPadding(0, this.dpToPixels(10), 0, this.dpToPixels(10));
            // NOTE(review): getAdjustViewBounds() is a getter; this call has no
            // effect (setAdjustViewBounds(true) was probably intended). Left
            // unchanged to preserve current layout behavior.
            if (Build.VERSION.SDK_INT >= 16)
                back.getAdjustViewBounds();
            back.setOnClickListener(new View.OnClickListener() {
                public void onClick(View v) {
                    goBack();
                }
            });
            // Forward button
            ImageButton forward = new ImageButton(cordova.getActivity());
            RelativeLayout.LayoutParams forwardLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
            forwardLayoutParams.addRule(RelativeLayout.RIGHT_OF, 2);
            forward.setLayoutParams(forwardLayoutParams);
            forward.setContentDescription("Forward Button");
            forward.setId(Integer.valueOf(3));
            int fwdResId = activityRes.getIdentifier("ic_action_next_item", "drawable", cordova.getActivity().getPackageName());
            Drawable fwdIcon = activityRes.getDrawable(fwdResId);
            if (Build.VERSION.SDK_INT >= 16)
                forward.setBackground(null);
            else
                forward.setBackgroundDrawable(null);
            forward.setImageDrawable(fwdIcon);
            forward.setScaleType(ImageView.ScaleType.FIT_CENTER);
            forward.setPadding(0, this.dpToPixels(10), 0, this.dpToPixels(10));
            if (Build.VERSION.SDK_INT >= 16)
                forward.getAdjustViewBounds();
            forward.setOnClickListener(new View.OnClickListener() {
                public void onClick(View v) {
                    goForward();
                }
            });
            // Edit Text Box
            edittext = new EditText(cordova.getActivity());
            RelativeLayout.LayoutParams textLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
            textLayoutParams.addRule(RelativeLayout.RIGHT_OF, 1);
            textLayoutParams.addRule(RelativeLayout.LEFT_OF, 5);
            edittext.setLayoutParams(textLayoutParams);
            edittext.setId(Integer.valueOf(4));
            edittext.setSingleLine(true);
            edittext.setText(url);
            edittext.setInputType(InputType.TYPE_TEXT_VARIATION_URI);
            edittext.setImeOptions(EditorInfo.IME_ACTION_GO);
            edittext.setInputType(InputType.TYPE_NULL); // Will not except input... Makes the text NON-EDITABLE
            edittext.setOnKeyListener(new View.OnKeyListener() {
                public boolean onKey(View v, int keyCode, KeyEvent event) {
                    // If the event is a key-down event on the "enter" button
                    if ((event.getAction() == KeyEvent.ACTION_DOWN) && (keyCode == KeyEvent.KEYCODE_ENTER)) {
                        navigate(edittext.getText().toString());
                        return true;
                    }
                    return false;
                }
            });
            // Close/Done button
            ImageButton close = new ImageButton(cordova.getActivity());
            RelativeLayout.LayoutParams closeLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
            closeLayoutParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
            close.setLayoutParams(closeLayoutParams);
            // Fix: the content description was previously set on the forward
            // button (copy/paste bug), leaving the close button unlabeled for
            // accessibility services.
            close.setContentDescription("Close Button");
            close.setId(Integer.valueOf(5));
            int closeResId = activityRes.getIdentifier("ic_action_remove", "drawable", cordova.getActivity().getPackageName());
            Drawable closeIcon = activityRes.getDrawable(closeResId);
            if (Build.VERSION.SDK_INT >= 16)
                close.setBackground(null);
            else
                close.setBackgroundDrawable(null);
            close.setImageDrawable(closeIcon);
            close.setScaleType(ImageView.ScaleType.FIT_CENTER);
            // Fix: padding was previously re-applied to the back button here
            // (copy/paste bug) instead of the close button.
            close.setPadding(0, this.dpToPixels(10), 0, this.dpToPixels(10));
            if (Build.VERSION.SDK_INT >= 16)
                close.getAdjustViewBounds();
            close.setOnClickListener(new View.OnClickListener() {
                public void onClick(View v) {
                    closeDialog();
                }
            });
            // WebView
            inAppWebView = new WebView(cordova.getActivity());
            inAppWebView.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
            inAppWebView.setId(Integer.valueOf(6));
            inAppWebView.setWebChromeClient(new InAppChromeClient(thatWebView));
            WebViewClient client = new InAppBrowserClient(thatWebView, edittext);
            inAppWebView.setWebViewClient(client);
            WebSettings settings = inAppWebView.getSettings();
            settings.setJavaScriptEnabled(true);
            settings.setJavaScriptCanOpenWindowsAutomatically(true);
            settings.setBuiltInZoomControls(showZoomControls);
            settings.setPluginState(android.webkit.WebSettings.PluginState.ON);
            if(android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) {
                settings.setMediaPlaybackRequiresUserGesture(mediaPlaybackRequiresUserGesture);
            }
            // Optional user-agent overrides from config.xml preferences.
            String overrideUserAgent = preferences.getString("OverrideUserAgent", null);
            String appendUserAgent = preferences.getString("AppendUserAgent", null);
            if (overrideUserAgent != null) {
                settings.setUserAgentString(overrideUserAgent);
            }
            if (appendUserAgent != null) {
                settings.setUserAgentString(settings.getUserAgentString() + appendUserAgent);
            }
            //Toggle whether this is enabled or not!
            Bundle appSettings = cordova.getActivity().getIntent().getExtras();
            boolean enableDatabase = appSettings == null ? true : appSettings.getBoolean("InAppBrowserStorageEnabled", true);
            if (enableDatabase) {
                String databasePath = cordova.getActivity().getApplicationContext().getDir("inAppBrowserDB", Context.MODE_PRIVATE).getPath();
                settings.setDatabasePath(databasePath);
                settings.setDatabaseEnabled(true);
            }
            settings.setDomStorageEnabled(true);
            if (clearAllCache) {
                CookieManager.getInstance().removeAllCookie();
            } else if (clearSessionCache) {
                CookieManager.getInstance().removeSessionCookie();
            }
            inAppWebView.loadUrl(url);
            // (removed a duplicate inAppWebView.setId(6) call that appeared here)
            inAppWebView.getSettings().setLoadWithOverviewMode(true);
            inAppWebView.getSettings().setUseWideViewPort(true);
            inAppWebView.requestFocus();
            inAppWebView.requestFocusFromTouch();
            // Add the back and forward buttons to our action button container layout
            actionButtonContainer.addView(back);
            actionButtonContainer.addView(forward);
            // Add the views to our toolbar
            toolbar.addView(actionButtonContainer);
            toolbar.addView(edittext);
            toolbar.addView(close);
            // Don't add the toolbar if its been disabled
            if (getShowLocationBar()) {
                // Add our toolbar to our main view/layout
                main.addView(toolbar);
            }
            // Add our webview to our main view/layout
            main.addView(inAppWebView);
            WindowManager.LayoutParams lp = new WindowManager.LayoutParams();
            lp.copyFrom(dialog.getWindow().getAttributes());
            lp.width = WindowManager.LayoutParams.MATCH_PARENT;
            lp.height = WindowManager.LayoutParams.MATCH_PARENT;
            dialog.setContentView(main);
            dialog.show();
            dialog.getWindow().setAttributes(lp);
            // the goal of openhidden is to load the url and not display it
            // Show() needs to be called to cause the URL to be loaded
            if(openWindowHidden) {
                dialog.hide();
            }
        }
    };
    this.cordova.getActivity().runOnUiThread(runnable);
    return "";
}
/**
 * Create a new plugin success result and send it back to JavaScript.
 * Convenience overload that always reports Status.OK.
 *
 * @param obj a JSONObject contain event payload information
 * @param keepCallback true to keep the JS callback alive for further events
 */
private void sendUpdate(JSONObject obj, boolean keepCallback) {
    sendUpdate(obj, keepCallback, PluginResult.Status.OK);
}
/**
 * Creates a PluginResult with the given status and payload and delivers it
 * to the JavaScript callback, releasing the callback context when this is
 * the final update for the current open() invocation.
 *
 * @param obj          a JSONObject containing event payload information
 * @param keepCallback true to keep the JS callback alive for further events
 * @param status       the status code to return to the JavaScript environment
 */
private void sendUpdate(JSONObject obj, boolean keepCallback, PluginResult.Status status) {
    if (callbackContext == null) {
        return;
    }
    PluginResult result = new PluginResult(status, obj);
    result.setKeepCallback(keepCallback);
    callbackContext.sendPluginResult(result);
    if (!keepCallback) {
        // Final event; drop the reference so stale updates are ignored.
        callbackContext = null;
    }
}
/**
 * The webview client receives notifications about appView: it routes
 * special URL schemes to external activities, mirrors the current URL into
 * the location bar, and forwards load lifecycle events to JavaScript.
 */
public class InAppBrowserClient extends WebViewClient {
    EditText edittext;   // location-bar text field kept in sync with the URL
    CordovaWebView webView;   // main Cordova WebView, used for plugin lookups
    /**
     * Constructor.
     *
     * @param webView   the main Cordova WebView
     * @param mEditText the location-bar EditText to keep updated
     */
    public InAppBrowserClient(CordovaWebView webView, EditText mEditText) {
        this.webView = webView;
        this.edittext = mEditText;
    }
    /**
     * Override the URL that should be loaded
     *
     * This handles a small subset of all the URIs that would be encountered:
     * tel:, geo:, mailto:, market:, intent: and sms: URLs are dispatched to
     * an external activity; returning false lets the in-app WebView load
     * everything else itself.
     *
     * @param webView
     * @param url
     */
    @Override
    public boolean shouldOverrideUrlLoading(WebView webView, String url) {
        if (url.startsWith(WebView.SCHEME_TEL)) {
            try {
                Intent intent = new Intent(Intent.ACTION_DIAL);
                intent.setData(Uri.parse(url));
                cordova.getActivity().startActivity(intent);
                return true;
            } catch (android.content.ActivityNotFoundException e) {
                LOG.e(LOG_TAG, "Error dialing " + url + ": " + e.toString());
            }
        } else if (url.startsWith("geo:") || url.startsWith(WebView.SCHEME_MAILTO) || url.startsWith("market:") || url.startsWith("intent:")) {
            try {
                Intent intent = new Intent(Intent.ACTION_VIEW);
                intent.setData(Uri.parse(url));
                cordova.getActivity().startActivity(intent);
                return true;
            } catch (android.content.ActivityNotFoundException e) {
                LOG.e(LOG_TAG, "Error with " + url + ": " + e.toString());
            }
        }
        // If sms:5551212?body=This is the message
        else if (url.startsWith("sms:")) {
            try {
                Intent intent = new Intent(Intent.ACTION_VIEW);
                // Get address
                String address = null;
                int parmIndex = url.indexOf('?');
                if (parmIndex == -1) {
                    address = url.substring(4);
                } else {
                    address = url.substring(4, parmIndex);
                    // If body, then set sms body
                    Uri uri = Uri.parse(url);
                    String query = uri.getQuery();
                    if (query != null) {
                        if (query.startsWith("body=")) {
                            intent.putExtra("sms_body", query.substring(5));
                        }
                    }
                }
                intent.setData(Uri.parse("sms:" + address));
                intent.putExtra("address", address);
                intent.setType("vnd.android-dir/mms-sms");
                cordova.getActivity().startActivity(intent);
                return true;
            } catch (android.content.ActivityNotFoundException e) {
                LOG.e(LOG_TAG, "Error sending sms " + url + ":" + e.toString());
            }
        }
        // Not a special scheme (or the external dispatch failed): let the
        // in-app WebView handle the URL.
        return false;
    }
    /*
     * onPageStarted fires the LOAD_START_EVENT and mirrors the new location
     * into the location bar (forcing an http:// prefix for unknown schemes).
     *
     * @param view
     * @param url
     * @param favicon
     */
    @Override
    public void onPageStarted(WebView view, String url, Bitmap favicon) {
        super.onPageStarted(view, url, favicon);
        String newloc = "";
        if (url.startsWith("http:") || url.startsWith("https:") || url.startsWith("file:")) {
            newloc = url;
        }
        else
        {
            // Assume that everything is HTTP at this point, because if we don't specify,
            // it really should be. Complain loudly about this!!!
            LOG.e(LOG_TAG, "Possible Uncaught/Unknown URI");
            newloc = "http://" + url;
        }
        // Update the UI if we haven't already
        if (!newloc.equals(edittext.getText().toString())) {
            edittext.setText(newloc);
        }
        try {
            JSONObject obj = new JSONObject();
            obj.put("type", LOAD_START_EVENT);
            obj.put("url", newloc);
            sendUpdate(obj, true);
        } catch (JSONException ex) {
            LOG.e(LOG_TAG, "URI passed in has caused a JSON error.");
        }
    }
    // Fires LOAD_STOP_EVENT, flushes cookies to persistent storage (CB-10395),
    // and re-shows the dialog when revealDialog() queued a navigation.
    // NOTE(review): missing @Override annotation.
    public void onPageFinished(WebView view, String url) {
        super.onPageFinished(view, url);
        // CB-10395 InAppBrowser's WebView not storing cookies reliable to local device storage
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
            CookieManager.getInstance().flush();
        } else {
            CookieSyncManager.getInstance().sync();
        }
        if(reOpenOnNextPageFinished){
            reOpenOnNextPageFinished = false;
            showDialogue();
        }
        try {
            JSONObject obj = new JSONObject();
            obj.put("type", LOAD_STOP_EVENT);
            obj.put("url", url);
            sendUpdate(obj, true);
        } catch (JSONException ex) {
            LOG.d(LOG_TAG, "Should never happen");
        }
    }
    // Forwards load failures to JS as LOAD_ERROR_EVENT with url/code/message.
    // NOTE(review): missing @Override annotation; this is the pre-API-23
    // (deprecated) onReceivedError overload.
    public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) {
        super.onReceivedError(view, errorCode, description, failingUrl);
        try {
            JSONObject obj = new JSONObject();
            obj.put("type", LOAD_ERROR_EVENT);
            obj.put("url", failingUrl);
            obj.put("code", errorCode);
            obj.put("message", description);
            sendUpdate(obj, true, PluginResult.Status.ERROR);
        } catch (JSONException ex) {
            LOG.d(LOG_TAG, "Should never happen");
        }
    }
    /**
     * On received http auth request.
     *
     * Delegates the challenge to any plugin exposing
     * onReceivedHttpAuthRequest via the PluginManager (looked up
     * reflectively for Cordova 3.x/4.x compatibility); falls back to the
     * default WebViewClient behavior when none handles it.
     */
    @Override
    public void onReceivedHttpAuthRequest(WebView view, HttpAuthHandler handler, String host, String realm) {
        // Check if there is some plugin which can resolve this auth challenge
        PluginManager pluginManager = null;
        try {
            Method gpm = webView.getClass().getMethod("getPluginManager");
            pluginManager = (PluginManager)gpm.invoke(webView);
        } catch (NoSuchMethodException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        } catch (IllegalAccessException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        } catch (InvocationTargetException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        }
        if (pluginManager == null) {
            try {
                // Older Cordova exposed the manager as a public field instead.
                Field pmf = webView.getClass().getField("pluginManager");
                pluginManager = (PluginManager)pmf.get(webView);
            } catch (NoSuchFieldException e) {
                LOG.d(LOG_TAG, e.getLocalizedMessage());
            } catch (IllegalAccessException e) {
                LOG.d(LOG_TAG, e.getLocalizedMessage());
            }
        }
        if (pluginManager != null && pluginManager.onReceivedHttpAuthRequest(webView, new CordovaHttpAuthHandler(handler), host, realm)) {
            return;
        }
        // By default handle 401 like we'd normally do!
        super.onReceivedHttpAuthRequest(view, handler, host, realm);
    }
}
} | src/android/InAppBrowser.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.cordova.inappbrowser;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.provider.Browser;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.text.InputType;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.webkit.CookieManager;
import android.webkit.CookieSyncManager;
import android.webkit.HttpAuthHandler;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.Config;
import org.apache.cordova.CordovaArgs;
import org.apache.cordova.CordovaHttpAuthHandler;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.LOG;
import org.apache.cordova.PluginManager;
import org.apache.cordova.PluginResult;
import org.json.JSONException;
import org.json.JSONObject;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.StringTokenizer;
@SuppressLint("SetJavaScriptEnabled")
public class InAppBrowser extends CordovaPlugin {
// Feature/option keys parsed from the third open() argument (see
// parseFeature) and event-type names sent back to JavaScript.
private static final String NULL = "null";
protected static final String LOG_TAG = "InAppBrowser";
private static final String SELF = "_self";
private static final String SYSTEM = "_system";
private static final String EXIT_EVENT = "exit";
private static final String LOCATION = "location";
private static final String ZOOM = "zoom";
private static final String HIDDEN = "hidden";
private static final String LOAD_START_EVENT = "loadstart";
private static final String LOAD_STOP_EVENT = "loadstop";
private static final String LOAD_ERROR_EVENT = "loaderror";
private static final String CLEAR_ALL_CACHE = "clearcache";
private static final String CLEAR_SESSION_CACHE = "clearsessioncache";
private static final String HARDWARE_BACK_BUTTON = "hardwareback";
private static final String MEDIA_PLAYBACK_REQUIRES_USER_ACTION = "mediaPlaybackRequiresUserAction";
private static final String SHOULD_PAUSE = "shouldPauseOnSuspend";
// State for the currently open in-app browser dialog (if any).
private InAppBrowserDialog dialog;
private WebView inAppWebView;
private EditText edittext;
// JS callback for the active open() invocation; cleared by the terminal
// sendUpdate().
private CallbackContext callbackContext;
// Options parsed from the feature string; reset on every showWebPage().
private boolean showLocationBar = true;
private boolean showZoomControls = true;
private boolean openWindowHidden = false;
private boolean clearAllCache = false;
private boolean clearSessionCache = false;
private boolean hadwareBackButton = true;   // NOTE(review): typo for "hardwareBackButton"
private boolean mediaPlaybackRequiresUserGesture = false;
private boolean shouldPauseInAppBrowser = false;
// When true, the dialog is re-shown on the next onPageFinished (set by
// revealDialog() when it must navigate before revealing).
boolean reOpenOnNextPageFinished = false;
/**
 * Executes the request and returns PluginResult.
 *
 * Supported actions: "open", "close", "injectScriptCode",
 * "injectScriptFile", "injectStyleCode", "injectStyleFile", "show",
 * "hide" and "reveal".
 *
 * @param action the action to execute.
 * @param args JSONArry of arguments for the plugin.
 * @param callbackContext the callbackContext used when calling back into JavaScript.
 * @return A PluginResult object with a status and message.
 */
public boolean execute(String action, CordovaArgs args, final CallbackContext callbackContext) throws JSONException {
    if (action.equals("open")) {
        this.callbackContext = callbackContext;
        final String url = args.getString(0);
        String t = args.optString(1);
        if (t == null || t.equals("") || t.equals(NULL)) {
            t = SELF;
        }
        final String target = t;
        final HashMap<String, Boolean> features = parseFeature(args.optString(2));
        LOG.d(LOG_TAG, "target = " + target);
        this.cordova.getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                String result = "";
                // SELF
                if (SELF.equals(target)) {
                    LOG.d(LOG_TAG, "in self");
                    /* This code exists for compatibility between 3.x and 4.x versions of Cordova.
                     * Previously the Config class had a static method, isUrlWhitelisted(). That
                     * responsibility has been moved to the plugins, with an aggregating method in
                     * PluginManager.
                     */
                    Boolean shouldAllowNavigation = shouldAllowNavigation(url);
                    // load in webview
                    if (Boolean.TRUE.equals(shouldAllowNavigation)) {
                        LOG.d(LOG_TAG, "loading in webview");
                        webView.loadUrl(url);
                    }
                    //Load the dialer
                    else if (url.startsWith(WebView.SCHEME_TEL))
                    {
                        try {
                            LOG.d(LOG_TAG, "loading in dialer");
                            Intent intent = new Intent(Intent.ACTION_DIAL);
                            intent.setData(Uri.parse(url));
                            cordova.getActivity().startActivity(intent);
                        } catch (android.content.ActivityNotFoundException e) {
                            LOG.e(LOG_TAG, "Error dialing " + url + ": " + e.toString());
                        }
                    }
                    // load in InAppBrowser
                    else {
                        LOG.d(LOG_TAG, "loading in InAppBrowser");
                        result = showWebPage(url, features);
                    }
                }
                // SYSTEM
                else if (SYSTEM.equals(target)) {
                    LOG.d(LOG_TAG, "in system");
                    result = openExternal(url);
                }
                // BLANK - or anything else
                else {
                    LOG.d(LOG_TAG, "in blank");
                    result = showWebPage(url, features);
                }
                PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, result);
                pluginResult.setKeepCallback(true);
                callbackContext.sendPluginResult(pluginResult);
            }
        });
    }
    else if (action.equals("close")) {
        closeDialog();
    }
    else if (action.equals("injectScriptCode")) {
        String jsWrapper = null;
        if (args.getBoolean(1)) {
            // Wrap so the eval result is prompted back through the
            // gap-iab:// bridge to resolve the JS callback.
            jsWrapper = String.format("(function(){prompt(JSON.stringify([eval(%%s)]), 'gap-iab://%s')})()", callbackContext.getCallbackId());
        }
        injectDeferredObject(args.getString(0), jsWrapper);
    }
    else if (action.equals("injectScriptFile")) {
        String jsWrapper;
        if (args.getBoolean(1)) {
            jsWrapper = String.format("(function(d) { var c = d.createElement('script'); c.src = %%s; c.onload = function() { prompt('', 'gap-iab://%s'); }; d.body.appendChild(c); })(document)", callbackContext.getCallbackId());
        } else {
            jsWrapper = "(function(d) { var c = d.createElement('script'); c.src = %s; d.body.appendChild(c); })(document)";
        }
        injectDeferredObject(args.getString(0), jsWrapper);
    }
    else if (action.equals("injectStyleCode")) {
        String jsWrapper;
        if (args.getBoolean(1)) {
            jsWrapper = String.format("(function(d) { var c = d.createElement('style'); c.innerHTML = %%s; d.body.appendChild(c); prompt('', 'gap-iab://%s');})(document)", callbackContext.getCallbackId());
        } else {
            jsWrapper = "(function(d) { var c = d.createElement('style'); c.innerHTML = %s; d.body.appendChild(c); })(document)";
        }
        injectDeferredObject(args.getString(0), jsWrapper);
    }
    else if (action.equals("injectStyleFile")) {
        String jsWrapper;
        if (args.getBoolean(1)) {
            jsWrapper = String.format("(function(d) { var c = d.createElement('link'); c.rel='stylesheet'; c.type='text/css'; c.href = %%s; d.head.appendChild(c); prompt('', 'gap-iab://%s');})(document)", callbackContext.getCallbackId());
        } else {
            jsWrapper = "(function(d) { var c = d.createElement('link'); c.rel='stylesheet'; c.type='text/css'; c.href = %s; d.head.appendChild(c); })(document)";
        }
        injectDeferredObject(args.getString(0), jsWrapper);
    }
    else if (action.equals("show")) {
        showDialogue();
    }
    else if (action.equals("hide")) {
        this.cordova.getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                if(dialog != null) {
                    dialog.hide();
                }
            }
        });
        // NOTE(review): this.callbackContext is the field set by "open"; if
        // "hide" arrives before any open() (or after the callback was
        // released) this NPEs — confirm and guard like sendUpdate() does.
        PluginResult pluginResult = new PluginResult(PluginResult.Status.OK);
        pluginResult.setKeepCallback(true);
        this.callbackContext.sendPluginResult(pluginResult);
    }
    else if (action.equals("reveal")) {
        revealDialog(args);
    }
    else {
        // Unknown action: tell Cordova this plugin does not handle it.
        return false;
    }
    return true;
}
/**
 * Re-shows the hidden InAppBrowser dialog, optionally navigating to a new
 * URL first.
 *
 * When no URL argument is supplied (or it is empty/"null"), the dialog is
 * simply shown. Otherwise the URL is checked against the request
 * whitelist; if the WebView is already on that URL the dialog is shown
 * immediately, else the page is loaded and the dialog revealed once it
 * finishes (via reOpenOnNextPageFinished / onPageFinished).
 *
 * @param args plugin args; index 0 is the optional target URL
 * @throws JSONException if the URL argument cannot be read
 */
public void revealDialog(CordovaArgs args) throws JSONException {
    if(args.isNull(0)) {
        showDialogue();
        return;
    }
    final String url = args.getString(0);
    if (url == null || url.equals("") || url.equals(NULL)) {
        showDialogue();
        return;
    }
    // Fix: shouldAllowNavigation may return null when no whitelist mechanism
    // is found; the previous bare "!" auto-unboxed it and could NPE. Treat
    // null as "deny".
    if (!Boolean.TRUE.equals(shouldAllowNavigation(url, "shouldAllowRequest"))) {
        return;
    }
    this.cordova.getActivity().runOnUiThread(new Runnable() {
        @Override
        public void run() {
            if(null == inAppWebView || null == inAppWebView.getUrl()){
                return;
            }
            if(inAppWebView.getUrl().equals(url)){
                showDialogue();
            }
            else {
                reOpenOnNextPageFinished = true;
                navigate(url);
            }
        }
    });
}
/**
 * Shows the (possibly hidden) InAppBrowser dialog on the UI thread and
 * acknowledges the pending JavaScript callback.
 */
public void showDialogue() {
    this.cordova.getActivity().runOnUiThread(new Runnable() {
        @Override
        public void run() {
            if(dialog != null) {
                dialog.show();
            }
        }
    });
    // Fix: the callback may already have been released (sendUpdate() nulls
    // it after the terminal event); the previous unconditional call NPE'd.
    if (this.callbackContext != null) {
        PluginResult pluginResult = new PluginResult(PluginResult.Status.OK);
        pluginResult.setKeepCallback(true);
        this.callbackContext.sendPluginResult(pluginResult);
    }
}
// public Boolean shouldAllowNavigation2(String url) {
// Boolean shouldAllowNavigation = null;
// injectDeferredObject(null, "(function(){console.log('checking whitelist')})()");
// if (url.startsWith("javascript:")) {
// injectDeferredObject(null, "(function(){console.log('is JS!!')})()");
// shouldAllowNavigation = true;
// }
// if (shouldAllowNavigation == null) {
// try {
// injectDeferredObject(null, "(function(){console.log('config')})()");
// Method iuw = Config.class.getMethod("isUrlWhiteListed", String.class);
//
// shouldAllowNavigation = (Boolean)iuw.invoke(null, url);
// } catch (NoSuchMethodException e) {
// injectDeferredObject(null, "(function(){console.log('No Method')})()");
// LOG.d(LOG_TAG, e.getLocalizedMessage());
// } catch (IllegalAccessException e) {
// injectDeferredObject(null, "(function(){console.log('Illegal Access')})()");
// LOG.d(LOG_TAG, e.getLocalizedMessage());
// } catch (InvocationTargetException e) {
// injectDeferredObject(null, "(function(){console.log('Invocation target')})()");
// LOG.d(LOG_TAG, e.getLocalizedMessage());
// }
// }
// if (shouldAllowNavigation == null) {
// try {
// injectDeferredObject(null, "(function(){console.log('plugin manager')})()");
// Method gpm = webView.getClass().getMethod("getPluginManager");
// PluginManager pm = (PluginManager)gpm.invoke(webView);
// Method san = pm.getClass().getMethod("shouldAllowRequest", String.class);
// shouldAllowNavigation = (Boolean)san.invoke(pm, url);
// } catch (NoSuchMethodException e) {
// injectDeferredObject(null, "(function(){console.log('No Method')})()");
// LOG.d(LOG_TAG, e.getLocalizedMessage());
// } catch (IllegalAccessException e) {
// injectDeferredObject(null, "(function(){console.log('Illegal Access')})()");
// LOG.d(LOG_TAG, e.getLocalizedMessage());
// } catch (InvocationTargetException e) {
// injectDeferredObject(null, "(function(){console.log('Invocation target')})()");
// LOG.d(LOG_TAG, e.getLocalizedMessage());
// }
// }
// injectDeferredObject(null, "(function(){alert('" + url + "')})()");
// injectDeferredObject(null, "(function(){alert('" + shouldAllowNavigation + "')})()");
// return shouldAllowNavigation;
// }
/**
 * Convenience overload: checks the URL against the PluginManager's
 * "shouldAllowNavigation" policy.
 *
 * @param url the URL to check
 * @return TRUE/FALSE from the policy, or null when no mechanism was found
 */
public Boolean shouldAllowNavigation(String url) {
    return shouldAllowNavigation(url, "shouldAllowNavigation");
}
/**
 * Decides whether the given URL may be loaded/navigated to.
 *
 * Resolution order:
 *   1. "javascript:" URLs are always allowed.
 *   2. {@code Config.isUrlWhiteListed(String)} is tried via reflection
 *      (the method is not present on every cordova-android version,
 *      hence the reflective lookup).
 *   3. Failing that, the app WebView's PluginManager is asked via the
 *      reflected method named by {@code pluginManagerMethod}.
 *
 * @param url                 the URL being navigated to
 * @param pluginManagerMethod name of the PluginManager method to consult
 *                            (e.g. "shouldAllowNavigation")
 * @return TRUE/FALSE when one of the checks produced a decision, or null
 *         when none of the reflective lookups succeeded
 */
public Boolean shouldAllowNavigation(String url, String pluginManagerMethod) {
    Boolean shouldAllowNavigation = null;
    if (url.startsWith("javascript:")) {
        shouldAllowNavigation = true;
    }
    if (shouldAllowNavigation == null) {
        try {
            // Legacy whitelist API; absent on newer cordova-android.
            Method iuw = Config.class.getMethod("isUrlWhiteListed", String.class);
            shouldAllowNavigation = (Boolean)iuw.invoke(null, url);
        } catch (NoSuchMethodException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        } catch (IllegalAccessException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        } catch (InvocationTargetException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        }
    }
    if (shouldAllowNavigation == null) {
        try {
            // Delegate to the PluginManager when the legacy API is missing.
            Method gpm = webView.getClass().getMethod("getPluginManager");
            PluginManager pm = (PluginManager)gpm.invoke(webView);
            Method san = pm.getClass().getMethod(pluginManagerMethod, String.class);
            shouldAllowNavigation = (Boolean)san.invoke(pm, url);
        } catch (NoSuchMethodException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        } catch (IllegalAccessException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        } catch (InvocationTargetException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        }
    }
    return shouldAllowNavigation;
}
/**
 * Called when the view navigates. Closes the InAppBrowser dialog, if open.
 */
@Override
public void onReset() {
    closeDialog();
}
/**
 * Called when the system is about to start resuming a previous activity.
 *
 * @param multitasking flag indicating if multitasking is turned on for app
 */
@Override
public void onPause(boolean multitasking) {
    // Only pause the child WebView when the "shouldPause" feature was enabled.
    if (shouldPauseInAppBrowser) {
        inAppWebView.onPause();
    }
}
/**
 * Called when the activity will start interacting with the user.
 *
 * @param multitasking flag indicating if multitasking is turned on for app
 */
@Override
public void onResume(boolean multitasking) {
    // Mirror of onPause(): resume the child WebView if it was paused.
    if (shouldPauseInAppBrowser) {
        inAppWebView.onResume();
    }
}
/**
 * Called by AccelBroker when listener is to be shut down.
 * Stop listener; closes the dialog and tears down the child WebView.
 */
public void onDestroy() {
    closeDialog();
}
/**
 * Inject an object (script or style) into the InAppBrowser WebView.
 *
 * This is a helper method for the inject{Script|Style}{Code|File} API calls, which
 * provides a consistent method for injecting JavaScript code into the document.
 *
 * If a wrapper string is supplied, then the source string will be JSON-encoded (adding
 * quotes) and wrapped using string formatting. (The wrapper string should have a single
 * '%s' marker)
 *
 * @param source    The source object (filename or script/style text) to inject into
 *                  the document.
 * @param jsWrapper A JavaScript string to wrap the source string in, so that the object
 *                  is properly injected, or null if the source string is JavaScript text
 *                  which should be executed directly.
 */
private void injectDeferredObject(String source, String jsWrapper) {
    String scriptToInject;
    if (jsWrapper != null) {
        // JSON-encode "source" by placing it in a one-element JSONArray and
        // stripping the surrounding brackets; what remains is a correctly
        // quoted and escaped JS string literal for the %s slot in jsWrapper.
        org.json.JSONArray jsonEsc = new org.json.JSONArray();
        jsonEsc.put(source);
        String jsonRepr = jsonEsc.toString();
        String jsonSourceString = jsonRepr.substring(1, jsonRepr.length()-1);
        scriptToInject = String.format(jsWrapper, jsonSourceString);
    } else {
        scriptToInject = source;
    }
    final String finalScriptToInject = scriptToInject;
    // WebView methods must be invoked on the UI thread.
    this.cordova.getActivity().runOnUiThread(new Runnable() {
        @SuppressLint("NewApi")
        @Override
        public void run() {
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
                // This action will have the side-effect of blurring the currently focused element
                inAppWebView.loadUrl("javascript:" + finalScriptToInject);
            } else {
                // evaluateJavascript is only available from API 19 (KitKat).
                inAppWebView.evaluateJavascript(finalScriptToInject, null);
            }
        }
    });
}
/**
 * Put the list of features into a hash map.
 *
 * Parses a comma-separated "key=value,key=value" option string into a map of
 * feature name to enabled flag. Any value other than "no" is treated as
 * enabled. A malformed entry without a value (no '=') is skipped; the
 * previous implementation called nextToken() unconditionally and threw
 * NoSuchElementException on such input.
 *
 * @param optString the raw option string, or NULL when no features were given
 * @return the parsed feature map, or null when optString equals NULL
 */
private HashMap<String, Boolean> parseFeature(String optString) {
    if (optString.equals(NULL)) {
        return null;
    } else {
        HashMap<String, Boolean> map = new HashMap<String, Boolean>();
        StringTokenizer features = new StringTokenizer(optString, ",");
        StringTokenizer option;
        while (features.hasMoreElements()) {
            option = new StringTokenizer(features.nextToken(), "=");
            if (option.hasMoreElements()) {
                String key = option.nextToken();
                // Guard: an entry like "location" (no "=value") has no second
                // token; skip it instead of crashing.
                if (option.hasMoreElements()) {
                    Boolean value = option.nextToken().equals("no") ? Boolean.FALSE : Boolean.TRUE;
                    map.put(key, value);
                }
            }
        }
        return map;
    }
}
/**
 * Display a new browser with the specified URL.
 *
 * Hands the URL off to the system via ACTION_VIEW rather than opening it
 * in the in-app WebView.
 *
 * @param url the url to load.
 * @return "" if ok, or error message.
 */
public String openExternal(String url) {
    try {
        Intent intent = null;
        intent = new Intent(Intent.ACTION_VIEW);
        // Omitting the MIME type for file: URLs causes "No Activity found to handle Intent".
        // Adding the MIME type to http: URLs causes them to not be handled by the downloader.
        Uri uri = Uri.parse(url);
        if ("file".equals(uri.getScheme())) {
            intent.setDataAndType(uri, webView.getResourceApi().getMimeType(uri));
        } else {
            intent.setData(uri);
        }
        // Identify this app as the source of the view request.
        intent.putExtra(Browser.EXTRA_APPLICATION_ID, cordova.getActivity().getPackageName());
        this.cordova.getActivity().startActivity(intent);
        return "";
    } catch (android.content.ActivityNotFoundException e) {
        LOG.d(LOG_TAG, "InAppBrowser: Error loading url "+url+":"+ e.toString());
        return e.toString();
    }
}
/**
 * Closes the dialog.
 *
 * Runs on the UI thread: loads "about:blank" into the child WebView and only
 * dismisses the dialog once that blank page has finished loading, then fires
 * the "exit" event back to JavaScript.
 */
public void closeDialog() {
    this.cordova.getActivity().runOnUiThread(new Runnable() {
        @Override
        public void run() {
            final WebView childView = inAppWebView;
            // The JS protects against multiple calls, so this should happen only when
            // closeDialog() is called by other native code.
            if (childView == null) {
                return;
            }
            // Swap in a client whose only job is to dismiss the dialog after
            // about:blank has loaded.
            childView.setWebViewClient(new WebViewClient() {
                // NB: wait for about:blank before dismissing
                public void onPageFinished(WebView view, String url) {
                    if (dialog != null) {
                        dialog.dismiss();
                        dialog = null;
                    }
                }
            });
            // NB: From SDK 19: "If you call methods on WebView from any thread
            // other than your app's UI thread, it can cause unexpected results."
            // http://developer.android.com/guide/webapps/migrating.html#Threads
            childView.loadUrl("about:blank");
            childView.destroy();
            try {
                JSONObject obj = new JSONObject();
                obj.put("type", EXIT_EVENT);
                // false: this is the terminal event for the JS callback.
                sendUpdate(obj, false);
            } catch (JSONException ex) {
                LOG.d(LOG_TAG, "Should never happen");
            }
        }
    });
}
/**
 * Checks to see if it is possible to go back one page in history, then does so.
 */
public void goBack() {
    if (!this.inAppWebView.canGoBack()) {
        return;
    }
    this.inAppWebView.goBack();
}
/**
 * Can the web browser go back?
 *
 * @return boolean true when the child WebView has back history
 */
public boolean canGoBack() {
    return this.inAppWebView.canGoBack();
}
/**
 * Has the user set the hardware back button to go back
 *
 * @return boolean value of the hardware-back feature flag captured in showWebPage()
 */
// NOTE(review): the backing field name "hadwareBackButton" looks like a typo of
// "hardwareBackButton"; it is declared and referenced elsewhere, so left as-is.
public boolean hardwareBack() {
    return hadwareBackButton;
}
/**
 * Checks to see if it is possible to go forward one page in history, then does so.
 */
private void goForward() {
    if (!this.inAppWebView.canGoForward()) {
        return;
    }
    this.inAppWebView.goForward();
}
/**
 * Navigate to the new page.
 *
 * Hides the soft keyboard, loads the URL into the child WebView (prepending
 * "http://" when the URL has no "http"/"file:" prefix), and refocuses the view.
 *
 * @param url to load
 */
private void navigate(String url) {
    InputMethodManager imm = (InputMethodManager)this.cordova.getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
    imm.hideSoftInputFromWindow(edittext.getWindowToken(), 0);
    // NOTE(review): startsWith("http") also matches strings like "httpfoo",
    // and any other scheme (mailto:, about:, ...) gets "http://" prepended
    // here -- confirm this is the intended behavior.
    if (!url.startsWith("http") && !url.startsWith("file:")) {
        this.inAppWebView.loadUrl("http://" + url);
    } else {
        this.inAppWebView.loadUrl(url);
    }
    this.inAppWebView.requestFocus();
}
/**
 * Should we show the location bar?
 *
 * @return boolean the "location" feature flag captured by showWebPage()
 */
private boolean getShowLocationBar() {
    return this.showLocationBar;
}
// Accessor for the enclosing plugin instance, for use inside anonymous
// inner classes where "this" refers to the listener/Runnable.
private InAppBrowser getInAppBrowser(){
    return this;
}
/**
 * Display a new browser with the specified URL.
 *
 * Captures the feature flags into the plugin's fields, then builds the
 * browser dialog (toolbar with back/forward/close buttons, location bar,
 * and child WebView) on the UI thread and shows it (or keeps it hidden
 * when the "hidden" feature is set).
 *
 * Fixes relative to the previous revision: the "Close Button" content
 * description was applied to the forward button (overwriting its
 * "Forward Button" label), and the close button's padding was applied to
 * the back button a second time.
 *
 * @param url      the url to load.
 * @param features feature flags as parsed by parseFeature(), may be null
 * @return always "" (the dialog is created asynchronously)
 */
public String showWebPage(final String url, HashMap<String, Boolean> features) {
    // Determine if we should hide the location bar.
    showLocationBar = true;
    showZoomControls = true;
    openWindowHidden = false;
    mediaPlaybackRequiresUserGesture = false;
    if (features != null) {
        Boolean show = features.get(LOCATION);
        if (show != null) {
            showLocationBar = show.booleanValue();
        }
        Boolean zoom = features.get(ZOOM);
        if (zoom != null) {
            showZoomControls = zoom.booleanValue();
        }
        Boolean hidden = features.get(HIDDEN);
        if (hidden != null) {
            openWindowHidden = hidden.booleanValue();
        }
        Boolean hardwareBack = features.get(HARDWARE_BACK_BUTTON);
        if (hardwareBack != null) {
            hadwareBackButton = hardwareBack.booleanValue();
        }
        Boolean mediaPlayback = features.get(MEDIA_PLAYBACK_REQUIRES_USER_ACTION);
        if (mediaPlayback != null) {
            mediaPlaybackRequiresUserGesture = mediaPlayback.booleanValue();
        }
        Boolean cache = features.get(CLEAR_ALL_CACHE);
        if (cache != null) {
            clearAllCache = cache.booleanValue();
        } else {
            cache = features.get(CLEAR_SESSION_CACHE);
            if (cache != null) {
                clearSessionCache = cache.booleanValue();
            }
        }
        Boolean shouldPause = features.get(SHOULD_PAUSE);
        if (shouldPause != null) {
            shouldPauseInAppBrowser = shouldPause.booleanValue();
        }
    }
    final CordovaWebView thatWebView = this.webView;
    // Create dialog in new thread
    Runnable runnable = new Runnable() {
        /**
         * Convert our DIP units to Pixels
         *
         * @return int
         */
        private int dpToPixels(int dipValue) {
            int value = (int) TypedValue.applyDimension( TypedValue.COMPLEX_UNIT_DIP,
                    (float) dipValue,
                    cordova.getActivity().getResources().getDisplayMetrics()
            );
            return value;
        }

        @SuppressLint("NewApi")
        public void run() {
            // CB-6702 InAppBrowser hangs when opening more than one instance
            if (dialog != null) {
                dialog.dismiss();
            }
            // Let's create the main dialog
            dialog = new InAppBrowserDialog(cordova.getActivity(), android.R.style.Theme_NoTitleBar);
            dialog.getWindow().getAttributes().windowAnimations = android.R.style.Animation_Dialog;
            dialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
            dialog.setCancelable(true);
            dialog.setInAppBroswer(getInAppBrowser());
            // Main container layout
            LinearLayout main = new LinearLayout(cordova.getActivity());
            main.setOrientation(LinearLayout.VERTICAL);
            // Toolbar layout
            RelativeLayout toolbar = new RelativeLayout(cordova.getActivity());
            //Please, no more black!
            toolbar.setBackgroundColor(android.graphics.Color.LTGRAY);
            toolbar.setLayoutParams(new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT, this.dpToPixels(44)));
            toolbar.setPadding(this.dpToPixels(2), this.dpToPixels(2), this.dpToPixels(2), this.dpToPixels(2));
            toolbar.setHorizontalGravity(Gravity.LEFT);
            toolbar.setVerticalGravity(Gravity.TOP);
            // Action Button Container layout
            RelativeLayout actionButtonContainer = new RelativeLayout(cordova.getActivity());
            actionButtonContainer.setLayoutParams(new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
            actionButtonContainer.setHorizontalGravity(Gravity.LEFT);
            actionButtonContainer.setVerticalGravity(Gravity.CENTER_VERTICAL);
            actionButtonContainer.setId(Integer.valueOf(1));
            // Back button
            ImageButton back = new ImageButton(cordova.getActivity());
            RelativeLayout.LayoutParams backLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
            backLayoutParams.addRule(RelativeLayout.ALIGN_LEFT);
            back.setLayoutParams(backLayoutParams);
            back.setContentDescription("Back Button");
            back.setId(Integer.valueOf(2));
            Resources activityRes = cordova.getActivity().getResources();
            int backResId = activityRes.getIdentifier("ic_action_previous_item", "drawable", cordova.getActivity().getPackageName());
            Drawable backIcon = activityRes.getDrawable(backResId);
            if (Build.VERSION.SDK_INT >= 16)
                back.setBackground(null);
            else
                back.setBackgroundDrawable(null);
            back.setImageDrawable(backIcon);
            back.setScaleType(ImageView.ScaleType.FIT_CENTER);
            back.setPadding(0, this.dpToPixels(10), 0, this.dpToPixels(10));
            if (Build.VERSION.SDK_INT >= 16)
                back.getAdjustViewBounds();
            back.setOnClickListener(new View.OnClickListener() {
                public void onClick(View v) {
                    goBack();
                }
            });
            // Forward button
            ImageButton forward = new ImageButton(cordova.getActivity());
            RelativeLayout.LayoutParams forwardLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
            forwardLayoutParams.addRule(RelativeLayout.RIGHT_OF, 2);
            forward.setLayoutParams(forwardLayoutParams);
            forward.setContentDescription("Forward Button");
            forward.setId(Integer.valueOf(3));
            int fwdResId = activityRes.getIdentifier("ic_action_next_item", "drawable", cordova.getActivity().getPackageName());
            Drawable fwdIcon = activityRes.getDrawable(fwdResId);
            if (Build.VERSION.SDK_INT >= 16)
                forward.setBackground(null);
            else
                forward.setBackgroundDrawable(null);
            forward.setImageDrawable(fwdIcon);
            forward.setScaleType(ImageView.ScaleType.FIT_CENTER);
            forward.setPadding(0, this.dpToPixels(10), 0, this.dpToPixels(10));
            if (Build.VERSION.SDK_INT >= 16)
                forward.getAdjustViewBounds();
            forward.setOnClickListener(new View.OnClickListener() {
                public void onClick(View v) {
                    goForward();
                }
            });
            // Edit Text Box
            edittext = new EditText(cordova.getActivity());
            RelativeLayout.LayoutParams textLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
            textLayoutParams.addRule(RelativeLayout.RIGHT_OF, 1);
            textLayoutParams.addRule(RelativeLayout.LEFT_OF, 5);
            edittext.setLayoutParams(textLayoutParams);
            edittext.setId(Integer.valueOf(4));
            edittext.setSingleLine(true);
            edittext.setText(url);
            edittext.setInputType(InputType.TYPE_TEXT_VARIATION_URI);
            edittext.setImeOptions(EditorInfo.IME_ACTION_GO);
            edittext.setInputType(InputType.TYPE_NULL); // Will not except input... Makes the text NON-EDITABLE
            edittext.setOnKeyListener(new View.OnKeyListener() {
                public boolean onKey(View v, int keyCode, KeyEvent event) {
                    // If the event is a key-down event on the "enter" button
                    if ((event.getAction() == KeyEvent.ACTION_DOWN) && (keyCode == KeyEvent.KEYCODE_ENTER)) {
                        navigate(edittext.getText().toString());
                        return true;
                    }
                    return false;
                }
            });
            // Close/Done button
            ImageButton close = new ImageButton(cordova.getActivity());
            RelativeLayout.LayoutParams closeLayoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
            closeLayoutParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
            close.setLayoutParams(closeLayoutParams);
            // FIX: this description was previously set on "forward",
            // overwriting its "Forward Button" label.
            close.setContentDescription("Close Button");
            close.setId(Integer.valueOf(5));
            int closeResId = activityRes.getIdentifier("ic_action_remove", "drawable", cordova.getActivity().getPackageName());
            Drawable closeIcon = activityRes.getDrawable(closeResId);
            if (Build.VERSION.SDK_INT >= 16)
                close.setBackground(null);
            else
                close.setBackgroundDrawable(null);
            close.setImageDrawable(closeIcon);
            close.setScaleType(ImageView.ScaleType.FIT_CENTER);
            // FIX: padding was previously applied to "back" a second time
            // instead of the close button.
            close.setPadding(0, this.dpToPixels(10), 0, this.dpToPixels(10));
            if (Build.VERSION.SDK_INT >= 16)
                close.getAdjustViewBounds();
            close.setOnClickListener(new View.OnClickListener() {
                public void onClick(View v) {
                    closeDialog();
                }
            });
            // WebView
            inAppWebView = new WebView(cordova.getActivity());
            inAppWebView.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
            inAppWebView.setId(Integer.valueOf(6));
            inAppWebView.setWebChromeClient(new InAppChromeClient(thatWebView));
            WebViewClient client = new InAppBrowserClient(thatWebView, edittext);
            inAppWebView.setWebViewClient(client);
            WebSettings settings = inAppWebView.getSettings();
            settings.setJavaScriptEnabled(true);
            settings.setJavaScriptCanOpenWindowsAutomatically(true);
            settings.setBuiltInZoomControls(showZoomControls);
            settings.setPluginState(android.webkit.WebSettings.PluginState.ON);
            if(android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) {
                settings.setMediaPlaybackRequiresUserGesture(mediaPlaybackRequiresUserGesture);
            }
            String overrideUserAgent = preferences.getString("OverrideUserAgent", null);
            String appendUserAgent = preferences.getString("AppendUserAgent", null);
            if (overrideUserAgent != null) {
                settings.setUserAgentString(overrideUserAgent);
            }
            if (appendUserAgent != null) {
                settings.setUserAgentString(settings.getUserAgentString() + appendUserAgent);
            }
            //Toggle whether this is enabled or not!
            Bundle appSettings = cordova.getActivity().getIntent().getExtras();
            boolean enableDatabase = appSettings == null ? true : appSettings.getBoolean("InAppBrowserStorageEnabled", true);
            if (enableDatabase) {
                String databasePath = cordova.getActivity().getApplicationContext().getDir("inAppBrowserDB", Context.MODE_PRIVATE).getPath();
                settings.setDatabasePath(databasePath);
                settings.setDatabaseEnabled(true);
            }
            settings.setDomStorageEnabled(true);
            if (clearAllCache) {
                CookieManager.getInstance().removeAllCookie();
            } else if (clearSessionCache) {
                CookieManager.getInstance().removeSessionCookie();
            }
            inAppWebView.loadUrl(url);
            inAppWebView.getSettings().setLoadWithOverviewMode(true);
            inAppWebView.getSettings().setUseWideViewPort(true);
            inAppWebView.requestFocus();
            inAppWebView.requestFocusFromTouch();
            // Add the back and forward buttons to our action button container layout
            actionButtonContainer.addView(back);
            actionButtonContainer.addView(forward);
            // Add the views to our toolbar
            toolbar.addView(actionButtonContainer);
            toolbar.addView(edittext);
            toolbar.addView(close);
            // Don't add the toolbar if its been disabled
            if (getShowLocationBar()) {
                // Add our toolbar to our main view/layout
                main.addView(toolbar);
            }
            // Add our webview to our main view/layout
            main.addView(inAppWebView);
            WindowManager.LayoutParams lp = new WindowManager.LayoutParams();
            lp.copyFrom(dialog.getWindow().getAttributes());
            lp.width = WindowManager.LayoutParams.MATCH_PARENT;
            lp.height = WindowManager.LayoutParams.MATCH_PARENT;
            dialog.setContentView(main);
            dialog.show();
            dialog.getWindow().setAttributes(lp);
            // the goal of openhidden is to load the url and not display it
            // Show() needs to be called to cause the URL to be loaded
            if(openWindowHidden) {
                dialog.hide();
            }
        }
    };
    this.cordova.getActivity().runOnUiThread(runnable);
    return "";
}
/**
 * Create a new plugin success result and send it back to JavaScript
 *
 * @param obj a JSONObject contain event payload information
 * @param keepCallback true to keep the JS callback alive for further events
 */
private void sendUpdate(JSONObject obj, boolean keepCallback) {
    sendUpdate(obj, keepCallback, PluginResult.Status.OK);
}
/**
 * Create a new plugin result and send it back to JavaScript
 *
 * @param obj a JSONObject contain event payload information
 * @param keepCallback true to keep the JS callback alive for further events
 * @param status the status code to return to the JavaScript environment
 */
private void sendUpdate(JSONObject obj, boolean keepCallback, PluginResult.Status status) {
    if (callbackContext != null) {
        PluginResult result = new PluginResult(status, obj);
        result.setKeepCallback(keepCallback);
        callbackContext.sendPluginResult(result);
        if (!keepCallback) {
            // Terminal event (e.g. exit): drop the context so no further
            // updates can be sent.
            callbackContext = null;
        }
    }
}
/**
 * The webview client receives notifications about appView
 */
public class InAppBrowserClient extends WebViewClient {
    // Location bar of the browser dialog, kept in sync with navigation.
    EditText edittext;
    // The Cordova app WebView; used to reach the PluginManager via reflection.
    CordovaWebView webView;

    /**
     * Constructor.
     *
     * @param webView the Cordova app WebView
     * @param mEditText the location bar EditText to update on navigation
     */
    public InAppBrowserClient(CordovaWebView webView, EditText mEditText) {
        this.webView = webView;
        this.edittext = mEditText;
    }

    /**
     * Override the URL that should be loaded
     *
     * This handles a small subset of all the URIs that would be encountered:
     * tel:, geo:, mailto:, market:, intent: and sms: URLs are dispatched to
     * the corresponding system activity; everything else loads in the WebView.
     *
     * @param webView
     * @param url
     */
    @Override
    public boolean shouldOverrideUrlLoading(WebView webView, String url) {
        if (url.startsWith(WebView.SCHEME_TEL)) {
            try {
                Intent intent = new Intent(Intent.ACTION_DIAL);
                intent.setData(Uri.parse(url));
                cordova.getActivity().startActivity(intent);
                return true;
            } catch (android.content.ActivityNotFoundException e) {
                LOG.e(LOG_TAG, "Error dialing " + url + ": " + e.toString());
            }
        } else if (url.startsWith("geo:") || url.startsWith(WebView.SCHEME_MAILTO) || url.startsWith("market:") || url.startsWith("intent:")) {
            try {
                Intent intent = new Intent(Intent.ACTION_VIEW);
                intent.setData(Uri.parse(url));
                cordova.getActivity().startActivity(intent);
                return true;
            } catch (android.content.ActivityNotFoundException e) {
                LOG.e(LOG_TAG, "Error with " + url + ": " + e.toString());
            }
        }
        // If sms:5551212?body=This is the message
        else if (url.startsWith("sms:")) {
            try {
                Intent intent = new Intent(Intent.ACTION_VIEW);
                // Get address
                String address = null;
                int parmIndex = url.indexOf('?');
                if (parmIndex == -1) {
                    address = url.substring(4);
                } else {
                    address = url.substring(4, parmIndex);
                    // If body, then set sms body
                    Uri uri = Uri.parse(url);
                    String query = uri.getQuery();
                    if (query != null) {
                        if (query.startsWith("body=")) {
                            intent.putExtra("sms_body", query.substring(5));
                        }
                    }
                }
                intent.setData(Uri.parse("sms:" + address));
                intent.putExtra("address", address);
                intent.setType("vnd.android-dir/mms-sms");
                cordova.getActivity().startActivity(intent);
                return true;
            } catch (android.content.ActivityNotFoundException e) {
                LOG.e(LOG_TAG, "Error sending sms " + url + ":" + e.toString());
            }
        }
        return false;
    }

    /*
     * onPageStarted fires the LOAD_START_EVENT
     *
     * @param view
     * @param url
     * @param favicon
     */
    @Override
    public void onPageStarted(WebView view, String url, Bitmap favicon) {
        super.onPageStarted(view, url, favicon);
        String newloc = "";
        if (url.startsWith("http:") || url.startsWith("https:") || url.startsWith("file:")) {
            newloc = url;
        }
        else
        {
            // Assume that everything is HTTP at this point, because if we don't specify,
            // it really should be. Complain loudly about this!!!
            LOG.e(LOG_TAG, "Possible Uncaught/Unknown URI");
            newloc = "http://" + url;
        }
        // Update the UI if we haven't already
        if (!newloc.equals(edittext.getText().toString())) {
            edittext.setText(newloc);
        }
        try {
            JSONObject obj = new JSONObject();
            obj.put("type", LOAD_START_EVENT);
            obj.put("url", newloc);
            sendUpdate(obj, true);
        } catch (JSONException ex) {
            LOG.e(LOG_TAG, "URI passed in has caused a JSON error.");
        }
    }

    // Fires the LOAD_STOP_EVENT and flushes cookies to persistent storage.
    public void onPageFinished(WebView view, String url) {
        super.onPageFinished(view, url);
        // CB-10395 InAppBrowser's WebView not storing cookies reliable to local device storage
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
            CookieManager.getInstance().flush();
        } else {
            CookieSyncManager.getInstance().sync();
        }
        // Re-show the dialog if a reopen was requested while loading.
        if(reOpenOnNextPageFinished){
            reOpenOnNextPageFinished = false;
            showDialogue();
        }
        try {
            JSONObject obj = new JSONObject();
            obj.put("type", LOAD_STOP_EVENT);
            obj.put("url", url);
            sendUpdate(obj, true);
        } catch (JSONException ex) {
            LOG.d(LOG_TAG, "Should never happen");
        }
    }

    // Fires the LOAD_ERROR_EVENT with the failing URL, code and description.
    public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) {
        super.onReceivedError(view, errorCode, description, failingUrl);
        try {
            JSONObject obj = new JSONObject();
            obj.put("type", LOAD_ERROR_EVENT);
            obj.put("url", failingUrl);
            obj.put("code", errorCode);
            obj.put("message", description);
            sendUpdate(obj, true, PluginResult.Status.ERROR);
        } catch (JSONException ex) {
            LOG.d(LOG_TAG, "Should never happen");
        }
    }

    /**
     * On received http auth request.
     *
     * Tries to locate the PluginManager (via the getPluginManager() method on
     * newer cordova-android, or the public pluginManager field on older ones)
     * and lets a plugin resolve the auth challenge; otherwise falls back to
     * the default WebViewClient handling.
     */
    @Override
    public void onReceivedHttpAuthRequest(WebView view, HttpAuthHandler handler, String host, String realm) {
        // Check if there is some plugin which can resolve this auth challenge
        PluginManager pluginManager = null;
        try {
            Method gpm = webView.getClass().getMethod("getPluginManager");
            pluginManager = (PluginManager)gpm.invoke(webView);
        } catch (NoSuchMethodException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        } catch (IllegalAccessException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        } catch (InvocationTargetException e) {
            LOG.d(LOG_TAG, e.getLocalizedMessage());
        }
        if (pluginManager == null) {
            try {
                Field pmf = webView.getClass().getField("pluginManager");
                pluginManager = (PluginManager)pmf.get(webView);
            } catch (NoSuchFieldException e) {
                LOG.d(LOG_TAG, e.getLocalizedMessage());
            } catch (IllegalAccessException e) {
                LOG.d(LOG_TAG, e.getLocalizedMessage());
            }
        }
        if (pluginManager != null && pluginManager.onReceivedHttpAuthRequest(webView, new CordovaHttpAuthHandler(handler), host, realm)) {
            return;
        }
        // By default handle 401 like we'd normally do!
        super.onReceivedHttpAuthRequest(view, handler, host, realm);
    }
}
} | removed commentted test code
| src/android/InAppBrowser.java | removed commentted test code |
|
Java | apache-2.0 | d35c9d2ab8d890e1dd76325211434d6ad32410ae | 0 | wseyler/pentaho-kettle,tkafalas/pentaho-kettle,pedrofvteixeira/pentaho-kettle,tkafalas/pentaho-kettle,HiromuHota/pentaho-kettle,ddiroma/pentaho-kettle,ddiroma/pentaho-kettle,ddiroma/pentaho-kettle,rmansoor/pentaho-kettle,pentaho/pentaho-kettle,ccaspanello/pentaho-kettle,pminutillo/pentaho-kettle,tkafalas/pentaho-kettle,wseyler/pentaho-kettle,skofra0/pentaho-kettle,ccaspanello/pentaho-kettle,roboguy/pentaho-kettle,pentaho/pentaho-kettle,tmcsantos/pentaho-kettle,rmansoor/pentaho-kettle,roboguy/pentaho-kettle,rmansoor/pentaho-kettle,skofra0/pentaho-kettle,pentaho/pentaho-kettle,pedrofvteixeira/pentaho-kettle,ccaspanello/pentaho-kettle,skofra0/pentaho-kettle,tmcsantos/pentaho-kettle,pedrofvteixeira/pentaho-kettle,HiromuHota/pentaho-kettle,ddiroma/pentaho-kettle,tmcsantos/pentaho-kettle,tkafalas/pentaho-kettle,tmcsantos/pentaho-kettle,HiromuHota/pentaho-kettle,roboguy/pentaho-kettle,pedrofvteixeira/pentaho-kettle,ccaspanello/pentaho-kettle,pminutillo/pentaho-kettle,roboguy/pentaho-kettle,wseyler/pentaho-kettle,skofra0/pentaho-kettle,pentaho/pentaho-kettle,pminutillo/pentaho-kettle,wseyler/pentaho-kettle,pminutillo/pentaho-kettle,rmansoor/pentaho-kettle,HiromuHota/pentaho-kettle | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2019 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.simpleeval;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.regex.Pattern;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* This defines a 'simple evaluation' job entry.
*
* @author Samatar Hassan
* @since 01-01-2009
*/
public class JobEntrySimpleEval extends JobEntryBase implements Cloneable, JobEntryInterface {
private static Class<?> PKG = JobEntrySimpleEval.class; // for i18n purposes, needed by Translator2!!

// --- Source of the value being evaluated: a previous-result field or a variable. ---
public static final String[] valueTypeDesc = new String[] {
    BaseMessages.getString( PKG, "JobSimpleEval.EvalPreviousField.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.EvalVariable.Label" ),
};
// XML/repository codes; indexes line up with valueTypeDesc and the
// VALUE_TYPE_* constants below.
public static final String[] valueTypeCode = new String[] { "field", "variable" };
public static final int VALUE_TYPE_FIELD = 0;
public static final int VALUE_TYPE_VARIABLE = 1;
public int valuetype;

// --- Success conditions for string comparisons. ---
public static final String[] successConditionDesc = new String[] {
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEqual.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenDifferent.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenContains.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotContains.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenStartWith.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotStartWith.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEndWith.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotEndWith.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenRegExp.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenInList.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotInList.Label" ) };
// NOTE: "notstatwith" / "notendwith" are historical codes persisted in
// existing job XML and must not be corrected.
public static final String[] successConditionCode = new String[] {
    "equal", "different", "contains", "notcontains", "startswith", "notstatwith", "endswith", "notendwith",
    "regexp", "inlist", "notinlist" };
public static final int SUCCESS_CONDITION_EQUAL = 0;
public static final int SUCCESS_CONDITION_DIFFERENT = 1;
public static final int SUCCESS_CONDITION_CONTAINS = 2;
public static final int SUCCESS_CONDITION_NOT_CONTAINS = 3;
public static final int SUCCESS_CONDITION_START_WITH = 4;
public static final int SUCCESS_CONDITION_NOT_START_WITH = 5;
public static final int SUCCESS_CONDITION_END_WITH = 6;
public static final int SUCCESS_CONDITION_NOT_END_WITH = 7;
public static final int SUCCESS_CONDITION_REGEX = 8;
public static final int SUCCESS_CONDITION_IN_LIST = 9;
public static final int SUCCESS_CONDITION_NOT_IN_LIST = 10;
public int successcondition;

// --- Data type the value is interpreted as before comparison. ---
public static final String[] fieldTypeDesc = new String[] {
    BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeString.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeNumber.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeDateTime.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeBoolean.Label" ),
};
public static final String[] fieldTypeCode = new String[] { "string", "number", "datetime", "boolean" };
public static final int FIELD_TYPE_STRING = 0;
public static final int FIELD_TYPE_NUMBER = 1;
public static final int FIELD_TYPE_DATE_TIME = 2;
public static final int FIELD_TYPE_BOOLEAN = 3;
public int fieldtype;

// --- Success conditions for numeric/date comparisons. ---
public static final String[] successNumberConditionDesc = new String[] {
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEqual.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenDifferent.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenSmallThan.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenSmallOrEqualThan.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenGreaterThan.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenGreaterOrEqualThan.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessBetween.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenInList.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotInList.Label" ), };
public static final String[] successNumberConditionCode = new String[] {
    "equal", "different", "smaller", "smallequal", "greater", "greaterequal", "between", "inlist", "notinlist" };
public static final int SUCCESS_NUMBER_CONDITION_EQUAL = 0;
public static final int SUCCESS_NUMBER_CONDITION_DIFFERENT = 1;
public static final int SUCCESS_NUMBER_CONDITION_SMALLER = 2;
public static final int SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL = 3;
public static final int SUCCESS_NUMBER_CONDITION_GREATER = 4;
public static final int SUCCESS_NUMBER_CONDITION_GREATER_EQUAL = 5;
public static final int SUCCESS_NUMBER_CONDITION_BETWEEN = 6;
public static final int SUCCESS_NUMBER_CONDITION_IN_LIST = 7;
public static final int SUCCESS_NUMBER_CONDITION_NOT_IN_LIST = 8;
public int successnumbercondition;

// --- Success conditions for boolean evaluation. ---
public static final String[] successBooleanConditionDesc = new String[] {
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenTrue.Label" ),
    BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenFalse.Label" )
};
public static final String[] successBooleanConditionCode = new String[] { "true", "false" };
public static final int SUCCESS_BOOLEAN_CONDITION_TRUE = 0;
public static final int SUCCESS_BOOLEAN_CONDITION_FALSE = 1;
public int successbooleancondition;

// --- User-configured settings, persisted via getXML()/loadXML. ---
private String fieldname;     // name of the previous-result field to evaluate
private String variablename;  // name of the variable to evaluate
private String mask;          // date mask used for FIELD_TYPE_DATE_TIME parsing
private String comparevalue;  // value compared against
private String minvalue;      // lower bound for the "between" condition
private String maxvalue;      // upper bound for the "between" condition
private boolean successwhenvarset;  // succeed merely when the variable is set
/**
 * Creates a named entry with default settings: evaluate a string field from the
 * previous result, succeeding when it equals the (unset) compare value.
 *
 * @param n the name of the job entry
 */
public JobEntrySimpleEval( String n ) {
  super( n, "" );
  // Default to evaluating a field from the previous result rows, as a string.
  valuetype = VALUE_TYPE_FIELD;
  fieldtype = FIELD_TYPE_STRING;
  // Default comparison modes for each field type.
  successcondition = SUCCESS_CONDITION_EQUAL;
  successnumbercondition = SUCCESS_NUMBER_CONDITION_EQUAL;
  successbooleancondition = SUCCESS_BOOLEAN_CONDITION_FALSE;
  // No field, variable, mask or comparison values configured yet.
  fieldname = null;
  variablename = null;
  mask = null;
  comparevalue = null;
  minvalue = null;
  maxvalue = null;
  successwhenvarset = false;
}
/** Creates an unnamed entry with default settings. */
public JobEntrySimpleEval() {
  this( "" );
}
/**
 * Returns a shallow copy of this entry; sufficient because all state consists of
 * immutable Strings and primitive ints/booleans.
 */
@Override
public Object clone() {
  return (JobEntrySimpleEval) super.clone();
}
/**
 * Maps a value-type index to its stable code, falling back to the first code
 * for out-of-range indexes.
 *
 * @param i value-type index
 * @return the matching code from {@code valueTypeCode}
 */
private static String getValueTypeCode( int i ) {
  boolean inRange = ( i >= 0 && i < valueTypeCode.length );
  return inRange ? valueTypeCode[i] : valueTypeCode[0];
}
/**
 * Maps a field-type index to its stable code, falling back to the first code
 * for out-of-range indexes.
 *
 * @param i field-type index
 * @return the matching code from {@code fieldTypeCode}
 */
private static String getFieldTypeCode( int i ) {
  boolean inRange = ( i >= 0 && i < fieldTypeCode.length );
  return inRange ? fieldTypeCode[i] : fieldTypeCode[0];
}
/**
 * Maps a string success-condition index to its stable code, falling back to the
 * first code for out-of-range indexes.
 *
 * @param i success-condition index
 * @return the matching code from {@code successConditionCode}
 */
private static String getSuccessConditionCode( int i ) {
  boolean inRange = ( i >= 0 && i < successConditionCode.length );
  return inRange ? successConditionCode[i] : successConditionCode[0];
}
/**
 * Maps a numeric/date success-condition index to its stable code, falling back
 * to the first code for out-of-range indexes.
 *
 * @param i success-condition index
 * @return the matching code from {@code successNumberConditionCode}
 */
public static String getSuccessNumberConditionCode( int i ) {
  boolean inRange = ( i >= 0 && i < successNumberConditionCode.length );
  return inRange ? successNumberConditionCode[i] : successNumberConditionCode[0];
}
/**
 * Maps a boolean success-condition index to its stable code, falling back to the
 * first code for out-of-range indexes.
 *
 * @param i success-condition index
 * @return the matching code from {@code successBooleanConditionCode}
 */
private static String getSuccessBooleanConditionCode( int i ) {
  boolean inRange = ( i >= 0 && i < successBooleanConditionCode.length );
  return inRange ? successBooleanConditionCode[i] : successBooleanConditionCode[0];
}
/**
 * Serializes this entry's settings to XML, appended after the base entry XML.
 * Enumerated settings are written as their stable string codes (not int ordinals)
 * so saved jobs survive reordering of the constant arrays.
 *
 * @return the XML fragment for this job entry
 */
@Override
public String getXML() {
  StringBuilder retval = new StringBuilder( 300 );
  retval.append( super.getXML() );
  retval.append( " " ).append( XMLHandler.addTagValue( "valuetype", getValueTypeCode( valuetype ) ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "fieldname", fieldname ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "variablename", variablename ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "fieldtype", getFieldTypeCode( fieldtype ) ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "mask", mask ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "comparevalue", comparevalue ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "minvalue", minvalue ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "maxvalue", maxvalue ) );
  retval.append( " " ).append(
    XMLHandler.addTagValue( "successcondition", getSuccessConditionCode( successcondition ) ) );
  retval
    .append( " " ).append(
      XMLHandler.addTagValue(
        "successnumbercondition", getSuccessNumberConditionCode( successnumbercondition ) ) );
  retval.append( " " ).append(
    XMLHandler.addTagValue(
      "successbooleancondition", getSuccessBooleanConditionCode( successbooleancondition ) ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "successwhenvarset", successwhenvarset ) );
  return retval.toString();
}
/**
 * Resolves a value-type code (case-insensitive) to its index; unknown or null
 * codes resolve to 0.
 *
 * @param tt the code to look up
 * @return the matching index, or 0
 */
private static int getValueTypeByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < valueTypeCode.length; idx++ ) {
      if ( tt.equalsIgnoreCase( valueTypeCode[idx] ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/**
 * Resolves a numeric/date success-condition code (case-insensitive) to its index;
 * unknown or null codes resolve to 0.
 *
 * @param tt the code to look up
 * @return the matching index, or 0
 */
private static int getSuccessNumberByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successNumberConditionCode.length; idx++ ) {
      if ( tt.equalsIgnoreCase( successNumberConditionCode[idx] ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/**
 * Resolves a boolean success-condition code (case-insensitive) to its index;
 * unknown or null codes resolve to 0.
 *
 * @param tt the code to look up
 * @return the matching index, or 0
 */
private static int getSuccessBooleanByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successBooleanConditionCode.length; idx++ ) {
      if ( tt.equalsIgnoreCase( successBooleanConditionCode[idx] ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/**
 * Resolves a field-type code (case-insensitive) to its index; unknown or null
 * codes resolve to 0.
 *
 * @param tt the code to look up
 * @return the matching index, or 0
 */
private static int getFieldTypeByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < fieldTypeCode.length; idx++ ) {
      if ( tt.equalsIgnoreCase( fieldTypeCode[idx] ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/**
 * Resolves a string success-condition code (case-insensitive) to its index;
 * unknown or null codes resolve to 0.
 *
 * @param tt the code to look up
 * @return the matching index, or 0
 */
private static int getSuccessConditionByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successConditionCode.length; idx++ ) {
      if ( tt.equalsIgnoreCase( successConditionCode[idx] ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/**
 * Sets whether (in variable mode) success means the variable is merely set,
 * skipping any value comparison.
 *
 * @param successwhenvarset true to succeed when the variable is set
 */
public void setSuccessWhenVarSet( boolean successwhenvarset ) {
  this.successwhenvarset = successwhenvarset;
}
/**
 * @return true when (in variable mode) success means the variable is set
 */
public boolean isSuccessWhenVarSet() {
  return this.successwhenvarset;
}
/**
 * Resolves a numeric/date success-condition code (case-insensitive) to its index;
 * unknown or null codes resolve to 0.
 *
 * @param tt the code to look up
 * @return the matching index, or 0
 */
public static int getSuccessNumberConditionByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successNumberConditionCode.length; idx++ ) {
      if ( tt.equalsIgnoreCase( successNumberConditionCode[idx] ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/**
 * Resolves a boolean success-condition code (case-insensitive) to its index;
 * unknown or null codes resolve to 0.
 *
 * @param tt the code to look up
 * @return the matching index, or 0
 */
private static int getSuccessBooleanConditionByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successBooleanConditionCode.length; idx++ ) {
      if ( tt.equalsIgnoreCase( successBooleanConditionCode[idx] ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/**
 * Restores this entry's settings from its XML node. Enumerated settings are read
 * as codes and mapped back to int constants; missing or unknown codes fall back
 * to index 0 via the *ByCode helpers.
 *
 * @param entrynode the XML node for this job entry
 * @param databases available database connections (used by the base class)
 * @param slaveServers available slave servers (used by the base class)
 * @param rep the repository (unused here)
 * @param metaStore the metastore (unused here)
 * @throws KettleXMLException when the node cannot be parsed
 */
@Override
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    valuetype = getValueTypeByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "valuetype" ), "" ) );
    fieldname = XMLHandler.getTagValue( entrynode, "fieldname" );
    fieldtype = getFieldTypeByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "fieldtype" ), "" ) );
    variablename = XMLHandler.getTagValue( entrynode, "variablename" );
    mask = XMLHandler.getTagValue( entrynode, "mask" );
    comparevalue = XMLHandler.getTagValue( entrynode, "comparevalue" );
    minvalue = XMLHandler.getTagValue( entrynode, "minvalue" );
    maxvalue = XMLHandler.getTagValue( entrynode, "maxvalue" );
    successcondition =
      getSuccessConditionByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "successcondition" ), "" ) );
    successnumbercondition =
      getSuccessNumberConditionByCode( Const.NVL(
        XMLHandler.getTagValue( entrynode, "successnumbercondition" ), "" ) );
    successbooleancondition =
      getSuccessBooleanConditionByCode( Const.NVL( XMLHandler.getTagValue(
        entrynode, "successbooleancondition" ), "" ) );
    // Stored as "Y"/"N"; anything other than "Y" reads as false.
    successwhenvarset = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "successwhenvarset" ) );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException(
      BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableLoadXML" ), xe );
  }
}
/**
 * Restores this entry's settings from the repository. Mirrors {@link #loadXML}
 * with the same attribute names and the same fall-back-to-0 code mapping.
 *
 * @param rep the repository to read from
 * @param metaStore the metastore (unused here)
 * @param id_jobentry the id of this job entry in the repository
 * @param databases available database connections (unused here)
 * @param slaveServers available slave servers (unused here)
 * @throws KettleException when the attributes cannot be read
 */
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    valuetype = getValueTypeByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "valuetype" ), "" ) );
    fieldname = rep.getJobEntryAttributeString( id_jobentry, "fieldname" );
    variablename = rep.getJobEntryAttributeString( id_jobentry, "variablename" );
    fieldtype = getFieldTypeByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "fieldtype" ), "" ) );
    mask = rep.getJobEntryAttributeString( id_jobentry, "mask" );
    comparevalue = rep.getJobEntryAttributeString( id_jobentry, "comparevalue" );
    minvalue = rep.getJobEntryAttributeString( id_jobentry, "minvalue" );
    maxvalue = rep.getJobEntryAttributeString( id_jobentry, "maxvalue" );
    successcondition =
      getSuccessConditionByCode( Const.NVL(
        rep.getJobEntryAttributeString( id_jobentry, "successcondition" ), "" ) );
    successnumbercondition =
      getSuccessNumberConditionByCode( Const.NVL( rep.getJobEntryAttributeString(
        id_jobentry, "successnumbercondition" ), "" ) );
    successbooleancondition =
      getSuccessBooleanConditionByCode( Const.NVL( rep.getJobEntryAttributeString(
        id_jobentry, "successbooleancondition" ), "" ) );
    successwhenvarset = rep.getJobEntryAttributeBoolean( id_jobentry, "successwhenvarset" );
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableLoadRep" )
      + id_jobentry, dbe );
  }
}
/**
 * Saves this entry's settings to the repository, using the same attribute names
 * and string codes as {@link #getXML} so XML and repository storage stay in sync.
 *
 * @param rep the repository to write to
 * @param metaStore the metastore (unused here)
 * @param id_job the id of the job this entry belongs to
 * @throws KettleException when the attributes cannot be saved
 */
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "valuetype", getValueTypeCode( valuetype ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "fieldname", fieldname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "variablename", variablename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "fieldtype", getFieldTypeCode( fieldtype ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "mask", mask );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "comparevalue", comparevalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "minvalue", minvalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "maxvalue", maxvalue );
    rep.saveJobEntryAttribute(
      id_job, getObjectId(), "successcondition", getSuccessConditionCode( successcondition ) );
    rep
      .saveJobEntryAttribute(
        id_job, getObjectId(), "successnumbercondition",
        getSuccessNumberConditionCode( successnumbercondition ) );
    rep.saveJobEntryAttribute(
      id_job, getObjectId(), "successbooleancondition",
      getSuccessBooleanConditionCode( successbooleancondition ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "successwhenvarset", successwhenvarset );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableSaveRep" )
      + id_job, dbe );
  }
}
/**
 * Evaluates the configured source value (a field from the first previous-result row,
 * or a variable) against the configured comparison, and stores the outcome in the
 * result.
 *
 * <p>Per PDI-6943 this entry does not report errors for a failed check: when the
 * evaluation completes, the error count is reset to 0 and only
 * {@code result.setResult(...)} reflects success. Early returns (no rows, missing
 * field, unparsable values) leave the pessimistic defaults set at the top of the
 * method (failed, 1 error).</p>
 *
 * <p>Fixes in this revision: the date "in list" / "not in list" conditions now parse
 * each list entry ({@code parts[i]}) instead of repeatedly parsing the whole
 * comma-separated compare value, and the numeric "greater" condition logs its
 * operands at debug level like every sibling condition.</p>
 *
 * @param previousResult the result of the previous job entry execution
 * @param nr the job entry number (unused)
 * @return the updated result
 * @throws KettleException declared by the interface; not thrown directly here
 */
@Override
public Result execute( Result previousResult, int nr ) throws KettleException {
  Result result = previousResult;
  // Pessimistic defaults: failure until the evaluation below proves otherwise.
  result.setNrErrors( 1 );
  result.setResult( false );

  // 1) Resolve the value to evaluate.
  String sourcevalue = null;
  switch ( valuetype ) {
    case VALUE_TYPE_FIELD:
      List<RowMetaAndData> rows = result.getRows();
      RowMetaAndData resultRow = null;
      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.Log.ArgFromPrevious.Found", ( rows != null
          ? rows.size() : 0 )
          + "" ) );
      }
      if ( rows.size() == 0 ) {
        rows = null;
        logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.NoRows" ) );
        return result;
      }
      // Only the first row of the previous result is inspected.
      resultRow = rows.get( 0 );
      String realfieldname = environmentSubstitute( fieldname );
      int indexOfField = -1;
      indexOfField = resultRow.getRowMeta().indexOfValue( realfieldname );
      if ( indexOfField == -1 ) {
        logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.FieldNotExist", realfieldname ) );
        resultRow = null;
        rows = null;
        return result;
      }
      sourcevalue = resultRow.getString( indexOfField, null );
      if ( sourcevalue == null ) {
        sourcevalue = "";
      }
      resultRow = null;
      rows = null;
      break;
    case VALUE_TYPE_VARIABLE:
      if ( Utils.isEmpty( variablename ) ) {
        logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.VariableMissing" ) );
        return result;
      }
      if ( isSuccessWhenVarSet() ) {
        // Success is defined purely by the variable being set; no value comparison happens.
        // Strip ${}/%%%% specifications from the configured name first.
        String variableName = StringUtil.getVariableName( Const.NVL( getVariableName(), "" ) );
        // Get value; if the variable is not set, null will be returned.
        String value = getVariable( variableName );
        if ( value != null ) {
          if ( isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.VariableSet", variableName ) );
          }
          result.setResult( true );
          result.setNrErrors( 0 );
          return result;
        } else {
          if ( isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.VariableNotSet", variableName ) );
          }
          // PDI-6943: this job entry does not set errors upon evaluation, independently of the outcome of the check
          result.setNrErrors( 0 );
          return result;
        }
      }
      sourcevalue = environmentSubstitute( getVariableWithSpec() );
      break;
    default:
      break;
  }
  if ( isDetailed() ) {
    logDetailed( BaseMessages.getString( PKG, "JobSimpleEval.Log.ValueToevaluate", sourcevalue ) );
  }

  // 2) Resolve the comparison operands (variables substituted).
  boolean success = false;
  String realCompareValue = environmentSubstitute( comparevalue );
  if ( realCompareValue == null ) {
    realCompareValue = "";
  }
  String realMinValue = environmentSubstitute( minvalue );
  String realMaxValue = environmentSubstitute( maxvalue );

  // 3) Evaluate according to the configured field type.
  switch ( fieldtype ) {
    case FIELD_TYPE_STRING:
      switch ( successcondition ) {
        case SUCCESS_CONDITION_EQUAL: // equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( sourcevalue.equals( realCompareValue ) );
          if ( valuetype == VALUE_TYPE_VARIABLE && !success ) {
            // make the empty value evaluate to true when compared to a not set variable
            if ( Utils.isEmpty( realCompareValue ) ) {
              String variableName = StringUtil.getVariableName( variablename );
              if ( getVariable( variableName ) == null ) {
                success = true;
              }
            }
          }
          break;
        case SUCCESS_CONDITION_DIFFERENT: // different
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( !sourcevalue.equals( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_CONTAINS: // contains
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( sourcevalue.contains( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_NOT_CONTAINS: // not contains
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( !sourcevalue.contains( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_START_WITH: // starts with
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( sourcevalue.startsWith( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_NOT_START_WITH: // not start with
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( !sourcevalue.startsWith( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_END_WITH: // ends with
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( sourcevalue.endsWith( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_NOT_END_WITH: // not ends with
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( !sourcevalue.endsWith( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_REGEX: // regexp
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( Pattern.compile( realCompareValue ).matcher( sourcevalue ).matches() );
          break;
        case SUCCESS_CONDITION_IN_LIST: // in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          realCompareValue = Const.NVL( realCompareValue, "" );
          String[] parts = realCompareValue.split( "," );
          // Succeed on the first list entry that matches (entries are trimmed).
          for ( int i = 0; i < parts.length && !success; i++ ) {
            success = ( sourcevalue.equals( parts[i].trim() ) );
          }
          break;
        case SUCCESS_CONDITION_NOT_IN_LIST: // not in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          realCompareValue = Const.NVL( realCompareValue, "" );
          parts = realCompareValue.split( "," );
          // Fail on the first list entry that matches.
          success = true;
          for ( int i = 0; i < parts.length && success; i++ ) {
            success = !( sourcevalue.equals( parts[i].trim() ) );
          }
          break;
        default:
          break;
      }
      break;
    case FIELD_TYPE_NUMBER:
      double valuenumber;
      try {
        valuenumber = Double.parseDouble( sourcevalue );
      } catch ( Exception e ) {
        logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", sourcevalue, e
          .getMessage() ) );
        return result;
      }
      double valuecompare;
      switch ( successnumbercondition ) {
        case SUCCESS_NUMBER_CONDITION_EQUAL: // equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber == valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_DIFFERENT: // different
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber != valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_SMALLER: // smaller
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber < valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber <= valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_GREATER: // greater
          // FIX: debug log added for consistency with all sibling numeric conditions.
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber > valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber >= valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValues", realMinValue, realMaxValue ) );
          }
          double valuemin;
          try {
            valuemin = Double.parseDouble( realMinValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realMinValue, e
              .getMessage() ) );
            return result;
          }
          double valuemax;
          try {
            valuemax = Double.parseDouble( realMaxValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realMaxValue, e
              .getMessage() ) );
            return result;
          }
          // Bounds must form a non-empty open range; both bounds are inclusive below.
          if ( valuemin >= valuemax ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.IncorrectNumbers", realMinValue, realMaxValue ) );
            return result;
          }
          success = ( valuenumber >= valuemin && valuenumber <= valuemax );
          break;
        case SUCCESS_NUMBER_CONDITION_IN_LIST: // in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          String[] parts = realCompareValue.split( "," );
          for ( int i = 0; i < parts.length && !success; i++ ) {
            try {
              valuecompare = Double.parseDouble( parts[i] );
            } catch ( Exception e ) {
              logError( toString(), BaseMessages.getString(
                PKG, "JobEntrySimpleEval.Error.UnparsableNumber", parts[i], e.getMessage() ) );
              return result;
            }
            success = ( valuenumber == valuecompare );
          }
          break;
        case SUCCESS_NUMBER_CONDITION_NOT_IN_LIST: // not in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          realCompareValue = Const.NVL( realCompareValue, "" );
          parts = realCompareValue.split( "," );
          success = true;
          for ( int i = 0; i < parts.length && success; i++ ) {
            try {
              valuecompare = Double.parseDouble( parts[i] );
            } catch ( Exception e ) {
              logError( toString(), BaseMessages.getString(
                PKG, "JobEntrySimpleEval.Error.UnparsableNumber", parts[i], e.getMessage() ) );
              return result;
            }
            success = ( valuenumber != valuecompare );
          }
          break;
        default:
          break;
      }
      break;
    case FIELD_TYPE_DATE_TIME:
      String realMask = environmentSubstitute( mask );
      SimpleDateFormat df = new SimpleDateFormat();
      if ( !Utils.isEmpty( realMask ) ) {
        df.applyPattern( realMask );
      }
      Date datevalue = null;
      try {
        datevalue = convertToDate( sourcevalue, realMask, df );
      } catch ( Exception e ) {
        logError( e.getMessage() );
        return result;
      }
      Date datecompare;
      switch ( successnumbercondition ) {
        case SUCCESS_NUMBER_CONDITION_EQUAL: // equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( datevalue.equals( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_DIFFERENT: // different
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( !datevalue.equals( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_SMALLER: // smaller
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( datevalue.before( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( datevalue.before( datecompare ) || datevalue.equals( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_GREATER: // greater
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( datevalue.after( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( datevalue.after( datecompare ) || datevalue.equals( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValues", realMinValue, realMaxValue ) );
          }
          Date datemin;
          try {
            datemin = convertToDate( realMinValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          Date datemax;
          try {
            datemax = convertToDate( realMaxValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          if ( datemin.after( datemax ) || datemin.equals( datemax ) ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.IncorrectDates", realMinValue, realMaxValue ) );
            return result;
          }
          success =
            ( ( datevalue.after( datemin )
              || datevalue.equals( datemin ) ) && ( datevalue.before( datemax )
              || datevalue.equals( datemax ) ) );
          break;
        case SUCCESS_NUMBER_CONDITION_IN_LIST: // in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          String[] parts = realCompareValue.split( "," );
          for ( int i = 0; i < parts.length && !success; i++ ) {
            try {
              // FIX: parse each list entry, not the whole comma-separated compare value.
              datecompare = convertToDate( parts[i].trim(), realMask, df );
            } catch ( Exception e ) {
              logError( toString(), e.getMessage() );
              return result;
            }
            success = ( datevalue.equals( datecompare ) );
          }
          break;
        case SUCCESS_NUMBER_CONDITION_NOT_IN_LIST: // not in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          realCompareValue = Const.NVL( realCompareValue, "" );
          parts = realCompareValue.split( "," );
          success = true;
          for ( int i = 0; i < parts.length && success; i++ ) {
            try {
              // FIX: parse each list entry, not the whole comma-separated compare value.
              datecompare = convertToDate( parts[i].trim(), realMask, df );
            } catch ( Exception e ) {
              logError( toString(), e.getMessage() );
              return result;
            }
            success = ( !datevalue.equals( datecompare ) );
          }
          break;
        default:
          break;
      }
      df = null;
      break;
    case FIELD_TYPE_BOOLEAN:
      boolean valuebool;
      try {
        valuebool = ValueMetaString.convertStringToBoolean( sourcevalue );
      } catch ( Exception e ) {
        logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableBoolean", sourcevalue, e
          .getMessage() ) );
        return result;
      }
      switch ( successbooleancondition ) {
        case SUCCESS_BOOLEAN_CONDITION_FALSE: // false
          success = ( !valuebool );
          break;
        case SUCCESS_BOOLEAN_CONDITION_TRUE: // true
          success = ( valuebool );
          break;
        default:
          break;
      }
      break;
    default:
      break;
  }
  result.setResult( success );
  // PDI-6943: this job entry does not set errors upon evaluation, independently of the outcome of the check
  result.setNrErrors( 0 );
  return result;
}
/**
 * Returns the configured variable name wrapped with variable specifications.
 * A bare name is wrapped as {@code ${name}} so {@code environmentSubstitute}
 * can resolve it; names that already carry any open/close marker are returned
 * unchanged.
 *
 * @return the variable reference ready for substitution
 */
private String getVariableWithSpec() {
  String variable = getVariableName();
  boolean hasOpenSpec = variable.contains( StringUtil.UNIX_OPEN )
    || variable.contains( StringUtil.WINDOWS_OPEN )
    || variable.contains( StringUtil.HEX_OPEN );
  boolean hasCloseSpec = variable.contains( StringUtil.UNIX_CLOSE )
    || variable.contains( StringUtil.WINDOWS_CLOSE )
    || variable.contains( StringUtil.HEX_CLOSE );
  if ( !hasOpenSpec && !hasCloseSpec ) {
    // Add specifications to variable
    variable = StringUtil.UNIX_OPEN + variable + StringUtil.UNIX_CLOSE;
    if ( isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.CheckingVariable", variable ) );
    }
  }
  return variable;
}
/**
 * Parses a date string with the supplied formatter.
 *
 * @param valueString the text to parse
 * @param mask the date mask (informational; {@code df} already carries the pattern)
 * @param df the formatter to parse with
 * @return the parsed date
 * @throws KettleException when the text cannot be parsed; the underlying parse
 *         exception is chained as the cause so the real failure is not lost
 */
private Date convertToDate( String valueString, String mask, SimpleDateFormat df ) throws KettleException {
  try {
    return df.parse( valueString );
  } catch ( Exception e ) {
    // FIX: chain the original exception instead of discarding it.
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntrySimpleEval.Error.UnparsableDate", valueString ), e );
  }
}
/**
 * Maps a value-type index to its localized label, falling back to the first
 * label for out-of-range indexes.
 *
 * @param i value-type index
 * @return the matching entry from {@code valueTypeDesc}
 */
public static String getValueTypeDesc( int i ) {
  boolean inRange = ( i >= 0 && i < valueTypeDesc.length );
  return inRange ? valueTypeDesc[i] : valueTypeDesc[0];
}
/**
 * Maps a field-type index to its localized label, falling back to the first
 * label for out-of-range indexes.
 *
 * @param i field-type index
 * @return the matching entry from {@code fieldTypeDesc}
 */
public static String getFieldTypeDesc( int i ) {
  boolean inRange = ( i >= 0 && i < fieldTypeDesc.length );
  return inRange ? fieldTypeDesc[i] : fieldTypeDesc[0];
}
/**
 * Maps a string success-condition index to its localized label, falling back to
 * the first label for out-of-range indexes.
 *
 * @param i success-condition index
 * @return the matching entry from {@code successConditionDesc}
 */
public static String getSuccessConditionDesc( int i ) {
  boolean inRange = ( i >= 0 && i < successConditionDesc.length );
  return inRange ? successConditionDesc[i] : successConditionDesc[0];
}
/**
 * Maps a numeric/date success-condition index to its localized label, falling
 * back to the first label for out-of-range indexes.
 *
 * @param i success-condition index
 * @return the matching entry from {@code successNumberConditionDesc}
 */
public static String getSuccessNumberConditionDesc( int i ) {
  boolean inRange = ( i >= 0 && i < successNumberConditionDesc.length );
  return inRange ? successNumberConditionDesc[i] : successNumberConditionDesc[0];
}
/**
 * Maps a boolean success-condition index to its localized label, falling back to
 * the first label for out-of-range indexes.
 *
 * @param i success-condition index
 * @return the matching entry from {@code successBooleanConditionDesc}
 */
public static String getSuccessBooleanConditionDesc( int i ) {
  boolean inRange = ( i >= 0 && i < successBooleanConditionDesc.length );
  return inRange ? successBooleanConditionDesc[i] : successBooleanConditionDesc[0];
}
/**
 * Resolves a localized value-type label (case-insensitive) to its index, falling
 * back to a code lookup (which yields 0 for unknown or null input).
 *
 * @param tt the label or code to look up
 * @return the matching index, or 0
 */
public static int getValueTypeByDesc( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < valueTypeDesc.length; idx++ ) {
      if ( tt.equalsIgnoreCase( valueTypeDesc[idx] ) ) {
        return idx;
      }
    }
  }
  // If this fails, try to match using the code.
  return getValueTypeByCode( tt );
}
/**
 * Resolves a localized field-type label (case-insensitive) to its index, falling
 * back to a code lookup (which yields 0 for unknown or null input).
 *
 * @param tt the label or code to look up
 * @return the matching index, or 0
 */
public static int getFieldTypeByDesc( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < fieldTypeDesc.length; idx++ ) {
      if ( tt.equalsIgnoreCase( fieldTypeDesc[idx] ) ) {
        return idx;
      }
    }
  }
  // If this fails, try to match using the code.
  return getFieldTypeByCode( tt );
}
/**
 * Resolves a localized string success-condition label (case-insensitive) to its
 * index, falling back to a code lookup (which yields 0 for unknown or null input).
 *
 * @param tt the label or code to look up
 * @return the matching index, or 0
 */
public static int getSuccessConditionByDesc( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successConditionDesc.length; idx++ ) {
      if ( tt.equalsIgnoreCase( successConditionDesc[idx] ) ) {
        return idx;
      }
    }
  }
  // If this fails, try to match using the code.
  return getSuccessConditionByCode( tt );
}
/**
 * Resolves a localized numeric/date success-condition label (case-insensitive)
 * to its index, falling back to a code lookup (which yields 0 for unknown or
 * null input).
 *
 * @param tt the label or code to look up
 * @return the matching index, or 0
 */
public static int getSuccessNumberConditionByDesc( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successNumberConditionDesc.length; idx++ ) {
      if ( tt.equalsIgnoreCase( successNumberConditionDesc[idx] ) ) {
        return idx;
      }
    }
  }
  // If this fails, try to match using the code.
  return getSuccessNumberByCode( tt );
}
/**
 * Resolves a localized boolean success-condition label (case-insensitive) to its
 * index, falling back to a code lookup (which yields 0 for unknown or null input).
 *
 * @param tt the label or code to look up
 * @return the matching index, or 0
 */
public static int getSuccessBooleanConditionByDesc( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successBooleanConditionDesc.length; idx++ ) {
      if ( tt.equalsIgnoreCase( successBooleanConditionDesc[idx] ) ) {
        return idx;
      }
    }
  }
  // If this fails, try to match using the code.
  return getSuccessBooleanByCode( tt );
}
/** @param minvalue lower bound for the "between" condition */
public void setMinValue( String minvalue ) {
  this.minvalue = minvalue;
}
/** @return lower bound for the "between" condition */
public String getMinValue() {
  return minvalue;
}
/** @param comparevalue single value to compare against */
public void setCompareValue( String comparevalue ) {
  this.comparevalue = comparevalue;
}
/** @return date mask used for the date/time field type */
public String getMask() {
  return mask;
}
/** @param mask date mask used for the date/time field type */
public void setMask( String mask ) {
  this.mask = mask;
}
/** @return name of the result-row field to evaluate */
public String getFieldName() {
  return fieldname;
}
/** @param fieldname name of the result-row field to evaluate */
public void setFieldName( String fieldname ) {
  this.fieldname = fieldname;
}
/** @return name of the variable to evaluate */
public String getVariableName() {
  return variablename;
}
/** @param variablename name of the variable to evaluate */
public void setVariableName( String variablename ) {
  this.variablename = variablename;
}
/** @return single value to compare against */
public String getCompareValue() {
  return comparevalue;
}
/** @param maxvalue upper bound for the "between" condition */
public void setMaxValue( String maxvalue ) {
  this.maxvalue = maxvalue;
}
/** @return upper bound for the "between" condition */
public String getMaxValue() {
  return maxvalue;
}
/**
 * This entry produces a true/false outcome, so downstream hops may branch on it.
 */
@Override
public boolean evaluates() {
  return true;
}
}
| engine/src/main/java/org/pentaho/di/job/entries/simpleeval/JobEntrySimpleEval.java | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.simpleeval;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.regex.Pattern;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.value.ValueMetaString;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* This defines a 'simple evaluation' job entry.
*
* @author Samatar Hassan
* @since 01-01-2009
*/
public class JobEntrySimpleEval extends JobEntryBase implements Cloneable, JobEntryInterface {
private static Class<?> PKG = JobEntrySimpleEval.class; // for i18n purposes, needed by Translator2!!

// Localized labels for the two possible sources of the value to evaluate.
public static final String[] valueTypeDesc = new String[] {
  BaseMessages.getString( PKG, "JobSimpleEval.EvalPreviousField.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.EvalVariable.Label" ),
};
// Codes persisted in XML / repository for the value source; order matches valueTypeDesc.
public static final String[] valueTypeCode = new String[] { "field", "variable" };
public static final int VALUE_TYPE_FIELD = 0;
public static final int VALUE_TYPE_VARIABLE = 1;
// Selected value source (index into valueTypeCode / one of the VALUE_TYPE_* constants).
public int valuetype;

// Localized labels for the string success conditions; order matches successConditionCode.
public static final String[] successConditionDesc = new String[] {
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEqual.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenDifferent.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenContains.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotContains.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenStartWith.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotStartWith.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEndWith.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotEndWith.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenRegExp.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenInList.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotInList.Label" ) };
// Persisted codes for the string success conditions.
// NOTE: "notstatwith" is a historical typo, but it is the code stored in saved
// jobs/repositories — do not correct it or existing jobs will no longer load.
public static final String[] successConditionCode = new String[] {
  "equal", "different", "contains", "notcontains", "startswith", "notstatwith", "endswith", "notendwith",
  "regexp", "inlist", "notinlist" };
public static final int SUCCESS_CONDITION_EQUAL = 0;
public static final int SUCCESS_CONDITION_DIFFERENT = 1;
public static final int SUCCESS_CONDITION_CONTAINS = 2;
public static final int SUCCESS_CONDITION_NOT_CONTAINS = 3;
public static final int SUCCESS_CONDITION_START_WITH = 4;
public static final int SUCCESS_CONDITION_NOT_START_WITH = 5;
public static final int SUCCESS_CONDITION_END_WITH = 6;
public static final int SUCCESS_CONDITION_NOT_END_WITH = 7;
public static final int SUCCESS_CONDITION_REGEX = 8;
public static final int SUCCESS_CONDITION_IN_LIST = 9;
public static final int SUCCESS_CONDITION_NOT_IN_LIST = 10;
// Selected string success condition (one of the SUCCESS_CONDITION_* constants).
public int successcondition;

// Localized labels for the data type of the value being evaluated.
public static final String[] fieldTypeDesc = new String[] {
  BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeString.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeNumber.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeDateTime.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.FieldTypeBoolean.Label" ),
};
// Persisted codes for the field type; order matches fieldTypeDesc.
public static final String[] fieldTypeCode = new String[] { "string", "number", "datetime", "boolean" };
public static final int FIELD_TYPE_STRING = 0;
public static final int FIELD_TYPE_NUMBER = 1;
public static final int FIELD_TYPE_DATE_TIME = 2;
public static final int FIELD_TYPE_BOOLEAN = 3;
// Selected data type (one of the FIELD_TYPE_* constants).
public int fieldtype;

// Localized labels for the numeric/date success conditions; these conditions are
// shared by the number and date/time field types.
public static final String[] successNumberConditionDesc = new String[] {
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenEqual.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenDifferent.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenSmallThan.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenSmallOrEqualThan.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenGreaterThan.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenGreaterOrEqualThan.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessBetween.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenInList.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenNotInList.Label" ), };
// Persisted codes for the numeric/date success conditions.
public static final String[] successNumberConditionCode = new String[] {
  "equal", "different", "smaller", "smallequal", "greater", "greaterequal", "between", "inlist", "notinlist" };
public static final int SUCCESS_NUMBER_CONDITION_EQUAL = 0;
public static final int SUCCESS_NUMBER_CONDITION_DIFFERENT = 1;
public static final int SUCCESS_NUMBER_CONDITION_SMALLER = 2;
public static final int SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL = 3;
public static final int SUCCESS_NUMBER_CONDITION_GREATER = 4;
public static final int SUCCESS_NUMBER_CONDITION_GREATER_EQUAL = 5;
public static final int SUCCESS_NUMBER_CONDITION_BETWEEN = 6;
public static final int SUCCESS_NUMBER_CONDITION_IN_LIST = 7;
public static final int SUCCESS_NUMBER_CONDITION_NOT_IN_LIST = 8;
// Selected numeric/date success condition (one of the SUCCESS_NUMBER_CONDITION_* constants).
public int successnumbercondition;

// Localized labels and persisted codes for the boolean success conditions.
public static final String[] successBooleanConditionDesc = new String[] {
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenTrue.Label" ),
  BaseMessages.getString( PKG, "JobSimpleEval.SuccessWhenFalse.Label" )
};
public static final String[] successBooleanConditionCode = new String[] { "true", "false" };
public static final int SUCCESS_BOOLEAN_CONDITION_TRUE = 0;
public static final int SUCCESS_BOOLEAN_CONDITION_FALSE = 1;
// Selected boolean success condition (one of the SUCCESS_BOOLEAN_CONDITION_* constants).
public int successbooleancondition;

// Name of the result-row field to evaluate (used when valuetype == VALUE_TYPE_FIELD).
private String fieldname;
// Name of the variable to evaluate (used when valuetype == VALUE_TYPE_VARIABLE).
private String variablename;
// Date/time mask applied when fieldtype == FIELD_TYPE_DATE_TIME.
private String mask;
// Value (or comma-separated list) compared against the source value.
private String comparevalue;
// Lower/upper bounds for the "between" conditions.
private String minvalue;
private String maxvalue;
// When true, a variable evaluation succeeds simply if the variable is set.
private boolean successwhenvarset;
/**
 * Creates a named simple-evaluation entry with its defaults: evaluate a
 * string field from the previous result rows, succeeding on "equal".
 *
 * @param n the name of the job entry
 */
public JobEntrySimpleEval( String n ) {
  super( n, "" );
  valuetype = VALUE_TYPE_FIELD;
  fieldtype = FIELD_TYPE_STRING;
  successcondition = SUCCESS_CONDITION_EQUAL;
  successnumbercondition = SUCCESS_NUMBER_CONDITION_EQUAL;
  successbooleancondition = SUCCESS_BOOLEAN_CONDITION_FALSE;
  successwhenvarset = false;
  fieldname = null;
  variablename = null;
  mask = null;
  comparevalue = null;
  minvalue = null;
  maxvalue = null;
}
/** Creates an unnamed simple-evaluation entry with default settings. */
public JobEntrySimpleEval() {
  this( "" );
}
/**
 * Returns a shallow copy of this job entry; a shallow copy is sufficient
 * because all configuration fields are immutable strings or primitives.
 */
@Override
public Object clone() {
  return (JobEntrySimpleEval) super.clone();
}
/** Returns the persisted code for a value type, defaulting to the first entry when out of range. */
private static String getValueTypeCode( int i ) {
  return ( i < 0 || i >= valueTypeCode.length ) ? valueTypeCode[0] : valueTypeCode[i];
}
/** Returns the persisted code for a field type, defaulting to the first entry when out of range. */
private static String getFieldTypeCode( int i ) {
  return ( i < 0 || i >= fieldTypeCode.length ) ? fieldTypeCode[0] : fieldTypeCode[i];
}
/** Returns the persisted code for a string success condition, defaulting to the first entry when out of range. */
private static String getSuccessConditionCode( int i ) {
  return ( i < 0 || i >= successConditionCode.length ) ? successConditionCode[0] : successConditionCode[i];
}
/** Returns the persisted code for a numeric/date success condition, defaulting to the first entry when out of range. */
public static String getSuccessNumberConditionCode( int i ) {
  return ( i < 0 || i >= successNumberConditionCode.length )
    ? successNumberConditionCode[0] : successNumberConditionCode[i];
}
/** Returns the persisted code for a boolean success condition, defaulting to the first entry when out of range. */
private static String getSuccessBooleanConditionCode( int i ) {
  return ( i < 0 || i >= successBooleanConditionCode.length )
    ? successBooleanConditionCode[0] : successBooleanConditionCode[i];
}
/**
 * Serializes this entry's configuration to an XML fragment.
 * Tag names and the code strings written here are the persistence format;
 * they must stay in sync with {@link #loadXML}.
 */
@Override
public String getXML() {
  StringBuilder retval = new StringBuilder( 300 );
  retval.append( super.getXML() );
  retval.append( "      " ).append( XMLHandler.addTagValue( "valuetype", getValueTypeCode( valuetype ) ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "fieldname", fieldname ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "variablename", variablename ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "fieldtype", getFieldTypeCode( fieldtype ) ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "mask", mask ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "comparevalue", comparevalue ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "minvalue", minvalue ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "maxvalue", maxvalue ) );
  retval.append( "      " ).append(
    XMLHandler.addTagValue( "successcondition", getSuccessConditionCode( successcondition ) ) );
  retval
    .append( "      " ).append(
      XMLHandler.addTagValue(
        "successnumbercondition", getSuccessNumberConditionCode( successnumbercondition ) ) );
  retval.append( "      " ).append(
    XMLHandler.addTagValue(
      "successbooleancondition", getSuccessBooleanConditionCode( successbooleancondition ) ) );
  retval.append( "      " ).append( XMLHandler.addTagValue( "successwhenvarset", successwhenvarset ) );
  return retval.toString();
}
/** Resolves a persisted value-type code to its index; unknown or null codes map to 0. */
private static int getValueTypeByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < valueTypeCode.length; idx++ ) {
      if ( valueTypeCode[idx].equalsIgnoreCase( tt ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/** Resolves a persisted numeric-condition code to its index; unknown or null codes map to 0. */
private static int getSuccessNumberByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successNumberConditionCode.length; idx++ ) {
      if ( successNumberConditionCode[idx].equalsIgnoreCase( tt ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/** Resolves a persisted boolean-condition code to its index; unknown or null codes map to 0. */
private static int getSuccessBooleanByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successBooleanConditionCode.length; idx++ ) {
      if ( successBooleanConditionCode[idx].equalsIgnoreCase( tt ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/** Resolves a persisted field-type code to its index; unknown or null codes map to 0. */
private static int getFieldTypeByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < fieldTypeCode.length; idx++ ) {
      if ( fieldTypeCode[idx].equalsIgnoreCase( tt ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/** Resolves a persisted string-condition code to its index; unknown or null codes map to 0. */
private static int getSuccessConditionByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successConditionCode.length; idx++ ) {
      if ( successConditionCode[idx].equalsIgnoreCase( tt ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/**
 * Sets whether a variable evaluation succeeds merely because the variable
 * is set, regardless of its value.
 *
 * @param successwhenvarset true to succeed whenever the variable is defined
 */
public void setSuccessWhenVarSet( boolean successwhenvarset ) {
  this.successwhenvarset = successwhenvarset;
}
/**
 * @return true when a variable evaluation succeeds merely because the
 *         variable is set, regardless of its value
 */
public boolean isSuccessWhenVarSet() {
  return this.successwhenvarset;
}
/** Resolves a persisted numeric-condition code to its index; unknown or null codes map to 0. */
public static int getSuccessNumberConditionByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successNumberConditionCode.length; idx++ ) {
      if ( successNumberConditionCode[idx].equalsIgnoreCase( tt ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/** Resolves a persisted boolean-condition code to its index; unknown or null codes map to 0. */
private static int getSuccessBooleanConditionByCode( String tt ) {
  if ( tt != null ) {
    for ( int idx = 0; idx < successBooleanConditionCode.length; idx++ ) {
      if ( successBooleanConditionCode[idx].equalsIgnoreCase( tt ) ) {
        return idx;
      }
    }
  }
  return 0;
}
/**
 * Restores this entry's configuration from its XML fragment.
 * Tag names must stay in sync with {@link #getXML}; unknown or missing codes
 * fall back to index 0 via the *ByCode helpers.
 */
@Override
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    valuetype = getValueTypeByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "valuetype" ), "" ) );
    fieldname = XMLHandler.getTagValue( entrynode, "fieldname" );
    fieldtype = getFieldTypeByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "fieldtype" ), "" ) );
    variablename = XMLHandler.getTagValue( entrynode, "variablename" );
    mask = XMLHandler.getTagValue( entrynode, "mask" );
    comparevalue = XMLHandler.getTagValue( entrynode, "comparevalue" );
    minvalue = XMLHandler.getTagValue( entrynode, "minvalue" );
    maxvalue = XMLHandler.getTagValue( entrynode, "maxvalue" );
    successcondition =
      getSuccessConditionByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "successcondition" ), "" ) );
    successnumbercondition =
      getSuccessNumberConditionByCode( Const.NVL(
        XMLHandler.getTagValue( entrynode, "successnumbercondition" ), "" ) );
    successbooleancondition =
      getSuccessBooleanConditionByCode( Const.NVL( XMLHandler.getTagValue(
        entrynode, "successbooleancondition" ), "" ) );
    // Stored as "Y"/"N"; anything other than "Y" is treated as false.
    successwhenvarset = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "successwhenvarset" ) );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException(
      BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableLoadXML" ), xe );
  }
}
/**
 * Restores this entry's configuration from the repository.
 * Attribute names must stay in sync with {@link #saveRep}; unknown or
 * missing codes fall back to index 0 via the *ByCode helpers.
 */
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    valuetype = getValueTypeByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "valuetype" ), "" ) );
    fieldname = rep.getJobEntryAttributeString( id_jobentry, "fieldname" );
    variablename = rep.getJobEntryAttributeString( id_jobentry, "variablename" );
    fieldtype = getFieldTypeByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "fieldtype" ), "" ) );
    mask = rep.getJobEntryAttributeString( id_jobentry, "mask" );
    comparevalue = rep.getJobEntryAttributeString( id_jobentry, "comparevalue" );
    minvalue = rep.getJobEntryAttributeString( id_jobentry, "minvalue" );
    maxvalue = rep.getJobEntryAttributeString( id_jobentry, "maxvalue" );
    successcondition =
      getSuccessConditionByCode( Const.NVL(
        rep.getJobEntryAttributeString( id_jobentry, "successcondition" ), "" ) );
    successnumbercondition =
      getSuccessNumberConditionByCode( Const.NVL( rep.getJobEntryAttributeString(
        id_jobentry, "successnumbercondition" ), "" ) );
    successbooleancondition =
      getSuccessBooleanConditionByCode( Const.NVL( rep.getJobEntryAttributeString(
        id_jobentry, "successbooleancondition" ), "" ) );
    successwhenvarset = rep.getJobEntryAttributeBoolean( id_jobentry, "successwhenvarset" );
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableLoadRep" )
      + id_jobentry, dbe );
  }
}
/**
 * Persists this entry's configuration to the repository.
 * Attribute names must stay in sync with {@link #loadRep}.
 */
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "valuetype", getValueTypeCode( valuetype ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "fieldname", fieldname );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "variablename", variablename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "fieldtype", getFieldTypeCode( fieldtype ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "mask", mask );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "comparevalue", comparevalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "minvalue", minvalue );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "maxvalue", maxvalue );
    rep.saveJobEntryAttribute(
      id_job, getObjectId(), "successcondition", getSuccessConditionCode( successcondition ) );
    rep
      .saveJobEntryAttribute(
        id_job, getObjectId(), "successnumbercondition",
        getSuccessNumberConditionCode( successnumbercondition ) );
    rep.saveJobEntryAttribute(
      id_job, getObjectId(), "successbooleancondition",
      getSuccessBooleanConditionCode( successbooleancondition ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "successwhenvarset", successwhenvarset );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntrySimple.Error.Exception.UnableSaveRep" )
      + id_job, dbe );
  }
}
/**
 * Evaluates a value — taken either from the named field of the first row of
 * the previous result, or from a variable — against the configured success
 * condition, and records the outcome via {@code result.setResult(...)}.
 *
 * <p>Per PDI-6943 this entry never reports errors: the error count is reset
 * to 0 on every successful evaluation path, whatever the outcome.</p>
 *
 * <p>Fixes in this revision: the date/time "in list" and "not in list"
 * conditions now parse each comma-separated element ({@code parts[i]})
 * instead of the whole list string; a null previous-result row list no
 * longer throws an NPE; the "greater" numeric case logs its comparison in
 * debug mode like every sibling case.</p>
 *
 * @param previousResult the result of the previous job entry execution
 * @param nr the job entry number (unused)
 * @return the same result object with the evaluation outcome set
 * @throws KettleException declared by the interface; not thrown directly here
 */
@Override
public Result execute( Result previousResult, int nr ) throws KettleException {
  Result result = previousResult;
  result.setNrErrors( 1 );
  result.setResult( false );

  // --- Step 1: obtain the source value to evaluate. ---
  String sourcevalue = null;
  switch ( valuetype ) {
    case VALUE_TYPE_FIELD:
      List<RowMetaAndData> rows = result.getRows();
      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.Log.ArgFromPrevious.Found", ( rows != null
          ? rows.size() : 0 )
          + "" ) );
      }
      // Guard the null case too, so a missing previous result set logs an
      // error instead of throwing an NPE on rows.size().
      if ( rows == null || rows.size() == 0 ) {
        logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.NoRows" ) );
        return result;
      }
      // Only the first row of the previous result is evaluated.
      RowMetaAndData resultRow = rows.get( 0 );
      String realfieldname = environmentSubstitute( fieldname );
      int indexOfField = resultRow.getRowMeta().indexOfValue( realfieldname );
      if ( indexOfField == -1 ) {
        logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.FieldNotExist", realfieldname ) );
        return result;
      }
      sourcevalue = resultRow.getString( indexOfField, null );
      if ( sourcevalue == null ) {
        sourcevalue = "";
      }
      break;
    case VALUE_TYPE_VARIABLE:
      if ( Utils.isEmpty( variablename ) ) {
        logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.VariableMissing" ) );
        return result;
      }
      if ( isSuccessWhenVarSet() ) {
        // Success is determined solely by whether the variable is set;
        // strip any ${...}/%%...%% specification before the lookup.
        String variableName = StringUtil.getVariableName( Const.NVL( getVariableName(), "" ) );
        // If the variable is not set, null is returned.
        String value = getVariable( variableName );
        if ( value != null ) {
          if ( isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.VariableSet", variableName ) );
          }
          result.setResult( true );
          result.setNrErrors( 0 );
          return result;
        } else {
          if ( isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.VariableNotSet", variableName ) );
          }
          // PDI-6943: this job entry does not set errors upon evaluation, independently of the outcome of the check
          result.setNrErrors( 0 );
          return result;
        }
      }
      sourcevalue = environmentSubstitute( getVariableWithSpec() );
      break;
    default:
      break;
  }
  if ( isDetailed() ) {
    logDetailed( BaseMessages.getString( PKG, "JobSimpleEval.Log.ValueToevaluate", sourcevalue ) );
  }

  // --- Step 2: compare the source value according to the field type. ---
  boolean success = false;
  String realCompareValue = environmentSubstitute( comparevalue );
  if ( realCompareValue == null ) {
    realCompareValue = "";
  }
  String realMinValue = environmentSubstitute( minvalue );
  String realMaxValue = environmentSubstitute( maxvalue );
  switch ( fieldtype ) {
    case FIELD_TYPE_STRING:
      switch ( successcondition ) {
        case SUCCESS_CONDITION_EQUAL: // equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( sourcevalue.equals( realCompareValue ) );
          if ( valuetype == VALUE_TYPE_VARIABLE && !success ) {
            // make the empty value evaluate to true when compared to a not set variable
            if ( Utils.isEmpty( realCompareValue ) ) {
              String variableName = StringUtil.getVariableName( variablename );
              if ( System.getProperty( variableName ) == null ) {
                success = true;
              }
            }
          }
          break;
        case SUCCESS_CONDITION_DIFFERENT: // different
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( !sourcevalue.equals( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_CONTAINS: // contains
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( sourcevalue.contains( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_NOT_CONTAINS: // not contains
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( !sourcevalue.contains( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_START_WITH: // starts with
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( sourcevalue.startsWith( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_NOT_START_WITH: // not start with
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( !sourcevalue.startsWith( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_END_WITH: // ends with
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( sourcevalue.endsWith( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_NOT_END_WITH: // not ends with
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( !sourcevalue.endsWith( realCompareValue ) );
          break;
        case SUCCESS_CONDITION_REGEX: // regexp
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          success = ( Pattern.compile( realCompareValue ).matcher( sourcevalue ).matches() );
          break;
        case SUCCESS_CONDITION_IN_LIST: // in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          realCompareValue = Const.NVL( realCompareValue, "" );
          String[] parts = realCompareValue.split( "," );
          for ( int i = 0; i < parts.length && !success; i++ ) {
            success = ( sourcevalue.equals( parts[i].trim() ) );
          }
          break;
        case SUCCESS_CONDITION_NOT_IN_LIST: // not in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          realCompareValue = Const.NVL( realCompareValue, "" );
          parts = realCompareValue.split( "," );
          success = true;
          for ( int i = 0; i < parts.length && success; i++ ) {
            success = !( sourcevalue.equals( parts[i].trim() ) );
          }
          break;
        default:
          break;
      }
      break;
    case FIELD_TYPE_NUMBER:
      double valuenumber;
      try {
        valuenumber = Double.parseDouble( sourcevalue );
      } catch ( Exception e ) {
        logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", sourcevalue, e
          .getMessage() ) );
        return result;
      }
      double valuecompare;
      switch ( successnumbercondition ) {
        case SUCCESS_NUMBER_CONDITION_EQUAL: // equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber == valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_DIFFERENT: // different
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber != valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_SMALLER: // smaller
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber < valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber <= valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_GREATER: // greater
          // Log the comparison in debug mode, consistent with the other cases.
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber > valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            valuecompare = Double.parseDouble( realCompareValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realCompareValue, e.getMessage() ) );
            return result;
          }
          success = ( valuenumber >= valuecompare );
          break;
        case SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValues", realMinValue, realMaxValue ) );
          }
          double valuemin;
          try {
            valuemin = Double.parseDouble( realMinValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realMinValue, e
              .getMessage() ) );
            return result;
          }
          double valuemax;
          try {
            valuemax = Double.parseDouble( realMaxValue );
          } catch ( Exception e ) {
            logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableNumber", realMaxValue, e
              .getMessage() ) );
            return result;
          }
          if ( valuemin >= valuemax ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.IncorrectNumbers", realMinValue, realMaxValue ) );
            return result;
          }
          success = ( valuenumber >= valuemin && valuenumber <= valuemax );
          break;
        case SUCCESS_NUMBER_CONDITION_IN_LIST: // in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          String[] parts = realCompareValue.split( "," );
          for ( int i = 0; i < parts.length && !success; i++ ) {
            try {
              valuecompare = Double.parseDouble( parts[i] );
            } catch ( Exception e ) {
              logError( toString(), BaseMessages.getString(
                PKG, "JobEntrySimpleEval.Error.UnparsableNumber", parts[i], e.getMessage() ) );
              return result;
            }
            success = ( valuenumber == valuecompare );
          }
          break;
        case SUCCESS_NUMBER_CONDITION_NOT_IN_LIST: // not in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          realCompareValue = Const.NVL( realCompareValue, "" );
          parts = realCompareValue.split( "," );
          success = true;
          for ( int i = 0; i < parts.length && success; i++ ) {
            try {
              valuecompare = Double.parseDouble( parts[i] );
            } catch ( Exception e ) {
              logError( toString(), BaseMessages.getString(
                PKG, "JobEntrySimpleEval.Error.UnparsableNumber", parts[i], e.getMessage() ) );
              return result;
            }
            success = ( valuenumber != valuecompare );
          }
          break;
        default:
          break;
      }
      break;
    case FIELD_TYPE_DATE_TIME:
      String realMask = environmentSubstitute( mask );
      SimpleDateFormat df = new SimpleDateFormat();
      if ( !Utils.isEmpty( realMask ) ) {
        df.applyPattern( realMask );
      }
      Date datevalue = null;
      try {
        datevalue = convertToDate( sourcevalue, realMask, df );
      } catch ( Exception e ) {
        logError( e.getMessage() );
        return result;
      }
      Date datecompare;
      switch ( successnumbercondition ) {
        case SUCCESS_NUMBER_CONDITION_EQUAL: // equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( datevalue.equals( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_DIFFERENT: // different
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( !datevalue.equals( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_SMALLER: // smaller
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( datevalue.before( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( datevalue.before( datecompare ) || datevalue.equals( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_GREATER: // greater
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( datevalue.after( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          try {
            datecompare = convertToDate( realCompareValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          success = ( datevalue.after( datecompare ) || datevalue.equals( datecompare ) );
          break;
        case SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValues", realMinValue, realMaxValue ) );
          }
          Date datemin;
          try {
            datemin = convertToDate( realMinValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          Date datemax;
          try {
            datemax = convertToDate( realMaxValue, realMask, df );
          } catch ( Exception e ) {
            logError( e.getMessage() );
            return result;
          }
          if ( datemin.after( datemax ) || datemin.equals( datemax ) ) {
            logError( BaseMessages.getString(
              PKG, "JobEntrySimpleEval.Error.IncorrectDates", realMinValue, realMaxValue ) );
            return result;
          }
          success =
            ( ( datevalue.after( datemin )
              || datevalue.equals( datemin ) ) && ( datevalue.before( datemax )
              || datevalue.equals( datemax ) ) );
          break;
        case SUCCESS_NUMBER_CONDITION_IN_LIST: // in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          String[] parts = realCompareValue.split( "," );
          for ( int i = 0; i < parts.length && !success; i++ ) {
            try {
              // Fix: parse each list element, not the whole comma-separated string.
              datecompare = convertToDate( parts[i].trim(), realMask, df );
            } catch ( Exception e ) {
              logError( toString(), e.getMessage() );
              return result;
            }
            success = ( datevalue.equals( datecompare ) );
          }
          break;
        case SUCCESS_NUMBER_CONDITION_NOT_IN_LIST: // not in list
          if ( isDebug() ) {
            logDebug( BaseMessages.getString(
              PKG, "JobSimpleEval.Log.CompareWithValue", sourcevalue, realCompareValue ) );
          }
          realCompareValue = Const.NVL( realCompareValue, "" );
          parts = realCompareValue.split( "," );
          success = true;
          for ( int i = 0; i < parts.length && success; i++ ) {
            try {
              // Fix: parse each list element, not the whole comma-separated string.
              datecompare = convertToDate( parts[i].trim(), realMask, df );
            } catch ( Exception e ) {
              logError( toString(), e.getMessage() );
              return result;
            }
            success = ( !datevalue.equals( datecompare ) );
          }
          break;
        default:
          break;
      }
      break;
    case FIELD_TYPE_BOOLEAN:
      boolean valuebool;
      try {
        valuebool = ValueMetaString.convertStringToBoolean( sourcevalue );
      } catch ( Exception e ) {
        logError( BaseMessages.getString( PKG, "JobEntrySimpleEval.Error.UnparsableBoolean", sourcevalue, e
          .getMessage() ) );
        return result;
      }
      switch ( successbooleancondition ) {
        case SUCCESS_BOOLEAN_CONDITION_FALSE: // false
          success = ( !valuebool );
          break;
        case SUCCESS_BOOLEAN_CONDITION_TRUE: // true
          success = ( valuebool );
          break;
        default:
          break;
      }
      break;
    default:
      break;
  }
  result.setResult( success );
  // PDI-6943: this job entry does not set errors upon evaluation, independently of the outcome of the check
  result.setNrErrors( 0 );
  return result;
}
/*
* Returns variable with specifications
*/
/**
 * Returns the configured variable name wrapped in a variable specification
 * (e.g. {@code ${name}}) when the user entered a bare name; a name that
 * already carries any Unix/Windows/hex open or close marker is returned
 * unchanged.
 *
 * @return the variable reference ready for environment substitution
 */
private String getVariableWithSpec() {
  String variable = getVariableName();
  // Only wrap when none of the three specification styles is present at all.
  if ( ( !variable.contains( StringUtil.UNIX_OPEN ) && !variable.contains( StringUtil.WINDOWS_OPEN ) && !variable
    .contains( StringUtil.HEX_OPEN ) )
    && ( ( !variable.contains( StringUtil.UNIX_CLOSE ) && !variable.contains( StringUtil.WINDOWS_CLOSE ) && !variable
    .contains( StringUtil.HEX_CLOSE ) ) ) ) {
    // Add specifications to variable
    variable = StringUtil.UNIX_OPEN + variable + StringUtil.UNIX_CLOSE;
    if ( isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEntrySimpleEval.CheckingVariable", variable ) );
    }
  }
  return variable;
}
/**
 * Parses a date/time string using the supplied formatter.
 *
 * @param valueString the text to parse
 * @param mask the pattern already applied to {@code df} (kept for signature
 *             compatibility; not used directly here)
 * @param df the formatter to parse with
 * @return the parsed date
 * @throws KettleException if the value cannot be parsed
 */
private Date convertToDate( String valueString, String mask, SimpleDateFormat df ) throws KettleException {
  try {
    return df.parse( valueString );
  } catch ( Exception e ) {
    // Preserve the original exception as the cause so the root parse
    // failure is not lost from the stack trace.
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntrySimpleEval.Error.UnparsableDate", valueString ), e );
  }
}
/** Returns the localized label for a value type, defaulting to the first entry when out of range. */
public static String getValueTypeDesc( int i ) {
  return ( i < 0 || i >= valueTypeDesc.length ) ? valueTypeDesc[0] : valueTypeDesc[i];
}
/** Returns the localized label for a field type, defaulting to the first entry when out of range. */
public static String getFieldTypeDesc( int i ) {
  return ( i < 0 || i >= fieldTypeDesc.length ) ? fieldTypeDesc[0] : fieldTypeDesc[i];
}
/** Returns the localized label for a string success condition, defaulting to the first entry when out of range. */
public static String getSuccessConditionDesc( int i ) {
  return ( i < 0 || i >= successConditionDesc.length ) ? successConditionDesc[0] : successConditionDesc[i];
}
public static String getSuccessNumberConditionDesc( int i ) {
if ( i < 0 || i >= successNumberConditionDesc.length ) {
return successNumberConditionDesc[0];
}
return successNumberConditionDesc[i];
}
public static String getSuccessBooleanConditionDesc( int i ) {
if ( i < 0 || i >= successBooleanConditionDesc.length ) {
return successBooleanConditionDesc[0];
}
return successBooleanConditionDesc[i];
}
public static int getValueTypeByDesc( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < valueTypeDesc.length; i++ ) {
if ( valueTypeDesc[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
// If this fails, try to match using the code.
return getValueTypeByCode( tt );
}
public static int getFieldTypeByDesc( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < fieldTypeDesc.length; i++ ) {
if ( fieldTypeDesc[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
// If this fails, try to match using the code.
return getFieldTypeByCode( tt );
}
public static int getSuccessConditionByDesc( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successConditionDesc.length; i++ ) {
if ( successConditionDesc[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
// If this fails, try to match using the code.
return getSuccessConditionByCode( tt );
}
public static int getSuccessNumberConditionByDesc( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successNumberConditionDesc.length; i++ ) {
if ( successNumberConditionDesc[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
// If this fails, try to match using the code.
return getSuccessNumberByCode( tt );
}
public static int getSuccessBooleanConditionByDesc( String tt ) {
if ( tt == null ) {
return 0;
}
for ( int i = 0; i < successBooleanConditionDesc.length; i++ ) {
if ( successBooleanConditionDesc[i].equalsIgnoreCase( tt ) ) {
return i;
}
}
// If this fails, try to match using the code.
return getSuccessBooleanByCode( tt );
}
public void setMinValue( String minvalue ) {
this.minvalue = minvalue;
}
public String getMinValue() {
return minvalue;
}
public void setCompareValue( String comparevalue ) {
this.comparevalue = comparevalue;
}
public String getMask() {
return mask;
}
public void setMask( String mask ) {
this.mask = mask;
}
public String getFieldName() {
return fieldname;
}
public void setFieldName( String fieldname ) {
this.fieldname = fieldname;
}
public String getVariableName() {
return variablename;
}
public void setVariableName( String variablename ) {
this.variablename = variablename;
}
public String getCompareValue() {
return comparevalue;
}
public void setMaxValue( String maxvalue ) {
this.maxvalue = maxvalue;
}
public String getMaxValue() {
return maxvalue;
}
@Override
public boolean evaluates() {
return true;
}
}
| [PDI-18437] Simple Evaluation is true when string compared to NULL
| engine/src/main/java/org/pentaho/di/job/entries/simpleeval/JobEntrySimpleEval.java | [PDI-18437] Simple Evaluation is true when string compared to NULL |
|
Java | apache-2.0 | 81d8a801528b60478a784ed555d83e97217b3622 | 0 | consulo/consulo,consulo/consulo,consulo/consulo,ernestp/consulo,consulo/consulo,consulo/consulo,ernestp/consulo,ernestp/consulo,ernestp/consulo,consulo/consulo,ernestp/consulo,ernestp/consulo | /*
* Copyright 2013 Consulo.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.consulo.idea.util.projectWizard;
import com.intellij.ide.highlighter.ModuleFileType;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.components.PathMacroManager;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.ui.configuration.ModulesProvider;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.packaging.artifacts.ModifiableArtifactModel;
import com.intellij.projectImport.ProjectImportBuilder;
import org.consulo.idea.IdeaConstants;
import org.consulo.idea.IdeaIcons;
import org.consulo.idea.file.IdeaModuleFileType;
import org.consulo.module.extension.ModuleExtensionProvider;
import org.consulo.module.extension.ModuleExtensionProviderEP;
import org.consulo.module.extension.MutableModuleExtension;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.xpath.XPath;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author VISTALL
* @since 18:49/14.06.13
*/
public class IdeaProjectImportBuilder extends ProjectImportBuilder<Object> {
@NotNull
@Override
public String getName() {
return "IntelliJ IDEA";
}
@Override
public Icon getIcon() {
return IdeaIcons.Idea;
}
@Override
public List<Object> getList() {
return null;
}
@Override
public boolean isMarked(Object element) {
return false;
}
@Override
public void setList(List<Object> list) throws ConfigurationException {
}
@Override
public void setOpenProjectSettingsAfter(boolean on) {
}
@Nullable
@Override
public List<Module> commit(Project project,
ModifiableModuleModel model,
ModulesProvider modulesProvider,
ModifiableArtifactModel artifactModel) {
final String projectPath = project.getBasePath();
File file = new File(projectPath, IdeaConstants.PROJECT_DIR);
if (!file.exists()) {
return null;
}
List<Module> modules = Collections.emptyList();
try {
modules = loadModules(file, model, project);
}
catch (JDOMException e) {
e.printStackTrace();
}
catch (IOException e) {
e.printStackTrace();
}
return modules;
}
private static List<Module> loadModules(File ideaDir, ModifiableModuleModel modifiableModuleModel, Project project) throws JDOMException, IOException {
List<Module> modules = new ArrayList<Module>();
File modulesFile = new File(ideaDir, "modules.xml");
final Document document = JDOMUtil.loadDocument(modulesFile);
PathMacroManager.getInstance(project).expandPaths(document.getRootElement());
XPath xpathExpression = XPath.newInstance("/project[@version='4']/component[@name='ProjectModuleManager']/modules/*");
final List list = xpathExpression.selectNodes(document);
for (Object o : list) {
Element element = (Element)o;
String filepath = element.getAttributeValue("filepath");
if (filepath == null) {
continue;
}
modules.add(loadModule(filepath, modifiableModuleModel, project));
}
return modules;
}
private static Module loadModule(String moduleFilePath, ModifiableModuleModel originalModel, Project project) throws JDOMException, IOException {
final boolean fromProjectStructure = originalModel != null;
final Document document = JDOMUtil.loadDocument(new File(moduleFilePath));
final ModifiableModuleModel newModel = fromProjectStructure ? originalModel : ModuleManager.getInstance(project).getModifiableModel();
final Module module =
newModel.newModule(moduleFilePath.replace(IdeaModuleFileType.DEFAULT_EXTENSION, ModuleFileType.DEFAULT_EXTENSION));
final Element rootElement = document.getRootElement();
PathMacroManager.getInstance(module).expandPaths(document.getRootElement());
String moduleType = rootElement.getAttributeValue("type");
final ModifiableRootModel modifiableModel = ModuleRootManager.getInstance(module).getModifiableModel();
enableExtensionsByType(moduleType, modifiableModel);
XPath xpathExpression = XPath.newInstance("/module[@version='4']/component[@name='NewModuleRootManager']/*");
final List list = xpathExpression.selectNodes(document);
for (Object o : list) {
Element element = (Element)o;
final String name = element.getName();
if ("content".equals(name)) {
final String url = element.getAttributeValue("url");
final ContentEntry contentEntry = modifiableModel.addContentEntry(url);
for(Element childOfContent : element.getChildren()) {
final String nameChildOfContent = childOfContent.getName();
if("sourceFolder".equals(nameChildOfContent)) {
String url2 = childOfContent.getAttributeValue("url");
boolean isTestSource = Boolean.valueOf(childOfContent.getAttributeValue("isTestSource"));
contentEntry.addFolder(url2, isTestSource ? ContentFolderType.TEST : ContentFolderType.SOURCE);
}
}
}
}
new WriteAction<Object>()
{
@Override
protected void run(Result<Object> result) throws Throwable {
modifiableModel.commit();
}
}.execute();
if(!fromProjectStructure) {
new WriteAction<Object>()
{
@Override
protected void run(Result<Object> result) throws Throwable {
newModel.commit();
}
}.execute();
}
return module;
}
private static void enableExtensionsByType(@NotNull String moduleType, ModuleRootModel rootModel) {
if (moduleType.equals("JAVA_MODULE")) {
enableExtensionFor("java", rootModel);
}
else {
enableExtensionFor("java", rootModel);
enableExtensionFor("consulo-plugin", rootModel);
}
}
private static void enableExtensionFor(@NotNull String id, @NotNull ModuleRootModel rootModel) {
final ModuleExtensionProvider provider = ModuleExtensionProviderEP.findProvider(id);
if (provider == null) {
return;
}
final MutableModuleExtension extension = (MutableModuleExtension)rootModel.getExtensionWithoutCheck(provider.getImmutableClass());
assert extension != null;
extension.setEnabled(true);
}
}
| plugins/idea/src/org/consulo/idea/util/projectWizard/IDEAProjectImportBuilder.java | /*
* Copyright 2013 Consulo.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.consulo.idea.util.projectWizard;
import com.intellij.ide.highlighter.ModuleFileType;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.components.PathMacroManager;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.ui.configuration.ModulesProvider;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.packaging.artifacts.ModifiableArtifactModel;
import com.intellij.projectImport.ProjectImportBuilder;
import org.consulo.idea.IdeaConstants;
import org.consulo.idea.IdeaIcons;
import org.consulo.idea.file.IdeaModuleFileType;
import org.consulo.module.extension.ModuleExtensionProvider;
import org.consulo.module.extension.ModuleExtensionProviderEP;
import org.consulo.module.extension.MutableModuleExtension;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.xpath.XPath;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author VISTALL
* @since 18:49/14.06.13
*/
public class IdeaProjectImportBuilder extends ProjectImportBuilder<Object> {
@NotNull
@Override
public String getName() {
return "IntelliJ IDEA";
}
@Override
public Icon getIcon() {
return IdeaIcons.Idea;
}
@Override
public List<Object> getList() {
return null;
}
@Override
public boolean isMarked(Object element) {
return false;
}
@Override
public void setList(List<Object> list) throws ConfigurationException {
}
@Override
public void setOpenProjectSettingsAfter(boolean on) {
}
@Nullable
@Override
public List<Module> commit(Project project,
ModifiableModuleModel model,
ModulesProvider modulesProvider,
ModifiableArtifactModel artifactModel) {
final String projectPath = project.getBasePath();
File file = new File(projectPath, IdeaConstants.PROJECT_DIR);
if (!file.exists()) {
return null;
}
List<Module> modules = Collections.emptyList();
try {
modules = loadModules(projectPath, file, model, modulesProvider, project);
}
catch (JDOMException e) {
e.printStackTrace();
}
catch (IOException e) {
e.printStackTrace();
}
return modules;
}
private static List<Module> loadModules(String projectPath,
File ideaDir,
ModifiableModuleModel modifiableModuleModel,
ModulesProvider modulesProvider, Project project) throws JDOMException, IOException {
List<Module> modules = new ArrayList<Module>();
File modulesFile = new File(ideaDir, "modules.xml");
final Document document = JDOMUtil.loadDocument(modulesFile);
PathMacroManager.getInstance(project).expandPaths(document.getRootElement());
XPath xpathExpression = XPath.newInstance("/project[@version='4']/component[@name='ProjectModuleManager']/modules/*");
final List list = xpathExpression.selectNodes(document);
for (Object o : list) {
Element element = (Element)o;
String filepath = element.getAttributeValue("filepath");
if (filepath == null) {
continue;
}
filepath = StringUtil.replace(filepath, "$PROJECT_DIR$", projectPath);
modules.add(loadModule(projectPath, filepath, modifiableModuleModel, modulesProvider, project));
}
return modules;
}
private static Module loadModule(String projectPath,
String moduleFilePath,
ModifiableModuleModel originalModel,
ModulesProvider modulesProvider, Project project) throws JDOMException, IOException {
final boolean fromProjectStructure = originalModel != null;
final Document document = JDOMUtil.loadDocument(new File(moduleFilePath));
final ModifiableModuleModel newModel = fromProjectStructure ? originalModel : ModuleManager.getInstance(project).getModifiableModel();
final Module module =
newModel.newModule(moduleFilePath.replace(IdeaModuleFileType.DEFAULT_EXTENSION, ModuleFileType.DEFAULT_EXTENSION));
final Element rootElement = document.getRootElement();
PathMacroManager.getInstance(module).expandPaths(document.getRootElement());
String moduleType = rootElement.getAttributeValue("type");
final ModifiableRootModel modifiableModel = ModuleRootManager.getInstance(module).getModifiableModel();
enableExtensionsByType(moduleType, modifiableModel);
XPath xpathExpression = XPath.newInstance("/module[@version='4']/component[@name='NewModuleRootManager']/*");
final List list = xpathExpression.selectNodes(document);
for (Object o : list) {
Element element = (Element)o;
final String name = element.getName();
if ("content".equals(name)) {
final String url = element.getAttributeValue("url");
final ContentEntry contentEntry = modifiableModel.addContentEntry(url);
for(Element childOfContent : element.getChildren()) {
final String nameChildOfContent = childOfContent.getName();
if("sourceFolder".equals(nameChildOfContent)) {
String url2 = childOfContent.getAttributeValue("url");
boolean isTestSource = Boolean.valueOf(childOfContent.getAttributeValue("isTestSource"));
contentEntry.addFolder(url2, isTestSource ? ContentFolderType.TEST : ContentFolderType.SOURCE);
}
}
}
}
new WriteAction<Object>()
{
@Override
protected void run(Result<Object> result) throws Throwable {
modifiableModel.commit();
}
}.execute();
if(!fromProjectStructure) {
new WriteAction<Object>()
{
@Override
protected void run(Result<Object> result) throws Throwable {
newModel.commit();
}
}.execute();
}
return module;
}
private static void enableExtensionsByType(@NotNull String moduleType, ModuleRootModel rootModel) {
if (moduleType.equals("JAVA_MODULE")) {
enableExtensionFor("java", rootModel);
}
else {
enableExtensionFor("java", rootModel);
enableExtensionFor("consulo-plugin", rootModel);
}
}
private static void enableExtensionFor(@NotNull String id, @NotNull ModuleRootModel rootModel) {
final ModuleExtensionProvider provider = ModuleExtensionProviderEP.findProvider(id);
if (provider == null) {
return;
}
final MutableModuleExtension extension = (MutableModuleExtension)rootModel.getExtensionWithoutCheck(provider.getImmutableClass());
assert extension != null;
extension.setEnabled(true);
}
}
| cleanup
| plugins/idea/src/org/consulo/idea/util/projectWizard/IDEAProjectImportBuilder.java | cleanup |
|
Java | apache-2.0 | 2f699f6049e639f987e45708cff379ca70ee69ec | 0 | PKRoma/simple-binary-encoding,real-logic/simple-binary-encoding,real-logic/simple-binary-encoding,real-logic/simple-binary-encoding,PKRoma/simple-binary-encoding,PKRoma/simple-binary-encoding,PKRoma/simple-binary-encoding,real-logic/simple-binary-encoding,PKRoma/simple-binary-encoding,real-logic/simple-binary-encoding,PKRoma/simple-binary-encoding,real-logic/simple-binary-encoding,real-logic/simple-binary-encoding,PKRoma/simple-binary-encoding | /*
* Copyright 2013-2020 Real Logic Limited.
* Copyright (C) 2017 MarketFactory, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.real_logic.sbe.generation.csharp;
import uk.co.real_logic.sbe.PrimitiveType;
import uk.co.real_logic.sbe.PrimitiveValue;
import uk.co.real_logic.sbe.generation.CodeGenerator;
import org.agrona.generation.OutputManager;
import uk.co.real_logic.sbe.generation.Generators;
import uk.co.real_logic.sbe.ir.*;
import org.agrona.Verify;
import java.io.IOException;
import java.io.Writer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;
import static uk.co.real_logic.sbe.generation.csharp.CSharpUtil.*;
import static uk.co.real_logic.sbe.ir.GenerationUtil.collectVarData;
import static uk.co.real_logic.sbe.ir.GenerationUtil.collectGroups;
import static uk.co.real_logic.sbe.ir.GenerationUtil.collectFields;
@SuppressWarnings("MethodLength")
public class CSharpGenerator implements CodeGenerator
{
private static final String META_ATTRIBUTE_ENUM = "MetaAttribute";
private static final String INDENT = " ";
private static final String BASE_INDENT = INDENT;
private final Ir ir;
private final OutputManager outputManager;
public CSharpGenerator(final Ir ir, final OutputManager outputManager)
{
Verify.notNull(ir, "ir");
Verify.notNull(outputManager, "outputManager");
this.ir = ir;
this.outputManager = outputManager;
}
public void generateMessageHeaderStub() throws IOException
{
generateComposite(ir.headerStructure().tokens());
}
public void generateTypeStubs() throws IOException
{
generateMetaAttributeEnum();
for (final List<Token> tokens : ir.types())
{
switch (tokens.get(0).signal())
{
case BEGIN_ENUM:
generateEnum(tokens);
break;
case BEGIN_SET:
generateBitSet(tokens);
break;
case BEGIN_COMPOSITE:
generateComposite(tokens);
break;
}
}
}
public void generate() throws IOException
{
generateMessageHeaderStub();
generateTypeStubs();
for (final List<Token> tokens : ir.messages())
{
final Token msgToken = tokens.get(0);
final String className = formatClassName(msgToken.name());
try (Writer out = outputManager.createOutput(className))
{
out.append(generateFileHeader(ir.applicableNamespace()));
out.append(generateDocumentation(BASE_INDENT, msgToken));
out.append(generateClassDeclaration(className));
out.append(generateMessageFlyweightCode(className, msgToken, BASE_INDENT));
final List<Token> messageBody = tokens.subList(1, tokens.size() - 1);
int offset = 0;
final List<Token> fields = new ArrayList<>();
offset = collectFields(messageBody, offset, fields);
out.append(generateFields(fields, BASE_INDENT));
final List<Token> groups = new ArrayList<>();
offset = collectGroups(messageBody, offset, groups);
final StringBuilder sb = new StringBuilder();
generateGroups(sb, className, groups, BASE_INDENT);
out.append(sb);
final List<Token> varData = new ArrayList<>();
collectVarData(messageBody, offset, varData);
out.append(generateVarData(varData, BASE_INDENT + INDENT));
out.append(INDENT + "}\n");
out.append("}\n");
}
}
}
private void generateGroups(
final StringBuilder sb,
final String parentMessageClassName,
final List<Token> tokens,
final String indent)
{
for (int i = 0, size = tokens.size(); i < size; i++)
{
final Token groupToken = tokens.get(i);
if (groupToken.signal() != Signal.BEGIN_GROUP)
{
throw new IllegalStateException("tokens must begin with BEGIN_GROUP: token=" + groupToken);
}
final String groupName = groupToken.name();
sb.append(generateGroupProperty(groupName, groupToken, indent + INDENT));
generateGroupClassHeader(sb, groupName, parentMessageClassName, tokens, i, indent + INDENT);
i++;
i += tokens.get(i).componentTokenCount();
final List<Token> fields = new ArrayList<>();
i = collectFields(tokens, i, fields);
sb.append(generateFields(fields, indent + INDENT));
final List<Token> groups = new ArrayList<>();
i = collectGroups(tokens, i, groups);
generateGroups(sb, parentMessageClassName, groups, indent + INDENT);
final List<Token> varData = new ArrayList<>();
i = collectVarData(tokens, i, varData);
sb.append(generateVarData(varData, indent + INDENT + INDENT));
sb.append(indent).append(INDENT + "}\n");
}
}
private void generateGroupClassHeader(
final StringBuilder sb,
final String groupName,
final String parentMessageClassName,
final List<Token> tokens,
final int index,
final String indent)
{
final String dimensionsClassName = formatClassName(tokens.get(index + 1).name());
final int dimensionHeaderLength = tokens.get(index + 1).encodedLength();
sb.append(String.format("\n" +
"%1$s" +
indent + "public sealed partial class %2$sGroup\n" +
indent + "{\n" +
indent + INDENT + "private readonly %3$s _dimensions = new %3$s();\n" +
indent + INDENT + "private %4$s _parentMessage;\n" +
indent + INDENT + "private DirectBuffer _buffer;\n" +
indent + INDENT + "private int _blockLength;\n" +
indent + INDENT + "private int _actingVersion;\n" +
indent + INDENT + "private int _count;\n" +
indent + INDENT + "private int _index;\n" +
indent + INDENT + "private int _offset;\n",
generateDocumentation(indent, tokens.get(index)),
formatClassName(groupName),
dimensionsClassName,
parentMessageClassName));
final Token numInGroupToken = Generators.findFirst("numInGroup", tokens, index);
final boolean isIntCastSafe = isRepresentableByInt32(numInGroupToken.encoding());
if (!isIntCastSafe)
{
throw new IllegalArgumentException(String.format(
"%s.numInGroup - cannot be represented safely by an int. Please constrain the maxValue.",
groupName));
}
sb.append(String.format("\n" +
indent + INDENT + "public void WrapForDecode(%s parentMessage, DirectBuffer buffer, int actingVersion)\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "_parentMessage = parentMessage;\n" +
indent + INDENT + INDENT + "_buffer = buffer;\n" +
indent + INDENT + INDENT + "_dimensions.Wrap(buffer, parentMessage.Limit, actingVersion);\n" +
indent + INDENT + INDENT + "_blockLength = _dimensions.BlockLength;\n" +
indent + INDENT + INDENT + "_count = (int) _dimensions.NumInGroup;\n" + // cast safety checked above
indent + INDENT + INDENT + "_actingVersion = actingVersion;\n" +
indent + INDENT + INDENT + "_index = 0;\n" +
indent + INDENT + INDENT + "_parentMessage.Limit = parentMessage.Limit + SbeHeaderSize;\n" +
indent + INDENT + "}\n",
parentMessageClassName));
final int blockLength = tokens.get(index).encodedLength();
final String typeForBlockLength = cSharpTypeName(tokens.get(index + 2).encoding().primitiveType());
final String typeForNumInGroup = cSharpTypeName(numInGroupToken.encoding().primitiveType());
final String throwCondition = numInGroupToken.encoding().applicableMinValue().longValue() == 0 ?
"if ((uint) count > %3$d)\n" :
"if (count < %2$d || count > %3$d)\n";
sb.append(String.format("\n" +
indent + INDENT + "public void WrapForEncode(%1$s parentMessage, DirectBuffer buffer, int count)\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + throwCondition +
indent + INDENT + INDENT + "{\n" +
indent + INDENT + INDENT + INDENT + "ThrowHelper.ThrowCountOutOfRangeException(count);\n" +
indent + INDENT + INDENT + "}\n\n" +
indent + INDENT + INDENT + "_parentMessage = parentMessage;\n" +
indent + INDENT + INDENT + "_buffer = buffer;\n" +
indent + INDENT + INDENT + "_dimensions.Wrap(buffer, parentMessage.Limit, _actingVersion);\n" +
indent + INDENT + INDENT + "_dimensions.BlockLength = SbeBlockLength;\n" +
indent + INDENT + INDENT + "_dimensions.NumInGroup = (%5$s) count;\n" +
indent + INDENT + INDENT + "_index = 0;\n" +
indent + INDENT + INDENT + "_count = count;\n" +
indent + INDENT + INDENT + "_blockLength = SbeBlockLength;\n" +
indent + INDENT + INDENT + "_actingVersion = SchemaVersion;\n" +
indent + INDENT + INDENT + "parentMessage.Limit = parentMessage.Limit + SbeHeaderSize;\n" +
indent + INDENT + "}\n",
parentMessageClassName,
numInGroupToken.encoding().applicableMinValue().longValue(),
numInGroupToken.encoding().applicableMaxValue().longValue(),
typeForBlockLength,
typeForNumInGroup));
sb.append(String.format("\n" +
indent + INDENT + "public const int SbeBlockLength = %d;\n" +
indent + INDENT + "public const int SbeHeaderSize = %d;\n",
blockLength,
dimensionHeaderLength));
generateGroupEnumerator(sb, groupName, typeForNumInGroup, indent);
}
private void generateGroupEnumerator(
final StringBuilder sb,
final String groupName,
final String typeForNumInGroup,
final String indent)
{
sb.append(
indent + INDENT + "public int ActingBlockLength { get { return _blockLength; } }\n\n" +
indent + INDENT + "public int Count { get { return _count; } }\n\n" +
indent + INDENT + "public bool HasNext { get { return _index < _count; } }\n");
sb.append(String.format("\n" +
indent + INDENT + "public int ResetCountToIndex()\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "_count = _index;\n" +
indent + INDENT + INDENT + "_dimensions.NumInGroup = (%s) _count;\n\n" +
indent + INDENT + INDENT + "return _count;\n" +
indent + INDENT + "}\n",
typeForNumInGroup));
sb.append(String.format("\n" +
indent + INDENT + "public %sGroup Next()\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "if (_index >= _count)\n" +
indent + INDENT + INDENT + "{\n" +
indent + INDENT + INDENT + INDENT + "ThrowHelper.ThrowInvalidOperationException();\n" +
indent + INDENT + INDENT + "}\n\n" +
indent + INDENT + INDENT + "_offset = _parentMessage.Limit;\n" +
indent + INDENT + INDENT + "_parentMessage.Limit = _offset + _blockLength;\n" +
indent + INDENT + INDENT + "++_index;\n\n" +
indent + INDENT + INDENT + "return this;\n" +
indent + INDENT + "}\n",
formatClassName(groupName)));
sb.append("\n" +
indent + INDENT + "public System.Collections.IEnumerator GetEnumerator()\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "while (this.HasNext)\n" +
indent + INDENT + INDENT + "{\n" +
indent + INDENT + INDENT + INDENT + "yield return this.Next();\n" +
indent + INDENT + INDENT + "}\n" +
indent + INDENT + "}\n");
}
private boolean isRepresentableByInt32(final Encoding encoding)
{
// These min and max values are the same in .NET
return encoding.applicableMinValue().longValue() >= Integer.MIN_VALUE &&
encoding.applicableMaxValue().longValue() <= Integer.MAX_VALUE;
}
private CharSequence generateGroupProperty(final String groupName, final Token token, final String indent)
{
final StringBuilder sb = new StringBuilder();
final String className = CSharpUtil.formatClassName(groupName);
sb.append(String.format("\n" +
indent + "private readonly %sGroup _%s = new %sGroup();\n",
className,
toLowerFirstChar(groupName),
className));
sb.append(String.format("\n" +
indent + "public const long %sId = %d;\n",
toUpperFirstChar(groupName),
token.id()));
generateSinceActingDeprecated(sb, indent, toUpperFirstChar(groupName), token);
sb.append(String.format("\n" +
"%1$s" +
indent + "public %2$sGroup %3$s\n" +
indent + "{\n" +
indent + INDENT + "get\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "_%4$s.WrapForDecode(_parentMessage, _buffer, _actingVersion);\n" +
indent + INDENT + INDENT + "return _%4$s;\n" +
indent + INDENT + "}\n" +
indent + "}\n",
generateDocumentation(indent, token),
className,
toUpperFirstChar(groupName),
toLowerFirstChar(groupName)));
sb.append(String.format("\n" +
indent + "public %1$sGroup %2$sCount(int count)\n" +
indent + "{\n" +
indent + INDENT + "_%3$s.WrapForEncode(_parentMessage, _buffer, count);\n" +
indent + INDENT + "return _%3$s;\n" +
indent + "}\n",
className,
toUpperFirstChar(groupName),
toLowerFirstChar(groupName)));
return sb;
}
private CharSequence generateVarData(final List<Token> tokens, final String indent)
{
final StringBuilder sb = new StringBuilder();
for (int i = 0, size = tokens.size(); i < size; i++)
{
final Token token = tokens.get(i);
if (token.signal() == Signal.BEGIN_VAR_DATA)
{
generateFieldIdMethod(sb, token, indent);
generateSinceActingDeprecated(sb, indent, CSharpUtil.formatPropertyName(token.name()), token);
generateOffsetMethod(sb, token, indent);
final Token varDataToken = Generators.findFirst("varData", tokens, i);
final String characterEncoding = varDataToken.encoding().characterEncoding();
generateCharacterEncodingMethod(sb, token.name(), characterEncoding, indent);
generateFieldMetaAttributeMethod(sb, token, indent);
final String propertyName = toUpperFirstChar(token.name());
final Token lengthToken = Generators.findFirst("length", tokens, i);
final int sizeOfLengthField = lengthToken.encodedLength();
final Encoding lengthEncoding = lengthToken.encoding();
final String lengthCSharpType = cSharpTypeName(lengthEncoding.primitiveType());
final String lengthTypePrefix = toUpperFirstChar(lengthEncoding.primitiveType().primitiveName());
final ByteOrder byteOrder = lengthEncoding.byteOrder();
final String byteOrderStr = generateByteOrder(byteOrder, lengthEncoding.primitiveType().size());
sb.append(String.format("\n" +
indent + "public const int %sHeaderSize = %d;\n",
propertyName,
sizeOfLengthField));
sb.append(String.format(indent + "\n" +
indent + "public int %1$sLength()\n" +
indent + "{\n" +
indent + INDENT + "_buffer.CheckLimit(_parentMessage.Limit + %2$d);\n" +
indent + INDENT + "return (int)_buffer.%3$sGet%4$s(_parentMessage.Limit);\n" +
indent + "}\n",
propertyName,
sizeOfLengthField,
lengthTypePrefix,
byteOrderStr));
sb.append(String.format("\n" +
indent + "public int Get%1$s(byte[] dst, int dstOffset, int length) =>\n" +
indent + INDENT + "Get%1$s(new Span<byte>(dst, dstOffset, length));\n",
propertyName));
sb.append(String.format("\n" +
indent + "public int Get%1$s(Span<byte> dst)\n" +
indent + "{\n" +
"%2$s" +
indent + INDENT + "const int sizeOfLengthField = %3$d;\n" +
indent + INDENT + "int limit = _parentMessage.Limit;\n" +
indent + INDENT + "_buffer.CheckLimit(limit + sizeOfLengthField);\n" +
indent + INDENT + "int dataLength = (int)_buffer.%4$sGet%5$s(limit);\n" +
indent + INDENT + "int bytesCopied = Math.Min(dst.Length, dataLength);\n" +
indent + INDENT + "_parentMessage.Limit = limit + sizeOfLengthField + dataLength;\n" +
indent + INDENT + "_buffer.GetBytes(limit + sizeOfLengthField, dst.Slice(0, bytesCopied));\n\n" +
indent + INDENT + "return bytesCopied;\n" +
indent + "}\n",
propertyName,
generateArrayFieldNotPresentCondition(token.version(), indent),
sizeOfLengthField,
lengthTypePrefix,
byteOrderStr));
sb.append(String.format(indent + "\n" +
indent + "// Allocates and returns a new byte array\n" +
indent + "public byte[] Get%1$sBytes()\n" +
indent + "{\n" +
indent + INDENT + "const int sizeOfLengthField = %2$d;\n" +
indent + INDENT + "int limit = _parentMessage.Limit;\n" +
indent + INDENT + "_buffer.CheckLimit(limit + sizeOfLengthField);\n" +
indent + INDENT + "int dataLength = (int)_buffer.%3$sGet%4$s(limit);\n" +
indent + INDENT + "byte[] data = new byte[dataLength];\n" +
indent + INDENT + "_parentMessage.Limit = limit + sizeOfLengthField + dataLength;\n" +
indent + INDENT + "_buffer.GetBytes(limit + sizeOfLengthField, data);\n\n" +
indent + INDENT + "return data;\n" +
indent + "}\n",
propertyName,
sizeOfLengthField,
lengthTypePrefix,
byteOrderStr));
sb.append(String.format("\n" +
indent + "public int Set%1$s(byte[] src, int srcOffset, int length) =>\n" +
indent + INDENT + "Set%1$s(new ReadOnlySpan<byte>(src, srcOffset, length));\n",
propertyName));
sb.append(String.format("\n" +
indent + "public int Set%1$s(ReadOnlySpan<byte> src)\n" +
indent + "{\n" +
indent + INDENT + "const int sizeOfLengthField = %2$d;\n" +
indent + INDENT + "int limit = _parentMessage.Limit;\n" +
indent + INDENT + "_parentMessage.Limit = limit + sizeOfLengthField + src.Length;\n" +
indent + INDENT + "_buffer.%3$sPut%5$s(limit, (%4$s)src.Length);\n" +
indent + INDENT + "_buffer.SetBytes(limit + sizeOfLengthField, src);\n\n" +
indent + INDENT + "return src.Length;\n" +
indent + "}\n",
propertyName,
sizeOfLengthField,
lengthTypePrefix,
lengthCSharpType,
byteOrderStr));
}
}
return sb;
}
/**
 * Generates a C# [Flags] enum for an SBE bit-set type and writes it to its own file.
 * The first token names the set; the tokens between begin/end are its choices.
 */
private void generateBitSet(final List<Token> tokens) throws IOException
{
final Token enumToken = tokens.get(0);
final String enumName = CSharpUtil.formatClassName(enumToken.applicableTypeName());
try (Writer out = outputManager.createOutput(enumName))
{
out.append(generateFileHeader(ir.applicableNamespace()));
out.append(generateDocumentation(INDENT, enumToken));
final String enumPrimitiveType = cSharpTypeName(enumToken.encoding().primitiveType());
// 'true' adds the [Flags] attribute so the choices can be OR-ed together.
out.append(generateEnumDeclaration(enumName, enumPrimitiveType, true));
out.append(generateChoices(tokens.subList(1, tokens.size() - 1)));
out.append(INDENT + "}\n");
out.append("}\n");
}
}
/**
 * Generates a plain C# enum for an SBE enum type and writes it to its own file.
 * Unlike bit-sets, no [Flags] attribute is emitted and a NULL_VALUE member is added.
 */
private void generateEnum(final List<Token> tokens) throws IOException
{
final Token enumToken = tokens.get(0);
final String enumName = CSharpUtil.formatClassName(enumToken.applicableTypeName());
try (Writer out = outputManager.createOutput(enumName))
{
out.append(generateFileHeader(ir.applicableNamespace()));
out.append(generateDocumentation(INDENT, enumToken));
final String enumPrimitiveType = cSharpTypeName(enumToken.encoding().primitiveType());
out.append(generateEnumDeclaration(enumName, enumPrimitiveType, false));
out.append(generateEnumValues(tokens.subList(1, tokens.size() - 1), enumToken));
out.append(INDENT + "}\n");
out.append("}\n");
}
}
/**
 * Generates a fixed-size flyweight class for an SBE composite type and writes it
 * to its own file, including a property per member of the composite.
 */
private void generateComposite(final List<Token> tokens) throws IOException
{
final String compositeName = CSharpUtil.formatClassName(tokens.get(0).applicableTypeName());
try (Writer out = outputManager.createOutput(compositeName))
{
out.append(generateFileHeader(ir.applicableNamespace()));
out.append(generateDocumentation(INDENT, tokens.get(0)));
out.append(generateClassDeclaration(compositeName));
out.append(generateFixedFlyweightCode(tokens.get(0).encodedLength()));
out.append(generateCompositePropertyElements(tokens.subList(1, tokens.size() - 1), BASE_INDENT));
out.append(INDENT + "}\n");
out.append("}\n");
}
}
/**
 * Emits the property accessors for each member of a composite type.
 * Walks the token list, dispatching on each member's signal and then skipping
 * over that member's component tokens to reach the next one.
 */
private CharSequence generateCompositePropertyElements(final List<Token> tokens, final String indent)
{
    final StringBuilder result = new StringBuilder();
    int index = 0;
    while (index < tokens.size())
    {
        final Token memberToken = tokens.get(index);
        final String memberName = formatPropertyName(memberToken.name());

        // FIXME: do I need to pass classname down here for disambiguation
        switch (memberToken.signal())
        {
            case ENCODING:
                result.append(generatePrimitiveProperty(memberName, memberToken, memberToken, indent));
                break;

            case BEGIN_ENUM:
                result.append(generateEnumProperty(memberName, memberToken, memberToken, indent));
                break;

            case BEGIN_SET:
                result.append(generateBitSetProperty(memberName, memberToken, memberToken, indent));
                break;

            case BEGIN_COMPOSITE:
                result.append(generateCompositeProperty(memberName, memberToken, memberToken, indent));
                break;
        }

        index += memberToken.componentTokenCount();
    }

    return result;
}
/**
 * Emits one "Name = value," line per choice of a bit-set enum, where the value
 * has a single bit set at the choice's encoded bit position.
 */
private CharSequence generateChoices(final List<Token> tokens)
{
    final StringBuilder sb = new StringBuilder();
    for (final Token token : tokens)
    {
        if (token.signal() == Signal.CHOICE)
        {
            final String choiceName = toUpperFirstChar(token.applicableTypeName());
            final String choiceBitPosition = token.encoding().constValue().toString();
            // Use a long shift instead of (int)Math.pow(2, n): the double-to-int cast
            // saturates at Integer.MAX_VALUE, so bit position 31 produced 2147483647
            // rather than 2^31. The shift is exact for all valid bit positions.
            final long choiceValue = 1L << Integer.parseInt(choiceBitPosition);
            sb.append(String.format(INDENT + INDENT + "%s = %s,\n", choiceName, choiceValue));
        }
    }

    return sb;
}
/**
 * Emits one "Name = constValue," line per enum value (with XML-doc when a
 * description exists) plus a trailing NULL_VALUE member holding the encoding's
 * applicable null value.
 */
private CharSequence generateEnumValues(final List<Token> tokens, final Token encodingToken)
{
final StringBuilder sb = new StringBuilder();
final Encoding encoding = encodingToken.encoding();
for (final Token token : tokens)
{
sb.append(generateDocumentation(INDENT + INDENT, token))
.append(INDENT).append(INDENT).append(token.name()).append(" = ")
.append(token.encoding().constValue()).append(",\n");
}
final PrimitiveValue nullVal = encoding.applicableNullValue();
sb.append(INDENT).append(INDENT).append("NULL_VALUE = ").append(nullVal).append("\n");
return sb;
}
/**
 * Builds the generated-file preamble: codec banner, warning pragma, usings and the
 * opening of the namespace. The C# namespace is derived from the schema package
 * name by upper-casing the first letter of each dot-separated part, then removing
 * dashes while upper-casing the character that followed each dash.
 */
private CharSequence generateFileHeader(final String packageName)
{
    final StringBuilder namespaceBuilder = new StringBuilder();
    for (final String part : packageName.split("\\."))
    {
        namespaceBuilder.append(toUpperFirstChar(part)).append(".");
    }

    if (namespaceBuilder.length() > 0)
    {
        namespaceBuilder.setLength(namespaceBuilder.length() - 1); // drop trailing dot
    }

    final String[] dashedParts = namespaceBuilder.toString().split("-");
    namespaceBuilder.setLength(0);
    for (final String part : dashedParts)
    {
        namespaceBuilder.append(toUpperFirstChar(part));
    }

    return String.format(
        "/* Generated SBE (Simple Binary Encoding) message codec */\n\n" +
        "#pragma warning disable 1591 // disable warning on missing comments\n" +
        "using System;\n" +
        "using Org.SbeTool.Sbe.Dll;\n\n" +
        "namespace %s\n" +
        "{\n",
        namespaceBuilder);
}
/** Opens the sealed partial class declaration for a generated codec type. */
private CharSequence generateClassDeclaration(final String className)
{
    return INDENT + "public sealed partial class " + className + "\n" +
        INDENT + "{\n";
}
/**
 * Renders a token's description as a C# XML-doc summary block at the given
 * indent, or an empty string when the token carries no description.
 */
public static String generateDocumentation(final String indent, final Token token)
{
    final String description = token.description();
    if (description == null || description.isEmpty())
    {
        return "";
    }

    return indent + "/// <summary>\n" +
        indent + "/// " + description + "\n" +
        indent + "/// </summary>\n";
}
/**
 * Writes the shared MetaAttribute enum used by every generated codec's
 * per-field meta-attribute lookup method.
 */
private void generateMetaAttributeEnum() throws IOException
{
try (Writer out = outputManager.createOutput(META_ATTRIBUTE_ENUM))
{
out.append(generateFileHeader(ir.applicableNamespace()));
out.append(
INDENT + "public enum MetaAttribute\n" +
INDENT + "{\n" +
INDENT + INDENT + "Epoch,\n" +
INDENT + INDENT + "TimeUnit,\n" +
INDENT + INDENT + "SemanticType,\n" +
INDENT + INDENT + "Presence\n" +
INDENT + "}\n" +
"}\n");
}
}
/**
 * Opens a C# enum declaration with an explicit underlying primitive type,
 * optionally preceded by the [Flags] attribute (used for bit-set types).
 */
private CharSequence generateEnumDeclaration(
    final String name,
    final String primitiveType,
    final boolean addFlagsAttribute)
{
    final StringBuilder declaration = new StringBuilder();
    if (addFlagsAttribute)
    {
        declaration.append(INDENT).append("[Flags]\n");
    }

    declaration
        .append(INDENT).append("public enum ").append(name).append(" : ").append(primitiveType).append("\n")
        .append(INDENT).append("{\n");

    return declaration;
}
/**
 * Generates the null/min/max meta-data constants for a primitive field, followed
 * by either constant accessors (constant encoding) or read/write accessors.
 */
private CharSequence generatePrimitiveProperty(
    final String propertyName, final Token fieldToken, final Token typeToken, final String indent)
{
    final StringBuilder sb = new StringBuilder();
    sb.append(generatePrimitiveFieldMetaData(propertyName, typeToken, indent + INDENT));

    sb.append(typeToken.isConstantEncoding() ?
        generateConstPropertyMethods(propertyName, typeToken, indent) :
        generatePrimitivePropertyMethods(propertyName, fieldToken, typeToken, indent));

    return sb;
}
/**
 * Selects the accessor style for a primitive field based on its declared array
 * length: 1 yields a single-value property, greater than 1 an array property,
 * and anything else produces nothing.
 */
private CharSequence generatePrimitivePropertyMethods(
    final String propertyName,
    final Token fieldToken,
    final Token typeToken,
    final String indent)
{
    final int arrayLength = typeToken.arrayLength();

    if (arrayLength > 1)
    {
        return generateArrayProperty(propertyName, fieldToken, typeToken, indent + INDENT);
    }

    return arrayLength == 1 ?
        generateSingleValueProperty(propertyName, fieldToken, typeToken, indent + INDENT) : "";
}
/**
 * Emits the NullValue/MinValue/MaxValue constants for a primitive field so
 * generated C# code can test for presence and range without magic numbers.
 */
private CharSequence generatePrimitiveFieldMetaData(
final String propertyName,
final Token token,
final String indent)
{
final PrimitiveType primitiveType = token.encoding().primitiveType();
final String typeName = cSharpTypeName(primitiveType);
return String.format(
"\n" +
indent + "public const %1$s %2$sNullValue = %3$s;\n" +
indent + "public const %1$s %2$sMinValue = %4$s;\n" +
indent + "public const %1$s %2$sMaxValue = %5$s;\n",
typeName,
toUpperFirstChar(propertyName),
generateLiteral(primitiveType, token.encoding().applicableNullValue().toString()),
generateLiteral(primitiveType, token.encoding().applicableMinValue().toString()),
generateLiteral(primitiveType, token.encoding().applicableMaxValue().toString()));
}
/**
 * Generates a C# get/set property for a scalar primitive field, reading and
 * writing through the DirectBuffer at the field's fixed offset. The getter is
 * prefixed with a version guard when the field was added after version 0.
 */
private CharSequence generateSingleValueProperty(
final String propertyName,
final Token fieldToken,
final Token typeToken,
final String indent)
{
final String typeName = cSharpTypeName(typeToken.encoding().primitiveType());
final String typePrefix = toUpperFirstChar(typeToken.encoding().primitiveType().primitiveName());
final int offset = typeToken.offset();
final ByteOrder byteOrder = typeToken.encoding().byteOrder();
// Empty suffix for single-byte types; otherwise BigEndian/LittleEndian accessor variant.
final String byteOrderStr = generateByteOrder(byteOrder, typeToken.encoding().primitiveType().size());
return String.format("\n" +
"%1$s" +
indent + "public %2$s %3$s\n" +
indent + "{\n" +
indent + INDENT + "get\n" +
indent + INDENT + "{\n" +
"%4$s" +
indent + INDENT + INDENT + "return _buffer.%5$sGet%7$s(_offset + %6$d);\n" +
indent + INDENT + "}\n" +
indent + INDENT + "set\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "_buffer.%5$sPut%7$s(_offset + %6$d, value);\n" +
indent + INDENT + "}\n" +
indent + "}\n\n",
generateDocumentation(indent, fieldToken),
typeName,
toUpperFirstChar(propertyName),
generateFieldNotPresentCondition(fieldToken.version(), typeToken.encoding(), indent),
typePrefix,
offset,
byteOrderStr);
}
/**
 * For a field introduced after schema version 0, emits a guard that returns the
 * encoding's null value when decoding a message older than the field. Fields
 * present since version 0 need no guard, so an empty string is returned.
 */
private CharSequence generateFieldNotPresentCondition(
    final int sinceVersion,
    final Encoding encoding,
    final String indent)
{
    if (sinceVersion == 0)
    {
        return "";
    }

    // Negative versions are not expected; fall back to a zero byte if one appears.
    final String notPresentValue = sinceVersion > 0 ?
        generateLiteral(encoding.primitiveType(), encoding.applicableNullValue().toString()) :
        "(byte)0";

    return String.format(
        indent + INDENT + INDENT + "if (_actingVersion < %1$d) return %2$s;\n\n",
        sinceVersion,
        notPresentValue);
}
/** Guard for array getters: decoding a version older than the field yields zero bytes copied. */
private CharSequence generateArrayFieldNotPresentCondition(final int sinceVersion, final String indent)
{
    return sinceVersion == 0 ? "" : String.format(
        indent + INDENT + INDENT + "if (_actingVersion < %d) return 0;\n\n",
        sinceVersion);
}
/**
 * Guard for bit-set getters: decoding a version older than the field yields the
 * bit-set type's zero value (no choices set).
 */
private CharSequence generateBitSetNotPresentCondition(
final int sinceVersion,
final String indent,
final String bitSetName)
{
if (0 == sinceVersion)
{
return "";
}
return String.format(
indent + INDENT + INDENT + INDENT + "if (_actingVersion < %1$d) return (%2$s)0;\n\n",
sinceVersion,
bitSetName);
}
/** Guard for composite getters: decoding a version older than the field yields null. */
private CharSequence generateTypeFieldNotPresentCondition(
    final int sinceVersion,
    final String indent)
{
    return sinceVersion == 0 ? "" : String.format(
        indent + INDENT + INDENT + "if (_actingVersion < %d) return null;\n\n",
        sinceVersion);
}
/**
 * Generates accessors for a fixed-length primitive array field: a Length
 * constant, bounds-checked indexed Get/Set methods and, for char arrays, bulk
 * byte[]/Span copy methods plus the character-encoding constant.
 */
private CharSequence generateArrayProperty(
final String propertyName,
final Token fieldToken,
final Token typeToken,
final String indent)
{
final String typeName = cSharpTypeName(typeToken.encoding().primitiveType());
final String typePrefix = toUpperFirstChar(typeToken.encoding().primitiveType().primitiveName());
final int offset = typeToken.offset();
final ByteOrder byteOrder = typeToken.encoding().byteOrder();
final String byteOrderStr = generateByteOrder(byteOrder, typeToken.encoding().primitiveType().size());
final int fieldLength = typeToken.arrayLength();
final int typeSize = typeToken.encoding().primitiveType().size();
final String propName = toUpperFirstChar(propertyName);
final StringBuilder sb = new StringBuilder();
sb.append(String.format("\n" +
indent + "public const int %sLength = %d;\n",
propName, fieldLength));
// Indexed getter with an unsigned-compare bounds check ((uint)index covers negatives too).
sb.append(String.format("\n" +
"%1$s" +
indent + "public %2$s Get%3$s(int index)\n" +
indent + "{\n" +
indent + INDENT + "if ((uint) index >= %4$d)\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "ThrowHelper.ThrowIndexOutOfRangeException(index);\n" +
indent + INDENT + "}\n\n" +
"%5$s" +
indent + INDENT + "return _buffer.%6$sGet%9$s(_offset + %7$d + (index * %8$d));\n" +
indent + "}\n",
generateDocumentation(indent, fieldToken),
typeName, propName, fieldLength,
generateFieldNotPresentCondition(fieldToken.version(), typeToken.encoding(), indent),
typePrefix, offset, typeSize, byteOrderStr));
sb.append(String.format("\n" +
"%1$s" +
indent + "public void Set%2$s(int index, %3$s value)\n" +
indent + "{\n" +
indent + INDENT + "if ((uint) index >= %4$d)\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "ThrowHelper.ThrowIndexOutOfRangeException(index);\n" +
indent + INDENT + "}\n\n" +
indent + INDENT + "_buffer.%5$sPut%8$s(_offset + %6$d + (index * %7$d), value);\n" +
indent + "}\n",
generateDocumentation(indent, fieldToken),
propName, typeName, fieldLength, typePrefix, offset, typeSize, byteOrderStr));
// Char arrays additionally get bulk byte copy helpers and the encoding constant.
if (typeToken.encoding().primitiveType() == PrimitiveType.CHAR)
{
generateCharacterEncodingMethod(sb, propertyName, typeToken.encoding().characterEncoding(), indent);
// NOTE(review): the trailing 'offset' argument below is unused by this format
// string (only %1-%3 are referenced) — harmless, but confirm before removing.
sb.append(String.format("\n" +
indent + "public int Get%1$s(byte[] dst, int dstOffset)\n" +
indent + "{\n" +
indent + INDENT + "const int length = %2$d;\n" +
"%3$s" +
indent + INDENT + "return Get%1$s(new Span<byte>(dst, dstOffset, length));\n" +
indent + "}\n",
propName, fieldLength, generateArrayFieldNotPresentCondition(fieldToken.version(), indent), offset));
sb.append(String.format("\n" +
indent + "public int Get%1$s(Span<byte> dst)\n" +
indent + "{\n" +
indent + INDENT + "const int length = %2$d;\n" +
indent + INDENT + "if (dst.Length < length)\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "ThrowHelper.ThrowWhenSpanLengthTooSmall(dst.Length);\n" +
indent + INDENT + "}\n\n" +
"%3$s" +
indent + INDENT + "_buffer.GetBytes(_offset + %4$d, dst);\n" +
indent + INDENT + "return length;\n" +
indent + "}\n",
propName, fieldLength, generateArrayFieldNotPresentCondition(fieldToken.version(), indent), offset));
// NOTE(review): 'fieldLength' and 'offset' are unused by this format string — harmless extras.
sb.append(String.format("\n" +
indent + "public void Set%1$s(byte[] src, int srcOffset)\n" +
indent + "{\n" +
indent + INDENT + "Set%1$s(new ReadOnlySpan<byte>(src, srcOffset, src.Length - srcOffset));\n" +
indent + "}\n",
propName, fieldLength, offset));
sb.append(String.format("\n" +
indent + "public void Set%1$s(ReadOnlySpan<byte> src)\n" +
indent + "{\n" +
indent + INDENT + "const int length = %2$d;\n" +
indent + INDENT + "if (src.Length > length)\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "ThrowHelper.ThrowWhenSpanLengthTooLarge(src.Length);\n" +
indent + INDENT + "}\n\n" +
indent + INDENT + "_buffer.SetBytes(_offset + %3$d, src);\n" +
indent + "}\n",
propName, fieldLength, offset));
}
return sb;
}
/** Appends a constant exposing the character encoding used by a char-array field. */
private void generateCharacterEncodingMethod(
    final StringBuilder sb,
    final String propertyName,
    final String encoding,
    final String indent)
{
    sb.append("\n")
        .append(indent)
        .append("public const string ")
        .append(formatPropertyName(propertyName))
        .append("CharacterEncoding = \"")
        .append(encoding)
        .append("\";\n\n");
}
/**
 * Generates accessors for a constant field. Non-char constants become a simple
 * read-only property; char-array constants are exposed through a static byte
 * array with an indexed getter, a Length constant and a bulk copy method.
 */
private CharSequence generateConstPropertyMethods(
    final String propertyName,
    final Token token,
    final String indent)
{
    if (token.encoding().primitiveType() != PrimitiveType.CHAR)
    {
        // ODE: we generate a property here because the constant could
        // become a field in a newer version of the protocol
        return String.format("\n" +
            // Was "%1s": without '$' the '1' parses as a field width, so an empty
            // documentation string was padded to a stray space and argument binding
            // only worked by accidental ordering.
            "%1$s" +
            indent + INDENT + "public %2$s %3$s { get { return %4$s; } }\n",
            generateDocumentation(indent + INDENT, token),
            cSharpTypeName(token.encoding().primitiveType()),
            toUpperFirstChar(propertyName),
            generateLiteral(token.encoding().primitiveType(), token.encoding().constValue().toString()));
    }

    final StringBuilder sb = new StringBuilder();
    final String typeName = cSharpTypeName(token.encoding().primitiveType());
    // Decode the constant once and reuse it for both the literal list and the length.
    final byte[] constantValue = token.encoding().constValue().byteArrayValue(token.encoding().primitiveType());
    final CharSequence values = generateByteLiteralList(constantValue);

    sb.append(String.format(
        "\n" +
        indent + INDENT + "private static readonly byte[] _%1$sValue = { %2$s };\n",
        propertyName,
        values));

    sb.append(String.format(
        "\n" +
        indent + INDENT + "public const int %1$sLength = %2$d;\n",
        toUpperFirstChar(propertyName),
        constantValue.length));

    sb.append(String.format(
        indent + INDENT + "public %1$s %2$s(int index)\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "return _%3$sValue[index];\n" +
        indent + INDENT + "}\n\n",
        typeName,
        toUpperFirstChar(propertyName),
        propertyName));

    sb.append(String.format(
        indent + INDENT + "public int Get%1$s(byte[] dst, int offset, int length)\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "int bytesCopied = Math.Min(length, %2$d);\n" +
        indent + INDENT + INDENT + "Array.Copy(_%3$sValue, 0, dst, offset, bytesCopied);\n" +
        indent + INDENT + INDENT + "return bytesCopied;\n" +
        indent + INDENT + "}\n",
        toUpperFirstChar(propertyName),
        constantValue.length,
        propertyName));

    return sb;
}
/** Renders bytes as a comma-separated list of decimal literals, e.g. "72, 105". */
private CharSequence generateByteLiteralList(final byte[] bytes)
{
    final StringBuilder values = new StringBuilder();
    String separator = "";
    for (final byte b : bytes)
    {
        values.append(separator).append(b);
        separator = ", ";
    }

    return values;
}
/**
 * Generates the body boilerplate shared by all fixed-size flyweights: the
 * SchemaId/SchemaVersion/Size constants plus the buffer/offset/version state
 * and the Wrap() method.
 */
private CharSequence generateFixedFlyweightCode(final int size)
{
    final String schemaIdType = cSharpTypeName(ir.headerStructure().schemaIdType());
    final String schemaVersionType = cSharpTypeName(ir.headerStructure().schemaVersionType());

    // The flyweight boilerplate belongs inside the format string. A stray ','
    // after "Size = %5$d;\n\n" previously passed the whole boilerplate (with
    // schemaIdType concatenated onto it) as argument %1$s, corrupting the
    // generated class and dropping the state/Wrap() code entirely.
    return String.format(
        INDENT + INDENT + "public const %1$s SchemaId = %2$s;\n" +
        INDENT + INDENT + "public const %3$s SchemaVersion = %4$s;\n" +
        INDENT + INDENT + "public const int Size = %5$d;\n\n" +
        INDENT + INDENT + "private DirectBuffer _buffer;\n" +
        INDENT + INDENT + "private int _offset;\n" +
        INDENT + INDENT + "private int _actingVersion;\n\n" +
        INDENT + INDENT + "public void Wrap(DirectBuffer buffer, int offset, int actingVersion)\n" +
        INDENT + INDENT + "{\n" +
        INDENT + INDENT + INDENT + "_offset = offset;\n" +
        INDENT + INDENT + INDENT + "_actingVersion = actingVersion;\n" +
        INDENT + INDENT + INDENT + "_buffer = buffer;\n" +
        INDENT + INDENT + "}\n\n",
        schemaIdType,
        generateLiteral(ir.headerStructure().schemaIdType(), Integer.toString(ir.id())),
        schemaVersionType,
        generateLiteral(ir.headerStructure().schemaVersionType(), Integer.toString(ir.version())),
        size);
}
/**
 * Generates the message-level flyweight boilerplate: BlockLength/TemplateId/
 * SchemaId/SchemaVersion/SemanticType constants, buffer and limit state, the
 * WrapForEncode/WrapForDecode methods (plus the header-applying variant) and
 * the Size/Limit properties.
 *
 * NOTE(review): the WrapForEncodeAndApplyHeader signature is built from
 * "int offset, " + " MessageHeader ..." which emits a double space in the
 * generated C# — cosmetic only; confirm before normalising.
 */
private CharSequence generateMessageFlyweightCode(final String className, final Token token, final String indent)
{
final String blockLengthType = cSharpTypeName(ir.headerStructure().blockLengthType());
final String templateIdType = cSharpTypeName(ir.headerStructure().templateIdType());
final String schemaIdType = cSharpTypeName(ir.headerStructure().schemaIdType());
final String schemaVersionType = cSharpTypeName(ir.headerStructure().schemaVersionType());
final String semanticType = token.encoding().semanticType() == null ? "" : token.encoding().semanticType();
return String.format(
indent + INDENT + "public const %1$s BlockLength = %2$s;\n" +
indent + INDENT + "public const %3$s TemplateId = %4$s;\n" +
indent + INDENT + "public const %5$s SchemaId = %6$s;\n" +
indent + INDENT + "public const %7$s SchemaVersion = %8$s;\n" +
indent + INDENT + "public const string SemanticType = \"%9$s\";\n\n" +
indent + INDENT + "private readonly %10$s _parentMessage;\n" +
indent + INDENT + "private DirectBuffer _buffer;\n" +
indent + INDENT + "private int _offset;\n" +
indent + INDENT + "private int _limit;\n" +
indent + INDENT + "private int _actingBlockLength;\n" +
indent + INDENT + "private int _actingVersion;\n" +
"\n" +
indent + INDENT + "public int Offset { get { return _offset; } }\n\n" +
indent + INDENT + "public %10$s()\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "_parentMessage = this;\n" +
indent + INDENT + "}\n\n" +
indent + INDENT + "public void WrapForEncode(DirectBuffer buffer, int offset)\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "_buffer = buffer;\n" +
indent + INDENT + INDENT + "_offset = offset;\n" +
indent + INDENT + INDENT + "_actingBlockLength = BlockLength;\n" +
indent + INDENT + INDENT + "_actingVersion = SchemaVersion;\n" +
indent + INDENT + INDENT + "Limit = offset + _actingBlockLength;\n" +
indent + INDENT + "}\n\n" +
indent + INDENT + "public void WrapForEncodeAndApplyHeader(DirectBuffer buffer, int offset, " +
" MessageHeader headerEncoder)\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "headerEncoder.Wrap(buffer, offset, SchemaVersion);\n" +
indent + INDENT + INDENT + "headerEncoder.BlockLength = BlockLength;\n" +
indent + INDENT + INDENT + "headerEncoder.TemplateId = TemplateId;\n" +
indent + INDENT + INDENT + "headerEncoder.SchemaId = SchemaId;\n" +
indent + INDENT + INDENT + "headerEncoder.Version = SchemaVersion;\n" +
indent + INDENT + INDENT + "\n" +
indent + INDENT + INDENT + "WrapForEncode(buffer, offset + MessageHeader.Size);\n" +
indent + INDENT + "}\n\n" +
indent + INDENT + "public void WrapForDecode(DirectBuffer buffer, int offset, " +
"int actingBlockLength, int actingVersion)\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "_buffer = buffer;\n" +
indent + INDENT + INDENT + "_offset = offset;\n" +
indent + INDENT + INDENT + "_actingBlockLength = actingBlockLength;\n" +
indent + INDENT + INDENT + "_actingVersion = actingVersion;\n" +
indent + INDENT + INDENT + "Limit = offset + _actingBlockLength;\n" +
indent + INDENT + "}\n\n" +
indent + INDENT + "public int Size\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "get\n" +
indent + INDENT + INDENT + "{\n" +
indent + INDENT + INDENT + INDENT + "return _limit - _offset;\n" +
indent + INDENT + INDENT + "}\n" +
indent + INDENT + "}\n\n" +
indent + INDENT + "public int Limit\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "get\n" +
indent + INDENT + INDENT + "{\n" +
indent + INDENT + INDENT + INDENT + "return _limit;\n" +
indent + INDENT + INDENT + "}\n" +
indent + INDENT + INDENT + "set\n" +
indent + INDENT + INDENT + "{\n" +
indent + INDENT + INDENT + INDENT + "_buffer.CheckLimit(value);\n" +
indent + INDENT + INDENT + INDENT + "_limit = value;\n" +
indent + INDENT + INDENT + "}\n" +
indent + INDENT + "}\n\n",
blockLengthType,
generateLiteral(ir.headerStructure().blockLengthType(), Integer.toString(token.encodedLength())),
templateIdType,
generateLiteral(ir.headerStructure().templateIdType(), Integer.toString(token.id())),
schemaIdType,
generateLiteral(ir.headerStructure().schemaIdType(), Integer.toString(ir.id())),
schemaVersionType,
generateLiteral(ir.headerStructure().schemaVersionType(), Integer.toString(ir.version())),
semanticType,
className);
}
/**
 * Generates the accessors for every field in a message's root block: the id,
 * since/deprecated, offset and meta-attribute helpers, then the property
 * appropriate to the field's encoding kind.
 */
private CharSequence generateFields(final List<Token> tokens, final String indent)
{
final StringBuilder sb = new StringBuilder();
for (int i = 0, size = tokens.size(); i < size; i++)
{
final Token signalToken = tokens.get(i);
if (signalToken.signal() == Signal.BEGIN_FIELD)
{
// The token after BEGIN_FIELD carries the field's encoding.
final Token encodingToken = tokens.get(i + 1);
final String propertyName = signalToken.name();
generateFieldIdMethod(sb, signalToken, indent + INDENT);
generateSinceActingDeprecated(
sb, indent, CSharpUtil.formatPropertyName(signalToken.name()), signalToken);
generateOffsetMethod(sb, signalToken, indent + INDENT);
generateFieldMetaAttributeMethod(sb, signalToken, indent + INDENT);
switch (encodingToken.signal())
{
case ENCODING:
sb.append(generatePrimitiveProperty(propertyName, signalToken, encodingToken, indent));
break;
case BEGIN_ENUM:
sb.append(generateEnumProperty(propertyName, signalToken, encodingToken, indent));
break;
case BEGIN_SET:
sb.append(generateBitSetProperty(propertyName, signalToken, encodingToken, indent));
break;
case BEGIN_COMPOSITE:
sb.append(generateCompositeProperty(propertyName, signalToken, encodingToken, indent));
break;
}
}
}
return sb;
}
/** Appends the "<Name>Id" constant holding a field's schema id. */
private void generateFieldIdMethod(final StringBuilder sb, final Token token, final String indent)
{
    sb.append("\n")
        .append(indent)
        .append("public const int ")
        .append(CSharpUtil.formatPropertyName(token.name()))
        .append("Id = ")
        .append(token.id())
        .append(";\n");
}
/** Appends the "<Name>Offset" constant holding a field's byte offset in the block. */
private void generateOffsetMethod(final StringBuilder sb, final Token token, final String indent)
{
    sb.append("\n")
        .append(indent)
        .append("public const int ")
        .append(CSharpUtil.formatPropertyName(token.name()))
        .append("Offset = ")
        .append(token.offset())
        .append(";\n");
}
/**
 * Appends a static "<Name>MetaAttribute" method returning the field's epoch,
 * time unit, semantic type or presence as a string (empty when unset).
 */
private void generateFieldMetaAttributeMethod(final StringBuilder sb, final Token token, final String indent)
{
final Encoding encoding = token.encoding();
final String epoch = encoding.epoch() == null ? "" : encoding.epoch();
final String timeUnit = encoding.timeUnit() == null ? "" : encoding.timeUnit();
final String semanticType = encoding.semanticType() == null ? "" : encoding.semanticType();
final String presence = encoding.presence() == null ? "" : encoding.presence().toString().toLowerCase();
sb.append(String.format("\n" +
indent + "public static string %sMetaAttribute(MetaAttribute metaAttribute)\n" +
indent + "{\n" +
indent + INDENT + "switch (metaAttribute)\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "case MetaAttribute.Epoch: return \"%s\";\n" +
indent + INDENT + INDENT + "case MetaAttribute.TimeUnit: return \"%s\";\n" +
indent + INDENT + INDENT + "case MetaAttribute.SemanticType: return \"%s\";\n" +
indent + INDENT + INDENT + "case MetaAttribute.Presence: return \"%s\";\n" +
indent + INDENT + "}\n\n" +
indent + INDENT + "return \"\";\n" +
indent + "}\n",
toUpperFirstChar(token.name()),
epoch,
timeUnit,
semanticType,
presence));
}
/** Guard for enum getters: decoding a version older than the field yields the enum's NULL_VALUE. */
private CharSequence generateEnumFieldNotPresentCondition(
    final int sinceVersion,
    final String enumName,
    final String indent)
{
    return sinceVersion == 0 ? "" : String.format(
        indent + INDENT + INDENT + "if (_actingVersion < %d) return %s.NULL_VALUE;\n\n",
        sinceVersion,
        enumName);
}
/**
 * Generates a C# property for an enum-typed field. Constant-encoded fields
 * become a get-only property returning the constant; otherwise the getter and
 * setter cast between the enum and its underlying primitive in the buffer.
 */
private CharSequence generateEnumProperty(
final String propertyName,
final Token fieldToken,
final Token typeToken,
final String indent)
{
final String enumName = formatClassName(typeToken.applicableTypeName());
final String typePrefix = toUpperFirstChar(typeToken.encoding().primitiveType().primitiveName());
final String enumUnderlyingType = cSharpTypeName(typeToken.encoding().primitiveType());
final int offset = typeToken.offset();
final ByteOrder byteOrder = typeToken.encoding().byteOrder();
final String byteOrderStr = generateByteOrder(byteOrder, typeToken.encoding().primitiveType().size());
if (fieldToken.isConstantEncoding())
{
// NOTE(review): constValue is emitted verbatim — presumably already in
// "EnumType.Value" form from the IR; confirm against the schema parser.
final String constValue = fieldToken.encoding().constValue().toString();
return String.format("\n" +
"%1$s" +
indent + INDENT + "public %2$s %3$s\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "get\n" +
indent + INDENT + INDENT + "{\n" +
indent + INDENT + INDENT + INDENT + "return %4$s;\n" +
indent + INDENT + INDENT + "}\n" +
indent + INDENT + "}\n\n",
generateDocumentation(indent + INDENT, fieldToken),
enumName,
toUpperFirstChar(propertyName),
constValue);
}
else
{
return String.format("\n" +
"%1$s" +
indent + INDENT + "public %2$s %3$s\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "get\n" +
indent + INDENT + INDENT + "{\n" +
"%4$s" +
indent + INDENT + INDENT + INDENT + "return (%5$s)_buffer.%6$sGet%8$s(_offset + %7$d);\n" +
indent + INDENT + INDENT + "}\n" +
indent + INDENT + INDENT + "set\n" +
indent + INDENT + INDENT + "{\n" +
indent + INDENT + INDENT + INDENT + "_buffer.%6$sPut%8$s(_offset + %7$d, (%9$s)value);\n" +
indent + INDENT + INDENT + "}\n" +
indent + INDENT + "}\n\n",
generateDocumentation(indent + INDENT, fieldToken),
enumName,
toUpperFirstChar(propertyName),
generateEnumFieldNotPresentCondition(fieldToken.version(), enumName, indent),
enumName,
typePrefix,
offset,
byteOrderStr,
enumUnderlyingType);
}
}
/**
 * Generates a C# property for a bit-set field, casting between the [Flags]
 * enum type and its underlying primitive when reading/writing the buffer.
 */
private String generateBitSetProperty(
final String propertyName, final Token fieldToken, final Token typeToken, final String indent)
{
final String bitSetName = formatClassName(typeToken.applicableTypeName());
final int offset = typeToken.offset();
final String typePrefix = toUpperFirstChar(typeToken.encoding().primitiveType().primitiveName());
final ByteOrder byteOrder = typeToken.encoding().byteOrder();
final String byteOrderStr = generateByteOrder(byteOrder, typeToken.encoding().primitiveType().size());
final String typeName = cSharpTypeName(typeToken.encoding().primitiveType());
return String.format("\n" +
"%1$s" +
indent + INDENT + "public %2$s %3$s\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "get\n" +
indent + INDENT + INDENT + "{\n" +
"%4$s" +
indent + INDENT + INDENT + INDENT + "return (%5$s)_buffer.%6$sGet%8$s(_offset + %7$d);\n" +
indent + INDENT + INDENT + "}\n" +
indent + INDENT + INDENT + "set\n" +
indent + INDENT + INDENT + "{\n" +
indent + INDENT + INDENT + INDENT + "_buffer.%6$sPut%8$s(_offset + %7$d, (%9$s)value);\n" +
indent + INDENT + INDENT + "}\n" +
indent + INDENT + "}\n",
generateDocumentation(indent + INDENT, fieldToken),
bitSetName,
toUpperFirstChar(propertyName),
generateBitSetNotPresentCondition(fieldToken.version(), indent, bitSetName),
bitSetName,
typePrefix,
offset,
byteOrderStr,
typeName);
}
/**
 * Generates a cached composite flyweight field plus a get-only property that
 * wraps it over the buffer at the member's offset before returning it.
 * Return type tightened from Object to CharSequence to match every sibling
 * generator (covariant and source-compatible with the StringBuilder.append
 * call sites).
 */
private CharSequence generateCompositeProperty(
    final String propertyName, final Token fieldToken, final Token typeToken, final String indent)
{
    final String compositeName = CSharpUtil.formatClassName(typeToken.applicableTypeName());
    final int offset = typeToken.offset();
    final StringBuilder sb = new StringBuilder();

    sb.append(String.format("\n" +
        indent + INDENT + "private readonly %1$s _%2$s = new %3$s();\n",
        compositeName,
        toLowerFirstChar(propertyName),
        compositeName));

    sb.append(String.format("\n" +
        "%1$s" +
        indent + INDENT + "public %2$s %3$s\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "get\n" +
        indent + INDENT + INDENT + "{\n" +
        "%4$s" +
        indent + INDENT + INDENT + INDENT + "_%5$s.Wrap(_buffer, _offset + %6$d, _actingVersion);\n" +
        indent + INDENT + INDENT + INDENT + "return _%5$s;\n" +
        indent + INDENT + INDENT + "}\n" +
        indent + INDENT + "}\n",
        generateDocumentation(indent + INDENT, fieldToken),
        compositeName,
        toUpperFirstChar(propertyName),
        generateTypeFieldNotPresentCondition(fieldToken.version(), indent),
        toLowerFirstChar(propertyName),
        offset));

    return sb;
}
/**
 * Appends the SinceVersion/Deprecated constants and the InActingVersion()
 * helper that tells callers whether the field exists in the decoded version.
 */
private void generateSinceActingDeprecated(
final StringBuilder sb,
final String indent,
final String propertyName,
final Token token)
{
sb.append(String.format(
indent + "public const int %1$sSinceVersion = %2$d;\n" +
indent + "public const int %1$sDeprecated = %3$d;\n" +
indent + "public bool %1$sInActingVersion()\n" +
indent + "{\n" +
indent + INDENT + "return _actingVersion >= %1$sSinceVersion;\n" +
indent + "}\n",
propertyName,
token.version(),
token.deprecated()));
}
/**
 * Maps a byte order to the suffix used on DirectBuffer accessor names;
 * single-byte types have no endianness, so they get no suffix.
 */
private String generateByteOrder(final ByteOrder byteOrder, final int primitiveTypeSize)
{
    if (primitiveTypeSize == 1)
    {
        return "";
    }

    // ByteOrder exposes two singleton constants, so reference comparison is the
    // documented idiom; round-tripping through toString() was needlessly indirect.
    return byteOrder == ByteOrder.BIG_ENDIAN ? "BigEndian" : "LittleEndian";
}
/**
 * Renders a primitive value as a C# literal, applying the cast, suffix or
 * special form (NaN) required by the target type. Unhandled types yield "".
 */
private String generateLiteral(final PrimitiveType type, final String value)
{
    String literal = "";
    final String castType = cSharpTypeName(type);

    switch (type)
    {
        case CHAR:
        case UINT8:
        case INT8:
        case INT16:
        case UINT16:
            // Narrow types need an explicit cast as C# integer literals are int.
            literal = "(" + castType + ")" + value;
            break;

        case INT32:
            literal = value;
            break;

        case UINT32:
            literal = value + "U";
            break;

        case FLOAT:
            if (value.endsWith("NaN"))
            {
                literal = "float.NaN";
            }
            else
            {
                literal = value + "f";
            }
            break;

        case UINT64:
            // parseUnsignedLong covers the full uint64 range; Long.parseLong threw
            // NumberFormatException for values above Long.MAX_VALUE, e.g. the
            // customary uint64 null value 0xFFFFFFFFFFFFFFFF.
            literal = "0x" + Long.toHexString(Long.parseUnsignedLong(value)) + "UL";
            break;

        case INT64:
            literal = value + "L";
            break;

        case DOUBLE:
            if (value.endsWith("NaN"))
            {
                literal = "double.NaN";
            }
            else
            {
                literal = value + "d";
            }
            break;
    }

    return literal;
}
}
| sbe-tool/src/main/java/uk/co/real_logic/sbe/generation/csharp/CSharpGenerator.java | /*
* Copyright 2013-2020 Real Logic Limited.
* Copyright (C) 2017 MarketFactory, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.real_logic.sbe.generation.csharp;
import uk.co.real_logic.sbe.PrimitiveType;
import uk.co.real_logic.sbe.PrimitiveValue;
import uk.co.real_logic.sbe.generation.CodeGenerator;
import org.agrona.generation.OutputManager;
import uk.co.real_logic.sbe.generation.Generators;
import uk.co.real_logic.sbe.ir.*;
import org.agrona.Verify;
import java.io.IOException;
import java.io.Writer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;
import static uk.co.real_logic.sbe.generation.csharp.CSharpUtil.*;
import static uk.co.real_logic.sbe.ir.GenerationUtil.collectVarData;
import static uk.co.real_logic.sbe.ir.GenerationUtil.collectGroups;
import static uk.co.real_logic.sbe.ir.GenerationUtil.collectFields;
@SuppressWarnings("MethodLength")
public class CSharpGenerator implements CodeGenerator
{
// Output file/type name for the shared generated MetaAttribute enum.
private static final String META_ATTRIBUTE_ENUM = "MetaAttribute";
// One level of indentation in the generated C# source.
private static final String INDENT = "    ";
private static final String BASE_INDENT = INDENT;
// Intermediate representation of the schema being generated.
private final Ir ir;
// Destination for generated source files.
private final OutputManager outputManager;
/**
 * Create a generator that emits C# codec classes for the supplied IR.
 *
 * @param ir            intermediate representation of the message schema to generate from.
 * @param outputManager destination manager that supplies a {@link Writer} per generated type.
 */
public CSharpGenerator(final Ir ir, final OutputManager outputManager)
{
    Verify.notNull(ir, "ir");
    Verify.notNull(outputManager, "outputManager");

    this.ir = ir;
    this.outputManager = outputManager;
}
/**
 * Generate the composite codec for the SBE message header structure.
 *
 * @throws IOException if an error occurs writing the generated output.
 */
public void generateMessageHeaderStub() throws IOException
{
    generateComposite(ir.headerStructure().tokens());
}
/**
 * Generate codecs for every top-level type in the IR: enums, bit sets
 * and composites, plus the shared MetaAttribute enum.
 *
 * @throws IOException if an error occurs writing the generated output.
 */
public void generateTypeStubs() throws IOException
{
    generateMetaAttributeEnum();

    for (final List<Token> tokens : ir.types())
    {
        // The first token of each run identifies the kind of type declaration.
        final Signal signal = tokens.get(0).signal();

        if (Signal.BEGIN_ENUM == signal)
        {
            generateEnum(tokens);
        }
        else if (Signal.BEGIN_SET == signal)
        {
            generateBitSet(tokens);
        }
        else if (Signal.BEGIN_COMPOSITE == signal)
        {
            generateComposite(tokens);
        }
    }
}
/**
 * Generate all output: the message header, the type stubs, and one codec
 * class per message containing its fields, groups and var-data sections.
 *
 * @throws IOException if an error occurs writing the generated output.
 */
public void generate() throws IOException
{
    generateMessageHeaderStub();
    generateTypeStubs();

    for (final List<Token> tokens : ir.messages())
    {
        final Token msgToken = tokens.get(0);
        final String className = formatClassName(msgToken.name());

        try (Writer out = outputManager.createOutput(className))
        {
            out.append(generateFileHeader(ir.applicableNamespace()));
            out.append(generateDocumentation(BASE_INDENT, msgToken));
            out.append(generateClassDeclaration(className));
            out.append(generateMessageFlyweightCode(className, msgToken, BASE_INDENT));

            // Strip the BEGIN/END message tokens, then walk the body in
            // schema order: fixed fields, then groups, then var-data.
            final List<Token> messageBody = tokens.subList(1, tokens.size() - 1);

            final List<Token> fields = new ArrayList<>();
            int offset = collectFields(messageBody, 0, fields);
            out.append(generateFields(fields, BASE_INDENT));

            final List<Token> groups = new ArrayList<>();
            offset = collectGroups(messageBody, offset, groups);
            final StringBuilder groupCode = new StringBuilder();
            generateGroups(groupCode, className, groups, BASE_INDENT);
            out.append(groupCode);

            final List<Token> varData = new ArrayList<>();
            collectVarData(messageBody, offset, varData);
            out.append(generateVarData(varData, BASE_INDENT + INDENT));

            // Close the generated class and namespace.
            out.append(INDENT + "}\n");
            out.append("}\n");
        }
    }
}
/**
 * Generate the nested group classes and their accessor properties,
 * recursing for groups nested within groups.
 *
 * @param sb                     builder the generated code is appended to.
 * @param parentMessageClassName name of the enclosing message codec class.
 * @param tokens                 tokens for the groups, each run starting with BEGIN_GROUP.
 * @param indent                 indentation prefix for the generated code.
 */
private void generateGroups(
    final StringBuilder sb,
    final String parentMessageClassName,
    final List<Token> tokens,
    final String indent)
{
    for (int i = 0, size = tokens.size(); i < size; i++)
    {
        final Token groupToken = tokens.get(i);
        if (groupToken.signal() != Signal.BEGIN_GROUP)
        {
            throw new IllegalStateException("tokens must begin with BEGIN_GROUP: token=" + groupToken);
        }

        final String groupName = groupToken.name();
        sb.append(generateGroupProperty(groupName, groupToken, indent + INDENT));
        generateGroupClassHeader(sb, groupName, parentMessageClassName, tokens, i, indent + INDENT);

        // Advance past the BEGIN_GROUP token and then the dimensions
        // composite that immediately follows it.
        i++;
        i += tokens.get(i).componentTokenCount();

        final List<Token> fields = new ArrayList<>();
        i = collectFields(tokens, i, fields);
        sb.append(generateFields(fields, indent + INDENT));

        final List<Token> groups = new ArrayList<>();
        i = collectGroups(tokens, i, groups);
        generateGroups(sb, parentMessageClassName, groups, indent + INDENT);

        final List<Token> varData = new ArrayList<>();
        i = collectVarData(tokens, i, varData);
        sb.append(generateVarData(varData, indent + INDENT + INDENT));

        // Close the generated group class.
        sb.append(indent).append(INDENT + "}\n");
    }
}
/**
 * Generate the header of a group class: its fields, the WrapForDecode and
 * WrapForEncode methods, block-length/header-size constants and the
 * enumerator support members.
 *
 * @param sb                     builder the generated code is appended to.
 * @param groupName              name of the group being generated.
 * @param parentMessageClassName name of the enclosing message codec class.
 * @param tokens                 token stream containing the group definition.
 * @param index                  index of the BEGIN_GROUP token within {@code tokens}.
 * @param indent                 indentation prefix for the generated code.
 */
private void generateGroupClassHeader(
    final StringBuilder sb,
    final String groupName,
    final String parentMessageClassName,
    final List<Token> tokens,
    final int index,
    final String indent)
{
    // The token after BEGIN_GROUP describes the dimensions composite (header).
    final String dimensionsClassName = formatClassName(tokens.get(index + 1).name());
    final int dimensionHeaderLength = tokens.get(index + 1).encodedLength();

    sb.append(String.format("\n" +
        "%1$s" +
        indent + "public sealed partial class %2$sGroup\n" +
        indent + "{\n" +
        indent + INDENT + "private readonly %3$s _dimensions = new %3$s();\n" +
        indent + INDENT + "private %4$s _parentMessage;\n" +
        indent + INDENT + "private DirectBuffer _buffer;\n" +
        indent + INDENT + "private int _blockLength;\n" +
        indent + INDENT + "private int _actingVersion;\n" +
        indent + INDENT + "private int _count;\n" +
        indent + INDENT + "private int _index;\n" +
        indent + INDENT + "private int _offset;\n",
        generateDocumentation(indent, tokens.get(index)),
        formatClassName(groupName),
        dimensionsClassName,
        parentMessageClassName));

    // The generated decode path casts NumInGroup to int, so reject schemas
    // whose numInGroup range cannot be represented by a .NET int.
    final Token numInGroupToken = Generators.findFirst("numInGroup", tokens, index);
    final boolean isIntCastSafe = isRepresentableByInt32(numInGroupToken.encoding());
    if (!isIntCastSafe)
    {
        throw new IllegalArgumentException(String.format(
            "%s.numInGroup - cannot be represented safely by an int. Please constrain the maxValue.",
            groupName));
    }

    sb.append(String.format("\n" +
        indent + INDENT + "public void WrapForDecode(%s parentMessage, DirectBuffer buffer, int actingVersion)\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "_parentMessage = parentMessage;\n" +
        indent + INDENT + INDENT + "_buffer = buffer;\n" +
        indent + INDENT + INDENT + "_dimensions.Wrap(buffer, parentMessage.Limit, actingVersion);\n" +
        indent + INDENT + INDENT + "_blockLength = _dimensions.BlockLength;\n" +
        indent + INDENT + INDENT + "_count = (int) _dimensions.NumInGroup;\n" + // cast safety checked above
        indent + INDENT + INDENT + "_actingVersion = actingVersion;\n" +
        indent + INDENT + INDENT + "_index = 0;\n" +
        indent + INDENT + INDENT + "_parentMessage.Limit = parentMessage.Limit + SbeHeaderSize;\n" +
        indent + INDENT + "}\n",
        parentMessageClassName));

    final int blockLength = tokens.get(index).encodedLength();
    final String typeForBlockLength = cSharpTypeName(tokens.get(index + 2).encoding().primitiveType());
    final String typeForNumInGroup = cSharpTypeName(numInGroupToken.encoding().primitiveType());

    // When the minimum count is zero a single unsigned comparison suffices;
    // otherwise both bounds must be checked.
    final String throwCondition = numInGroupToken.encoding().applicableMinValue().longValue() == 0 ?
        "if ((uint) count > %3$d)\n" :
        "if (count < %2$d || count > %3$d)\n";

    sb.append(String.format("\n" +
        indent + INDENT + "public void WrapForEncode(%1$s parentMessage, DirectBuffer buffer, int count)\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + throwCondition +
        indent + INDENT + INDENT + "{\n" +
        indent + INDENT + INDENT + INDENT + "ThrowHelper.ThrowCountOutOfRangeException(count);\n" +
        indent + INDENT + INDENT + "}\n\n" +
        indent + INDENT + INDENT + "_parentMessage = parentMessage;\n" +
        indent + INDENT + INDENT + "_buffer = buffer;\n" +
        indent + INDENT + INDENT + "_dimensions.Wrap(buffer, parentMessage.Limit, _actingVersion);\n" +
        indent + INDENT + INDENT + "_dimensions.BlockLength = SbeBlockLength;\n" +
        indent + INDENT + INDENT + "_dimensions.NumInGroup = (%5$s) count;\n" +
        indent + INDENT + INDENT + "_index = 0;\n" +
        indent + INDENT + INDENT + "_count = count;\n" +
        indent + INDENT + INDENT + "_blockLength = SbeBlockLength;\n" +
        indent + INDENT + INDENT + "_actingVersion = SchemaVersion;\n" +
        indent + INDENT + INDENT + "parentMessage.Limit = parentMessage.Limit + SbeHeaderSize;\n" +
        indent + INDENT + "}\n",
        parentMessageClassName,
        numInGroupToken.encoding().applicableMinValue().longValue(),
        numInGroupToken.encoding().applicableMaxValue().longValue(),
        typeForBlockLength,
        typeForNumInGroup));

    sb.append(String.format("\n" +
        indent + INDENT + "public const int SbeBlockLength = %d;\n" +
        indent + INDENT + "public const int SbeHeaderSize = %d;\n",
        blockLength,
        dimensionHeaderLength));

    generateGroupEnumerator(sb, groupName, typeForNumInGroup, indent);
}
/**
 * Generate the iteration members of a group class: Count/HasNext properties,
 * ResetCountToIndex, Next and a C# GetEnumerator using yield.
 *
 * @param sb               builder the generated code is appended to.
 * @param groupName        name of the group being generated.
 * @param typeForNumInGroup C# type used for the NumInGroup field cast.
 * @param indent           indentation prefix for the generated code.
 */
private void generateGroupEnumerator(
    final StringBuilder sb,
    final String groupName,
    final String typeForNumInGroup,
    final String indent)
{
    sb.append(
        indent + INDENT + "public int ActingBlockLength { get { return _blockLength; } }\n\n" +
        indent + INDENT + "public int Count { get { return _count; } }\n\n" +
        indent + INDENT + "public bool HasNext { get { return _index < _count; } }\n");

    sb.append(String.format("\n" +
        indent + INDENT + "public int ResetCountToIndex()\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "_count = _index;\n" +
        indent + INDENT + INDENT + "_dimensions.NumInGroup = (%s) _count;\n\n" +
        indent + INDENT + INDENT + "return _count;\n" +
        indent + INDENT + "}\n",
        typeForNumInGroup));

    sb.append(String.format("\n" +
        indent + INDENT + "public %sGroup Next()\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "if (_index >= _count)\n" +
        indent + INDENT + INDENT + "{\n" +
        indent + INDENT + INDENT + INDENT + "ThrowHelper.ThrowInvalidOperationException();\n" +
        indent + INDENT + INDENT + "}\n\n" +
        indent + INDENT + INDENT + "_offset = _parentMessage.Limit;\n" +
        indent + INDENT + INDENT + "_parentMessage.Limit = _offset + _blockLength;\n" +
        indent + INDENT + INDENT + "++_index;\n\n" +
        indent + INDENT + INDENT + "return this;\n" +
        indent + INDENT + "}\n",
        formatClassName(groupName)));

    // The enumerator reuses the flyweight: each yield returns the same
    // instance advanced to the next element.
    sb.append("\n" +
        indent + INDENT + "public System.Collections.IEnumerator GetEnumerator()\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "while (this.HasNext)\n" +
        indent + INDENT + INDENT + "{\n" +
        indent + INDENT + INDENT + INDENT + "yield return this.Next();\n" +
        indent + INDENT + INDENT + "}\n" +
        indent + INDENT + "}\n");
}
/**
 * Check whether the full value range of an encoding fits inside a 32-bit
 * signed integer.
 *
 * @param encoding encoding whose applicable min/max values are checked.
 * @return true if both bounds fit in an int.
 */
private boolean isRepresentableByInt32(final Encoding encoding)
{
    // .NET Int32 has the same min/max values as Java's Integer, so the
    // Java constants translate directly.
    final long min = encoding.applicableMinValue().longValue();
    final long max = encoding.applicableMaxValue().longValue();

    return min >= Integer.MIN_VALUE && max <= Integer.MAX_VALUE;
}
/**
 * Generate the members of the parent class that expose a group: the cached
 * group flyweight field, its schema id constant, the decode property and the
 * {@code <name>Count(int)} encode method.
 *
 * @param groupName name of the group.
 * @param token     BEGIN_GROUP token for the group.
 * @param indent    indentation prefix for the generated code.
 * @return the generated code.
 */
private CharSequence generateGroupProperty(final String groupName, final Token token, final String indent)
{
    final StringBuilder sb = new StringBuilder();
    final String className = CSharpUtil.formatClassName(groupName);

    sb.append(String.format("\n" +
        indent + "private readonly %sGroup _%s = new %sGroup();\n",
        className,
        toLowerFirstChar(groupName),
        className));

    sb.append(String.format("\n" +
        indent + "public const long %sId = %d;\n",
        toUpperFirstChar(groupName),
        token.id()));

    generateSinceActingDeprecated(sb, indent, toUpperFirstChar(groupName), token);

    // Decode accessor: wraps the shared flyweight before returning it.
    sb.append(String.format("\n" +
        "%1$s" +
        indent + "public %2$sGroup %3$s\n" +
        indent + "{\n" +
        indent + INDENT + "get\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "_%4$s.WrapForDecode(_parentMessage, _buffer, _actingVersion);\n" +
        indent + INDENT + INDENT + "return _%4$s;\n" +
        indent + INDENT + "}\n" +
        indent + "}\n",
        generateDocumentation(indent, token),
        className,
        toUpperFirstChar(groupName),
        toLowerFirstChar(groupName)));

    // Encode accessor: wraps for encode with the supplied element count.
    sb.append(String.format("\n" +
        indent + "public %1$sGroup %2$sCount(int count)\n" +
        indent + "{\n" +
        indent + INDENT + "_%3$s.WrapForEncode(_parentMessage, _buffer, count);\n" +
        indent + INDENT + "return _%3$s;\n" +
        indent + "}\n",
        className,
        toUpperFirstChar(groupName),
        toLowerFirstChar(groupName)));

    return sb;
}
/**
 * Generate accessors for variable-length data fields: length readers,
 * Get/Set methods over byte[] and Span, plus an allocating byte[] getter.
 *
 * @param tokens token stream containing BEGIN_VAR_DATA runs.
 * @param indent indentation prefix for the generated code.
 * @return the generated code.
 */
private CharSequence generateVarData(final List<Token> tokens, final String indent)
{
    final StringBuilder sb = new StringBuilder();

    for (int i = 0, size = tokens.size(); i < size; i++)
    {
        final Token token = tokens.get(i);
        if (token.signal() == Signal.BEGIN_VAR_DATA)
        {
            generateFieldIdMethod(sb, token, indent);
            generateSinceActingDeprecated(sb, indent, CSharpUtil.formatPropertyName(token.name()), token);
            generateOffsetMethod(sb, token, indent);
            final Token varDataToken = Generators.findFirst("varData", tokens, i);
            final String characterEncoding = varDataToken.encoding().characterEncoding();
            generateCharacterEncodingMethod(sb, token.name(), characterEncoding, indent);
            generateFieldMetaAttributeMethod(sb, token, indent);

            final String propertyName = toUpperFirstChar(token.name());
            final Token lengthToken = Generators.findFirst("length", tokens, i);
            final int sizeOfLengthField = lengthToken.encodedLength();
            final Encoding lengthEncoding = lengthToken.encoding();
            final String lengthCSharpType = cSharpTypeName(lengthEncoding.primitiveType());
            final String lengthTypePrefix = toUpperFirstChar(lengthEncoding.primitiveType().primitiveName());
            final ByteOrder byteOrder = lengthEncoding.byteOrder();
            final String byteOrderStr = generateByteOrder(byteOrder, lengthEncoding.primitiveType().size());

            sb.append(String.format("\n" +
                indent + "public const int %sHeaderSize = %d;\n",
                propertyName,
                sizeOfLengthField));

            // Length reader: peeks the length prefix without advancing Limit.
            sb.append(String.format(indent + "\n" +
                indent + "public int %1$sLength()\n" +
                indent + "{\n" +
                indent + INDENT + "_buffer.CheckLimit(_parentMessage.Limit + %2$d);\n" +
                indent + INDENT + "return (int)_buffer.%3$sGet%4$s(_parentMessage.Limit);\n" +
                indent + "}\n",
                propertyName,
                sizeOfLengthField,
                lengthTypePrefix,
                byteOrderStr));

            // byte[] overload delegates to the Span overload.
            sb.append(String.format("\n" +
                indent + "public int Get%1$s(byte[] dst, int dstOffset, int length) =>\n" +
                indent + INDENT + "Get%1$s(new Span<byte>(dst, dstOffset, length));\n",
                propertyName));

            sb.append(String.format("\n" +
                indent + "public int Get%1$s(Span<byte> dst)\n" +
                indent + "{\n" +
                "%2$s" +
                indent + INDENT + "const int sizeOfLengthField = %3$d;\n" +
                indent + INDENT + "int limit = _parentMessage.Limit;\n" +
                indent + INDENT + "_buffer.CheckLimit(limit + sizeOfLengthField);\n" +
                indent + INDENT + "int dataLength = (int)_buffer.%4$sGet%5$s(limit);\n" +
                indent + INDENT + "int bytesCopied = Math.Min(dst.Length, dataLength);\n" +
                indent + INDENT + "_parentMessage.Limit = limit + sizeOfLengthField + dataLength;\n" +
                indent + INDENT + "_buffer.GetBytes(limit + sizeOfLengthField, dst.Slice(0, bytesCopied));\n\n" +
                indent + INDENT + "return bytesCopied;\n" +
                indent + "}\n",
                propertyName,
                generateArrayFieldNotPresentCondition(token.version(), indent),
                sizeOfLengthField,
                lengthTypePrefix,
                byteOrderStr));

            sb.append(String.format(indent + "\n" +
                indent + "// Allocates and returns a new byte array\n" +
                indent + "public byte[] Get%1$sBytes()\n" +
                indent + "{\n" +
                indent + INDENT + "const int sizeOfLengthField = %2$d;\n" +
                indent + INDENT + "int limit = _parentMessage.Limit;\n" +
                indent + INDENT + "_buffer.CheckLimit(limit + sizeOfLengthField);\n" +
                indent + INDENT + "int dataLength = (int)_buffer.%3$sGet%4$s(limit);\n" +
                indent + INDENT + "byte[] data = new byte[dataLength];\n" +
                indent + INDENT + "_parentMessage.Limit = limit + sizeOfLengthField + dataLength;\n" +
                indent + INDENT + "_buffer.GetBytes(limit + sizeOfLengthField, data);\n\n" +
                indent + INDENT + "return data;\n" +
                indent + "}\n",
                propertyName,
                sizeOfLengthField,
                lengthTypePrefix,
                byteOrderStr));

            sb.append(String.format("\n" +
                indent + "public int Set%1$s(byte[] src, int srcOffset, int length) =>\n" +
                indent + INDENT + "Set%1$s(new ReadOnlySpan<byte>(src, srcOffset, length));\n",
                propertyName));

            // Setter writes the length prefix then the payload bytes.
            sb.append(String.format("\n" +
                indent + "public int Set%1$s(ReadOnlySpan<byte> src)\n" +
                indent + "{\n" +
                indent + INDENT + "const int sizeOfLengthField = %2$d;\n" +
                indent + INDENT + "int limit = _parentMessage.Limit;\n" +
                indent + INDENT + "_parentMessage.Limit = limit + sizeOfLengthField + src.Length;\n" +
                indent + INDENT + "_buffer.%3$sPut%5$s(limit, (%4$s)src.Length);\n" +
                indent + INDENT + "_buffer.SetBytes(limit + sizeOfLengthField, src);\n\n" +
                indent + INDENT + "return src.Length;\n" +
                indent + "}\n",
                propertyName,
                sizeOfLengthField,
                lengthTypePrefix,
                lengthCSharpType,
                byteOrderStr));
        }
    }

    return sb;
}
/**
 * Generate a C# [Flags] enum for an SBE bit set type.
 *
 * @param tokens tokens of the set declaration, BEGIN_SET first.
 * @throws IOException if an error occurs writing the generated output.
 */
private void generateBitSet(final List<Token> tokens) throws IOException
{
    final Token setToken = tokens.get(0);
    final String enumName = CSharpUtil.formatClassName(setToken.applicableTypeName());

    try (Writer out = outputManager.createOutput(enumName))
    {
        final String underlyingType = cSharpTypeName(setToken.encoding().primitiveType());

        out.append(generateFileHeader(ir.applicableNamespace()));
        out.append(generateDocumentation(INDENT, setToken));
        out.append(generateEnumDeclaration(enumName, underlyingType, true));
        out.append(generateChoices(tokens.subList(1, tokens.size() - 1)));
        out.append(INDENT + "}\n");
        out.append("}\n");
    }
}
/**
 * Generate a C# enum for an SBE enum type, including its NULL_VALUE member.
 *
 * @param tokens tokens of the enum declaration, BEGIN_ENUM first.
 * @throws IOException if an error occurs writing the generated output.
 */
private void generateEnum(final List<Token> tokens) throws IOException
{
    final Token enumToken = tokens.get(0);
    final String enumName = CSharpUtil.formatClassName(enumToken.applicableTypeName());

    try (Writer out = outputManager.createOutput(enumName))
    {
        final String underlyingType = cSharpTypeName(enumToken.encoding().primitiveType());

        out.append(generateFileHeader(ir.applicableNamespace()));
        out.append(generateDocumentation(INDENT, enumToken));
        out.append(generateEnumDeclaration(enumName, underlyingType, false));
        out.append(generateEnumValues(tokens.subList(1, tokens.size() - 1), enumToken));
        out.append(INDENT + "}\n");
        out.append("}\n");
    }
}
/**
 * Generate a fixed-size flyweight class for an SBE composite type.
 *
 * @param tokens tokens of the composite declaration, BEGIN_COMPOSITE first.
 * @throws IOException if an error occurs writing the generated output.
 */
private void generateComposite(final List<Token> tokens) throws IOException
{
    final Token compositeToken = tokens.get(0);
    final String compositeName = CSharpUtil.formatClassName(compositeToken.applicableTypeName());

    try (Writer out = outputManager.createOutput(compositeName))
    {
        out.append(generateFileHeader(ir.applicableNamespace()));
        out.append(generateDocumentation(INDENT, compositeToken));
        out.append(generateClassDeclaration(compositeName));
        out.append(generateFixedFlyweightCode(compositeToken.encodedLength()));
        out.append(generateCompositePropertyElements(tokens.subList(1, tokens.size() - 1), BASE_INDENT));
        out.append(INDENT + "}\n");
        out.append("}\n");
    }
}
/**
 * Generate the property accessors for every element of a composite,
 * dispatching on the element kind (primitive, enum, bit set, or nested
 * composite).
 *
 * @param tokens element tokens of the composite (BEGIN/END stripped).
 * @param indent indentation prefix for the generated code.
 * @return the generated code.
 */
private CharSequence generateCompositePropertyElements(final List<Token> tokens, final String indent)
{
    final StringBuilder sb = new StringBuilder();

    int i = 0;
    while (i < tokens.size())
    {
        final Token token = tokens.get(i);
        final String propertyName = formatPropertyName(token.name());

        // FIXME: do I need to pass classname down here for disambiguation
        switch (token.signal())
        {
            case ENCODING:
                sb.append(generatePrimitiveProperty(propertyName, token, token, indent));
                break;

            case BEGIN_ENUM:
                sb.append(generateEnumProperty(propertyName, token, token, indent));
                break;

            case BEGIN_SET:
                sb.append(generateBitSetProperty(propertyName, token, token, indent));
                break;

            case BEGIN_COMPOSITE:
                sb.append(generateCompositeProperty(propertyName, token, token, indent));
                break;
        }

        // Jump over all tokens belonging to this element.
        i += token.componentTokenCount();
    }

    return sb;
}
/**
 * Generate the members of a [Flags] enum from the CHOICE tokens of a bit set.
 * Each choice's constant value is its bit position, which is rendered as the
 * corresponding power of two.
 *
 * @param tokens tokens of the set body.
 * @return the generated enum member lines.
 */
private CharSequence generateChoices(final List<Token> tokens)
{
    final StringBuilder sb = new StringBuilder();

    for (final Token token : tokens)
    {
        if (token.signal() == Signal.CHOICE)
        {
            final String choiceName = toUpperFirstChar(token.applicableTypeName());
            final int choiceBitPosition = Integer.parseInt(token.encoding().constValue().toString());
            // Fix: was (int)Math.pow(2, bit) - the double-to-int narrowing cast
            // saturates at Integer.MAX_VALUE, so bit positions >= 31 produced
            // wrong flag values. A long shift is exact up to bit 62.
            // NOTE(review): bit position 63 would render as a negative literal;
            // confirm whether 64-bit wide bit sets are expected here.
            final long choiceValue = 1L << choiceBitPosition;
            sb.append(String.format(INDENT + INDENT + "%s = %s,\n", choiceName, choiceValue));
        }
    }

    return sb;
}
/**
 * Generate the value members of a C# enum, one per token, followed by a
 * trailing NULL_VALUE member taken from the encoding's applicable null value.
 *
 * @param tokens        VALID_VALUE tokens of the enum body.
 * @param encodingToken the BEGIN_ENUM token carrying the encoding.
 * @return the generated member lines.
 */
private CharSequence generateEnumValues(final List<Token> tokens, final Token encodingToken)
{
    final StringBuilder sb = new StringBuilder();

    for (final Token token : tokens)
    {
        sb.append(generateDocumentation(INDENT + INDENT, token))
            .append(INDENT)
            .append(INDENT)
            .append(token.name())
            .append(" = ")
            .append(token.encoding().constValue())
            .append(",\n");
    }

    // NULL_VALUE is last and deliberately has no trailing comma.
    final PrimitiveValue nullValue = encodingToken.encoding().applicableNullValue();
    sb.append(INDENT).append(INDENT).append("NULL_VALUE = ").append(nullValue).append("\n");

    return sb;
}
/**
 * Generate the standard file header: warning pragma, using directives and the
 * opening of the namespace. The namespace is derived from the schema package
 * name by capitalising each dot-separated component and removing '-' while
 * capitalising the character that follows it.
 *
 * @param packageName schema package name, e.g. "uk.co.real-logic".
 * @return the generated file header text.
 */
private CharSequence generateFileHeader(final String packageName)
{
    // Pass 1: capitalise every dot-separated component, rejoined with dots.
    final StringBuilder dotted = new StringBuilder();
    for (final String component : packageName.split("\\."))
    {
        if (dotted.length() > 0)
        {
            dotted.append('.');
        }
        dotted.append(toUpperFirstChar(component));
    }

    // Pass 2: drop '-' separators, capitalising each following fragment.
    final StringBuilder namespaceName = new StringBuilder();
    for (final String fragment : dotted.toString().split("-"))
    {
        namespaceName.append(toUpperFirstChar(fragment));
    }

    return String.format(
        "/* Generated SBE (Simple Binary Encoding) message codec */\n\n" +
        "#pragma warning disable 1591 // disable warning on missing comments\n" +
        "using System;\n" +
        "using Org.SbeTool.Sbe.Dll;\n\n" +
        "namespace %s\n" +
        "{\n",
        namespaceName);
}
/**
 * Generate the opening declaration of a sealed partial codec class.
 *
 * @param className name of the class to declare.
 * @return the declaration and opening brace lines.
 */
private CharSequence generateClassDeclaration(final String className)
{
    return INDENT + "public sealed partial class " + className + "\n" +
        INDENT + "{\n";
}
/**
 * Generate a C# XML doc summary block from a token's description, or an
 * empty string when there is no description.
 *
 * @param indent indentation prefix for the generated lines.
 * @param token  token whose description is rendered.
 * @return the XML doc comment, or "" when there is nothing to document.
 */
public static String generateDocumentation(final String indent, final Token token)
{
    final String description = token.description();
    if (description == null || description.isEmpty())
    {
        return "";
    }

    return indent + "/// <summary>\n" +
        indent + "/// " + description + "\n" +
        indent + "/// </summary>\n";
}
/**
 * Generate the shared MetaAttribute enum used by all generated codecs.
 *
 * @throws IOException if an error occurs writing the generated output.
 */
private void generateMetaAttributeEnum() throws IOException
{
    try (Writer out = outputManager.createOutput(META_ATTRIBUTE_ENUM))
    {
        out.append(generateFileHeader(ir.applicableNamespace()));
        out.append(
            INDENT + "public enum MetaAttribute\n" +
            INDENT + "{\n" +
            INDENT + INDENT + "Epoch,\n" +
            INDENT + INDENT + "TimeUnit,\n" +
            INDENT + INDENT + "SemanticType,\n" +
            INDENT + INDENT + "Presence\n" +
            INDENT + "}\n" +
            "}\n");
    }
}
/**
 * Generate the opening declaration of a C# enum, optionally preceded by the
 * [Flags] attribute for bit sets.
 *
 * @param name              name of the enum.
 * @param primitiveType     C# underlying type of the enum.
 * @param addFlagsAttribute whether to emit the [Flags] attribute line.
 * @return the declaration and opening brace lines.
 */
private CharSequence generateEnumDeclaration(
    final String name,
    final String primitiveType,
    final boolean addFlagsAttribute)
{
    final StringBuilder sb = new StringBuilder();

    if (addFlagsAttribute)
    {
        sb.append(INDENT).append("[Flags]\n");
    }

    sb.append(INDENT)
        .append("public enum ")
        .append(name)
        .append(" : ")
        .append(primitiveType)
        .append("\n")
        .append(INDENT)
        .append("{\n");

    return sb.toString();
}
/**
 * Generate the meta data constants and accessor methods for a primitive
 * property, dispatching to the constant or read/write form as appropriate.
 *
 * @param propertyName name of the property.
 * @param fieldToken   token for the field declaration.
 * @param typeToken    token for the underlying encoding.
 * @param indent       indentation prefix for the generated code.
 * @return the generated code.
 */
private CharSequence generatePrimitiveProperty(
    final String propertyName, final Token fieldToken, final Token typeToken, final String indent)
{
    final StringBuilder sb = new StringBuilder();

    sb.append(generatePrimitiveFieldMetaData(propertyName, typeToken, indent + INDENT));

    // Constants get a read-only property; everything else gets buffer accessors.
    final CharSequence accessors = typeToken.isConstantEncoding() ?
        generateConstPropertyMethods(propertyName, typeToken, indent) :
        generatePrimitivePropertyMethods(propertyName, fieldToken, typeToken, indent);

    return sb.append(accessors);
}
/**
 * Generate the accessor methods for a non-constant primitive property,
 * choosing between single-value and array forms based on the array length.
 *
 * @param propertyName name of the property.
 * @param fieldToken   token for the field declaration.
 * @param typeToken    token for the underlying encoding.
 * @param indent       indentation prefix for the generated code.
 * @return the generated code, or "" for a zero-length declaration.
 */
private CharSequence generatePrimitivePropertyMethods(
    final String propertyName,
    final Token fieldToken,
    final Token typeToken,
    final String indent)
{
    final int arrayLength = typeToken.arrayLength();

    if (1 == arrayLength)
    {
        return generateSingleValueProperty(propertyName, fieldToken, typeToken, indent + INDENT);
    }

    if (arrayLength > 1)
    {
        return generateArrayProperty(propertyName, fieldToken, typeToken, indent + INDENT);
    }

    return "";
}
/**
 * Generate the NullValue/MinValue/MaxValue constants for a primitive field.
 *
 * @param propertyName name of the property.
 * @param token        token carrying the primitive encoding.
 * @param indent       indentation prefix for the generated code.
 * @return the generated constant declarations.
 */
private CharSequence generatePrimitiveFieldMetaData(
    final String propertyName,
    final Token token,
    final String indent)
{
    final PrimitiveType primitiveType = token.encoding().primitiveType();
    final String typeName = cSharpTypeName(primitiveType);

    return String.format(
        "\n" +
        indent + "public const %1$s %2$sNullValue = %3$s;\n" +
        indent + "public const %1$s %2$sMinValue = %4$s;\n" +
        indent + "public const %1$s %2$sMaxValue = %5$s;\n",
        typeName,
        toUpperFirstChar(propertyName),
        generateLiteral(primitiveType, token.encoding().applicableNullValue().toString()),
        generateLiteral(primitiveType, token.encoding().applicableMinValue().toString()),
        generateLiteral(primitiveType, token.encoding().applicableMaxValue().toString()));
}
/**
 * Generate a C# get/set property for a single primitive value backed by the
 * underlying buffer at the field's offset.
 *
 * @param propertyName name of the property.
 * @param fieldToken   token for the field declaration (supplies version/doc).
 * @param typeToken    token for the underlying encoding (supplies type/offset).
 * @param indent       indentation prefix for the generated code.
 * @return the generated property code.
 */
private CharSequence generateSingleValueProperty(
    final String propertyName,
    final Token fieldToken,
    final Token typeToken,
    final String indent)
{
    final String typeName = cSharpTypeName(typeToken.encoding().primitiveType());
    final String typePrefix = toUpperFirstChar(typeToken.encoding().primitiveType().primitiveName());
    final int offset = typeToken.offset();
    final ByteOrder byteOrder = typeToken.encoding().byteOrder();
    final String byteOrderStr = generateByteOrder(byteOrder, typeToken.encoding().primitiveType().size());

    // The getter first emits a version guard (when the field was added after
    // version 0) that returns the null value for older acting versions.
    return String.format("\n" +
        "%1$s" +
        indent + "public %2$s %3$s\n" +
        indent + "{\n" +
        indent + INDENT + "get\n" +
        indent + INDENT + "{\n" +
        "%4$s" +
        indent + INDENT + INDENT + "return _buffer.%5$sGet%7$s(_offset + %6$d);\n" +
        indent + INDENT + "}\n" +
        indent + INDENT + "set\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "_buffer.%5$sPut%7$s(_offset + %6$d, value);\n" +
        indent + INDENT + "}\n" +
        indent + "}\n\n",
        generateDocumentation(indent, fieldToken),
        typeName,
        toUpperFirstChar(propertyName),
        generateFieldNotPresentCondition(fieldToken.version(), typeToken.encoding(), indent),
        typePrefix,
        offset,
        byteOrderStr);
}
/**
 * Generate the version guard inside a getter that returns the encoding's
 * null value when the acting version predates the field. Returns "" for
 * fields present since version 0.
 *
 * @param sinceVersion schema version in which the field was introduced.
 * @param encoding     encoding whose null value is returned by the guard.
 * @param indent       indentation prefix for the generated code.
 * @return the guard line, or "" when no guard is needed.
 */
private CharSequence generateFieldNotPresentCondition(
    final int sinceVersion,
    final Encoding encoding,
    final String indent)
{
    if (sinceVersion == 0)
    {
        return "";
    }

    // sinceVersion is positive in practice; "(byte)0" is a defensive
    // fallback for a negative value.
    final String literal = sinceVersion > 0 ?
        generateLiteral(encoding.primitiveType(), encoding.applicableNullValue().toString()) :
        "(byte)0";

    return indent + INDENT + INDENT + "if (_actingVersion < " + sinceVersion + ") return " + literal + ";\n\n";
}
/**
 * Generate the version guard for array/var-data getters that returns 0
 * (bytes copied) when the acting version predates the field.
 *
 * @param sinceVersion schema version in which the field was introduced.
 * @param indent       indentation prefix for the generated code.
 * @return the guard line, or "" when no guard is needed.
 */
private CharSequence generateArrayFieldNotPresentCondition(final int sinceVersion, final String indent)
{
    if (sinceVersion == 0)
    {
        return "";
    }

    return indent + INDENT + INDENT + "if (_actingVersion < " + sinceVersion + ") return 0;\n\n";
}
/**
 * Generate the version guard for bit-set getters that returns an empty
 * (zero-cast) set when the acting version predates the field.
 *
 * @param sinceVersion schema version in which the field was introduced.
 * @param indent       indentation prefix for the generated code.
 * @param bitSetName   name of the generated bit-set enum used in the cast.
 * @return the guard line, or "" when no guard is needed.
 */
private CharSequence generateBitSetNotPresentCondition(
    final int sinceVersion,
    final String indent,
    final String bitSetName)
{
    if (sinceVersion == 0)
    {
        return "";
    }

    return indent + INDENT + INDENT + INDENT +
        "if (_actingVersion < " + sinceVersion + ") return (" + bitSetName + ")0;\n\n";
}
/**
 * Generate the version guard for reference-type getters that returns null
 * when the acting version predates the field.
 *
 * @param sinceVersion schema version in which the field was introduced.
 * @param indent       indentation prefix for the generated code.
 * @return the guard line, or "" when no guard is needed.
 */
private CharSequence generateTypeFieldNotPresentCondition(
    final int sinceVersion,
    final String indent)
{
    if (sinceVersion == 0)
    {
        return "";
    }

    return indent + INDENT + INDENT + "if (_actingVersion < " + sinceVersion + ") return null;\n\n";
}
/**
 * Generate indexed Get/Set accessors for a fixed-length primitive array
 * field, plus byte[]/Span bulk accessors when the element type is char.
 *
 * @param propertyName name of the property.
 * @param fieldToken   token for the field declaration (supplies version/doc).
 * @param typeToken    token for the underlying encoding (supplies type/offset/length).
 * @param indent       indentation prefix for the generated code.
 * @return the generated code.
 */
private CharSequence generateArrayProperty(
    final String propertyName,
    final Token fieldToken,
    final Token typeToken,
    final String indent)
{
    final String typeName = cSharpTypeName(typeToken.encoding().primitiveType());
    final String typePrefix = toUpperFirstChar(typeToken.encoding().primitiveType().primitiveName());
    final int offset = typeToken.offset();
    final ByteOrder byteOrder = typeToken.encoding().byteOrder();
    final String byteOrderStr = generateByteOrder(byteOrder, typeToken.encoding().primitiveType().size());
    final int fieldLength = typeToken.arrayLength();
    final int typeSize = typeToken.encoding().primitiveType().size();
    final String propName = toUpperFirstChar(propertyName);

    final StringBuilder sb = new StringBuilder();

    sb.append(String.format("\n" +
        indent + "public const int %sLength = %d;\n",
        propName, fieldLength));

    // Indexed getter with an unsigned-compare bounds check.
    sb.append(String.format("\n" +
        "%1$s" +
        indent + "public %2$s Get%3$s(int index)\n" +
        indent + "{\n" +
        indent + INDENT + "if ((uint) index >= %4$d)\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "ThrowHelper.ThrowIndexOutOfRangeException(index);\n" +
        indent + INDENT + "}\n\n" +
        "%5$s" +
        indent + INDENT + "return _buffer.%6$sGet%9$s(_offset + %7$d + (index * %8$d));\n" +
        indent + "}\n",
        generateDocumentation(indent, fieldToken),
        typeName, propName, fieldLength,
        generateFieldNotPresentCondition(fieldToken.version(), typeToken.encoding(), indent),
        typePrefix, offset, typeSize, byteOrderStr));

    // Indexed setter with the same bounds check.
    sb.append(String.format("\n" +
        "%1$s" +
        indent + "public void Set%2$s(int index, %3$s value)\n" +
        indent + "{\n" +
        indent + INDENT + "if ((uint) index >= %4$d)\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "ThrowHelper.ThrowIndexOutOfRangeException(index);\n" +
        indent + INDENT + "}\n\n" +
        indent + INDENT + "_buffer.%5$sPut%8$s(_offset + %6$d + (index * %7$d), value);\n" +
        indent + "}\n",
        generateDocumentation(indent, fieldToken),
        propName, typeName, fieldLength, typePrefix, offset, typeSize, byteOrderStr));

    // char arrays also get bulk byte[]/Span accessors and an encoding constant.
    if (typeToken.encoding().primitiveType() == PrimitiveType.CHAR)
    {
        generateCharacterEncodingMethod(sb, propertyName, typeToken.encoding().characterEncoding(), indent);

        sb.append(String.format("\n" +
            indent + "public int Get%1$s(byte[] dst, int dstOffset)\n" +
            indent + "{\n" +
            indent + INDENT + "const int length = %2$d;\n" +
            "%3$s" +
            indent + INDENT + "return Get%1$s(new Span<byte>(dst, dstOffset, length));\n" +
            indent + "}\n",
            propName, fieldLength, generateArrayFieldNotPresentCondition(fieldToken.version(), indent), offset));

        sb.append(String.format("\n" +
            indent + "public int Get%1$s(Span<byte> dst)\n" +
            indent + "{\n" +
            indent + INDENT + "const int length = %2$d;\n" +
            indent + INDENT + "if (dst.Length < length)\n" +
            indent + INDENT + "{\n" +
            indent + INDENT + INDENT + "ThrowHelper.ThrowWhenSpanLengthTooSmall(dst.Length);\n" +
            indent + INDENT + "}\n\n" +
            "%3$s" +
            indent + INDENT + "_buffer.GetBytes(_offset + %4$d, dst);\n" +
            indent + INDENT + "return length;\n" +
            indent + "}\n",
            propName, fieldLength, generateArrayFieldNotPresentCondition(fieldToken.version(), indent), offset));

        sb.append(String.format("\n" +
            indent + "public void Set%1$s(byte[] src, int srcOffset)\n" +
            indent + "{\n" +
            indent + INDENT + "Set%1$s(new ReadOnlySpan<byte>(src, srcOffset, src.Length - srcOffset));\n" +
            indent + "}\n",
            propName, fieldLength, offset));

        sb.append(String.format("\n" +
            indent + "public void Set%1$s(ReadOnlySpan<byte> src)\n" +
            indent + "{\n" +
            indent + INDENT + "const int length = %2$d;\n" +
            indent + INDENT + "if (src.Length > length)\n" +
            indent + INDENT + "{\n" +
            indent + INDENT + INDENT + "ThrowHelper.ThrowWhenSpanLengthTooLarge(src.Length);\n" +
            indent + INDENT + "}\n\n" +
            indent + INDENT + "_buffer.SetBytes(_offset + %3$d, src);\n" +
            indent + "}\n",
            propName, fieldLength, offset));
    }

    return sb;
}
/**
 * Generate the {@code <name>CharacterEncoding} string constant for a
 * character-typed field.
 *
 * @param sb           builder the generated code is appended to.
 * @param propertyName name of the property.
 * @param encoding     character encoding name to embed in the constant.
 * @param indent       indentation prefix for the generated code.
 */
private void generateCharacterEncodingMethod(
    final StringBuilder sb,
    final String propertyName,
    final String encoding,
    final String indent)
{
    sb.append("\n")
        .append(indent)
        .append("public const string ")
        .append(formatPropertyName(propertyName))
        .append("CharacterEncoding = \"")
        .append(encoding)
        .append("\";\n\n");
}
/**
 * Generate accessors for a constant-encoded property. Non-char constants
 * become a read-only property; char-array constants become a static byte
 * array with indexed and bulk getters.
 *
 * @param propertyName name of the property.
 * @param token        token carrying the constant encoding.
 * @param indent       indentation prefix for the generated code.
 * @return the generated code.
 */
private CharSequence generateConstPropertyMethods(
    final String propertyName,
    final Token token,
    final String indent)
{
    if (token.encoding().primitiveType() != PrimitiveType.CHAR)
    {
        // ODE: we generate a property here because the constant could
        // become a field in a newer version of the protocol.
        // Fix: the documentation placeholder was "%1s" (pad to width 1),
        // which emitted a stray leading space when the documentation string
        // was empty; "%1$s" is the intended indexed specifier.
        return String.format("\n" +
            "%1$s" +
            indent + INDENT + "public %2$s %3$s { get { return %4$s; } }\n",
            generateDocumentation(indent + INDENT, token),
            cSharpTypeName(token.encoding().primitiveType()),
            toUpperFirstChar(propertyName),
            generateLiteral(token.encoding().primitiveType(), token.encoding().constValue().toString()));
    }

    final StringBuilder sb = new StringBuilder();

    final String javaTypeName = cSharpTypeName(token.encoding().primitiveType());
    final byte[] constantValue = token.encoding().constValue().byteArrayValue(token.encoding().primitiveType());
    // Reuse the bytes already extracted above rather than decoding the
    // constant value a second time.
    final CharSequence values = generateByteLiteralList(constantValue);

    sb.append(String.format(
        "\n" +
        indent + INDENT + "private static readonly byte[] _%1$sValue = { %2$s };\n",
        propertyName,
        values));

    sb.append(String.format(
        "\n" +
        indent + INDENT + "public const int %1$sLength = %2$d;\n",
        toUpperFirstChar(propertyName),
        constantValue.length));

    sb.append(String.format(
        indent + INDENT + "public %1$s %2$s(int index)\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "return _%3$sValue[index];\n" +
        indent + INDENT + "}\n\n",
        javaTypeName,
        toUpperFirstChar(propertyName),
        propertyName));

    sb.append(String.format(
        indent + INDENT + "public int Get%1$s(byte[] dst, int offset, int length)\n" +
        indent + INDENT + "{\n" +
        indent + INDENT + INDENT + "int bytesCopied = Math.Min(length, %2$d);\n" +
        indent + INDENT + INDENT + "Array.Copy(_%3$sValue, 0, dst, offset, bytesCopied);\n" +
        indent + INDENT + INDENT + "return bytesCopied;\n" +
        indent + INDENT + "}\n",
        toUpperFirstChar(propertyName),
        constantValue.length,
        propertyName));

    return sb;
}
/**
 * Render a byte array as a comma-separated list of decimal literals,
 * e.g. {@code "72, 105"}, for use in a generated array initialiser.
 *
 * @param bytes bytes to render.
 * @return the comma-separated list (empty for an empty array).
 */
private CharSequence generateByteLiteralList(final byte[] bytes)
{
    final StringBuilder values = new StringBuilder();

    for (int i = 0; i < bytes.length; i++)
    {
        if (i > 0)
        {
            values.append(", ");
        }
        values.append(bytes[i]);
    }

    return values;
}
/**
 * Generate the common fields, Wrap method and Size constant shared by all
 * fixed-size flyweight (composite) codecs.
 *
 * @param size encoded length of the composite in bytes.
 * @return the generated code.
 */
private CharSequence generateFixedFlyweightCode(final int size)
{
    return String.format(
        INDENT + INDENT + "private DirectBuffer _buffer;\n" +
        INDENT + INDENT + "private int _offset;\n" +
        INDENT + INDENT + "private int _actingVersion;\n\n" +
        INDENT + INDENT + "public void Wrap(DirectBuffer buffer, int offset, int actingVersion)\n" +
        INDENT + INDENT + "{\n" +
        INDENT + INDENT + INDENT + "_offset = offset;\n" +
        INDENT + INDENT + INDENT + "_actingVersion = actingVersion;\n" +
        INDENT + INDENT + INDENT + "_buffer = buffer;\n" +
        INDENT + INDENT + "}\n\n" +
        INDENT + INDENT + "public const int Size = %d;\n",
        size);
}
    /**
     * Generate the flyweight boilerplate for a message type: schema constants (BlockLength,
     * TemplateId, SchemaId, SchemaVersion, SemanticType), the buffer/offset/limit state, and
     * the WrapForEncode/WrapForDecode entry points.
     *
     * @param className the C# class name of the message flyweight.
     * @param token     the message token carrying id, encoded length and semantic type.
     * @param indent    the indentation prefix for the generated lines.
     * @return the generated C# source fragment.
     */
    private CharSequence generateMessageFlyweightCode(final String className, final Token token, final String indent)
    {
        // Constant types are taken from the header structure so they match the wire format.
        final String blockLengthType = cSharpTypeName(ir.headerStructure().blockLengthType());
        final String templateIdType = cSharpTypeName(ir.headerStructure().templateIdType());
        final String schemaIdType = cSharpTypeName(ir.headerStructure().schemaIdType());
        final String schemaVersionType = cSharpTypeName(ir.headerStructure().schemaVersionType());
        final String semanticType = token.encoding().semanticType() == null ? "" : token.encoding().semanticType();

        // NOTE(review): the generated WrapForEncodeAndApplyHeader signature contains a stray
        // double space before "MessageHeader" ("offset, " + " MessageHeader") — cosmetic only,
        // left unchanged to keep generated output byte-identical.
        return String.format(
            indent + INDENT + "public const %1$s BlockLength = %2$s;\n" +
            indent + INDENT + "public const %3$s TemplateId = %4$s;\n" +
            indent + INDENT + "public const %5$s SchemaId = %6$s;\n" +
            indent + INDENT + "public const %7$s SchemaVersion = %8$s;\n" +
            indent + INDENT + "public const string SemanticType = \"%9$s\";\n\n" +
            indent + INDENT + "private readonly %10$s _parentMessage;\n" +
            indent + INDENT + "private DirectBuffer _buffer;\n" +
            indent + INDENT + "private int _offset;\n" +
            indent + INDENT + "private int _limit;\n" +
            indent + INDENT + "private int _actingBlockLength;\n" +
            indent + INDENT + "private int _actingVersion;\n" +
            "\n" +
            indent + INDENT + "public int Offset { get { return _offset; } }\n\n" +
            indent + INDENT + "public %10$s()\n" +
            indent + INDENT + "{\n" +
            indent + INDENT + INDENT + "_parentMessage = this;\n" +
            indent + INDENT + "}\n\n" +
            indent + INDENT + "public void WrapForEncode(DirectBuffer buffer, int offset)\n" +
            indent + INDENT + "{\n" +
            indent + INDENT + INDENT + "_buffer = buffer;\n" +
            indent + INDENT + INDENT + "_offset = offset;\n" +
            indent + INDENT + INDENT + "_actingBlockLength = BlockLength;\n" +
            indent + INDENT + INDENT + "_actingVersion = SchemaVersion;\n" +
            indent + INDENT + INDENT + "Limit = offset + _actingBlockLength;\n" +
            indent + INDENT + "}\n\n" +
            indent + INDENT + "public void WrapForEncodeAndApplyHeader(DirectBuffer buffer, int offset, " +
            " MessageHeader headerEncoder)\n" +
            indent + INDENT + "{\n" +
            indent + INDENT + INDENT + "headerEncoder.Wrap(buffer, offset, SchemaVersion);\n" +
            indent + INDENT + INDENT + "headerEncoder.BlockLength = BlockLength;\n" +
            indent + INDENT + INDENT + "headerEncoder.TemplateId = TemplateId;\n" +
            indent + INDENT + INDENT + "headerEncoder.SchemaId = SchemaId;\n" +
            indent + INDENT + INDENT + "headerEncoder.Version = SchemaVersion;\n" +
            indent + INDENT + INDENT + "\n" +
            indent + INDENT + INDENT + "WrapForEncode(buffer, offset + MessageHeader.Size);\n" +
            indent + INDENT + "}\n\n" +
            indent + INDENT + "public void WrapForDecode(DirectBuffer buffer, int offset, " +
            "int actingBlockLength, int actingVersion)\n" +
            indent + INDENT + "{\n" +
            indent + INDENT + INDENT + "_buffer = buffer;\n" +
            indent + INDENT + INDENT + "_offset = offset;\n" +
            indent + INDENT + INDENT + "_actingBlockLength = actingBlockLength;\n" +
            indent + INDENT + INDENT + "_actingVersion = actingVersion;\n" +
            indent + INDENT + INDENT + "Limit = offset + _actingBlockLength;\n" +
            indent + INDENT + "}\n\n" +
            indent + INDENT + "public int Size\n" +
            indent + INDENT + "{\n" +
            indent + INDENT + INDENT + "get\n" +
            indent + INDENT + INDENT + "{\n" +
            indent + INDENT + INDENT + INDENT + "return _limit - _offset;\n" +
            indent + INDENT + INDENT + "}\n" +
            indent + INDENT + "}\n\n" +
            indent + INDENT + "public int Limit\n" +
            indent + INDENT + "{\n" +
            indent + INDENT + INDENT + "get\n" +
            indent + INDENT + INDENT + "{\n" +
            indent + INDENT + INDENT + INDENT + "return _limit;\n" +
            indent + INDENT + INDENT + "}\n" +
            indent + INDENT + INDENT + "set\n" +
            indent + INDENT + INDENT + "{\n" +
            indent + INDENT + INDENT + INDENT + "_buffer.CheckLimit(value);\n" +
            indent + INDENT + INDENT + INDENT + "_limit = value;\n" +
            indent + INDENT + INDENT + "}\n" +
            indent + INDENT + "}\n\n",
            blockLengthType,
            generateLiteral(ir.headerStructure().blockLengthType(), Integer.toString(token.encodedLength())),
            templateIdType,
            generateLiteral(ir.headerStructure().templateIdType(), Integer.toString(token.id())),
            schemaIdType,
            generateLiteral(ir.headerStructure().schemaIdType(), Integer.toString(ir.id())),
            schemaVersionType,
            generateLiteral(ir.headerStructure().schemaVersionType(), Integer.toString(ir.version())),
            semanticType,
            className);
    }
private CharSequence generateFields(final List<Token> tokens, final String indent)
{
final StringBuilder sb = new StringBuilder();
for (int i = 0, size = tokens.size(); i < size; i++)
{
final Token signalToken = tokens.get(i);
if (signalToken.signal() == Signal.BEGIN_FIELD)
{
final Token encodingToken = tokens.get(i + 1);
final String propertyName = signalToken.name();
generateFieldIdMethod(sb, signalToken, indent + INDENT);
generateSinceActingDeprecated(
sb, indent, CSharpUtil.formatPropertyName(signalToken.name()), signalToken);
generateOffsetMethod(sb, signalToken, indent + INDENT);
generateFieldMetaAttributeMethod(sb, signalToken, indent + INDENT);
switch (encodingToken.signal())
{
case ENCODING:
sb.append(generatePrimitiveProperty(propertyName, signalToken, encodingToken, indent));
break;
case BEGIN_ENUM:
sb.append(generateEnumProperty(propertyName, signalToken, encodingToken, indent));
break;
case BEGIN_SET:
sb.append(generateBitSetProperty(propertyName, signalToken, encodingToken, indent));
break;
case BEGIN_COMPOSITE:
sb.append(generateCompositeProperty(propertyName, signalToken, encodingToken, indent));
break;
}
}
}
return sb;
}
private void generateFieldIdMethod(final StringBuilder sb, final Token token, final String indent)
{
sb.append(String.format("\n" +
indent + "public const int %sId = %d;\n",
CSharpUtil.formatPropertyName(token.name()),
token.id()));
}
private void generateOffsetMethod(final StringBuilder sb, final Token token, final String indent)
{
sb.append(String.format("\n" +
indent + "public const int %sOffset = %d;\n",
CSharpUtil.formatPropertyName(token.name()),
token.offset()));
}
    /**
     * Emit a static method returning the field's meta attributes (epoch, time unit,
     * semantic type, presence) for a given {@code MetaAttribute} key.
     *
     * @param sb     the builder to append the generated source to.
     * @param token  the field token whose encoding carries the meta attributes.
     * @param indent the indentation prefix for the generated lines.
     */
    private void generateFieldMetaAttributeMethod(final StringBuilder sb, final Token token, final String indent)
    {
        final Encoding encoding = token.encoding();
        // Absent attributes are rendered as empty strings rather than null.
        final String epoch = encoding.epoch() == null ? "" : encoding.epoch();
        final String timeUnit = encoding.timeUnit() == null ? "" : encoding.timeUnit();
        final String semanticType = encoding.semanticType() == null ? "" : encoding.semanticType();
        final String presence = encoding.presence() == null ? "" : encoding.presence().toString().toLowerCase();

        sb.append(String.format("\n" +
            indent + "public static string %sMetaAttribute(MetaAttribute metaAttribute)\n" +
            indent + "{\n" +
            indent + INDENT + "switch (metaAttribute)\n" +
            indent + INDENT + "{\n" +
            indent + INDENT + INDENT + "case MetaAttribute.Epoch: return \"%s\";\n" +
            indent + INDENT + INDENT + "case MetaAttribute.TimeUnit: return \"%s\";\n" +
            indent + INDENT + INDENT + "case MetaAttribute.SemanticType: return \"%s\";\n" +
            indent + INDENT + INDENT + "case MetaAttribute.Presence: return \"%s\";\n" +
            indent + INDENT + "}\n\n" +
            indent + INDENT + "return \"\";\n" +
            indent + "}\n",
            toUpperFirstChar(token.name()),
            epoch,
            timeUnit,
            semanticType,
            presence));
    }
private CharSequence generateEnumFieldNotPresentCondition(
final int sinceVersion,
final String enumName,
final String indent)
{
if (0 == sinceVersion)
{
return "";
}
return String.format(
indent + INDENT + INDENT + "if (_actingVersion < %d) return %s.NULL_VALUE;\n\n",
sinceVersion,
enumName);
}
    /**
     * Generate a C# property for an enum encoded field. Constant fields become a
     * read only property returning the constant; otherwise a get/set property that
     * reads and writes the underlying primitive with the schema's byte order.
     *
     * @param propertyName the field name used for the property.
     * @param fieldToken   the field token (version, documentation, constant encoding).
     * @param typeToken    the token describing the enum's underlying encoding.
     * @param indent       the indentation prefix for the generated lines.
     * @return the generated C# property source.
     */
    private CharSequence generateEnumProperty(
        final String propertyName,
        final Token fieldToken,
        final Token typeToken,
        final String indent)
    {
        final String enumName = formatClassName(typeToken.applicableTypeName());
        final String typePrefix = toUpperFirstChar(typeToken.encoding().primitiveType().primitiveName());
        final String enumUnderlyingType = cSharpTypeName(typeToken.encoding().primitiveType());
        final int offset = typeToken.offset();
        final ByteOrder byteOrder = typeToken.encoding().byteOrder();
        final String byteOrderStr = generateByteOrder(byteOrder, typeToken.encoding().primitiveType().size());

        if (fieldToken.isConstantEncoding())
        {
            // NOTE(review): constValue is emitted verbatim, so it is presumably already a
            // valid C# expression (e.g. a qualified enum member) — confirm against the IR.
            final String constValue = fieldToken.encoding().constValue().toString();

            return String.format("\n" +
                "%1$s" +
                indent + INDENT + "public %2$s %3$s\n" +
                indent + INDENT + "{\n" +
                indent + INDENT + INDENT + "get\n" +
                indent + INDENT + INDENT + "{\n" +
                indent + INDENT + INDENT + INDENT + "return %4$s;\n" +
                indent + INDENT + INDENT + "}\n" +
                indent + INDENT + "}\n\n",
                generateDocumentation(indent + INDENT, fieldToken),
                enumName,
                toUpperFirstChar(propertyName),
                constValue);
        }
        else
        {
            // Getter casts the raw primitive to the enum; setter casts back to the
            // underlying type before writing.
            return String.format("\n" +
                "%1$s" +
                indent + INDENT + "public %2$s %3$s\n" +
                indent + INDENT + "{\n" +
                indent + INDENT + INDENT + "get\n" +
                indent + INDENT + INDENT + "{\n" +
                "%4$s" +
                indent + INDENT + INDENT + INDENT + "return (%5$s)_buffer.%6$sGet%8$s(_offset + %7$d);\n" +
                indent + INDENT + INDENT + "}\n" +
                indent + INDENT + INDENT + "set\n" +
                indent + INDENT + INDENT + "{\n" +
                indent + INDENT + INDENT + INDENT + "_buffer.%6$sPut%8$s(_offset + %7$d, (%9$s)value);\n" +
                indent + INDENT + INDENT + "}\n" +
                indent + INDENT + "}\n\n",
                generateDocumentation(indent + INDENT, fieldToken),
                enumName,
                toUpperFirstChar(propertyName),
                generateEnumFieldNotPresentCondition(fieldToken.version(), enumName, indent),
                enumName,
                typePrefix,
                offset,
                byteOrderStr,
                enumUnderlyingType);
        }
    }
    /**
     * Generate a C# property for a bit set (choice set) field, reading and writing
     * the underlying primitive with the schema's byte order.
     *
     * @param propertyName the field name used for the property.
     * @param fieldToken   the field token (version, documentation).
     * @param typeToken    the token describing the bit set's underlying encoding.
     * @param indent       the indentation prefix for the generated lines.
     * @return the generated C# property source.
     */
    private String generateBitSetProperty(
        final String propertyName, final Token fieldToken, final Token typeToken, final String indent)
    {
        final String bitSetName = formatClassName(typeToken.applicableTypeName());
        final int offset = typeToken.offset();
        final String typePrefix = toUpperFirstChar(typeToken.encoding().primitiveType().primitiveName());
        final ByteOrder byteOrder = typeToken.encoding().byteOrder();
        final String byteOrderStr = generateByteOrder(byteOrder, typeToken.encoding().primitiveType().size());
        final String typeName = cSharpTypeName(typeToken.encoding().primitiveType());

        return String.format("\n" +
            "%1$s" +
            indent + INDENT + "public %2$s %3$s\n" +
            indent + INDENT + "{\n" +
            indent + INDENT + INDENT + "get\n" +
            indent + INDENT + INDENT + "{\n" +
            "%4$s" +
            indent + INDENT + INDENT + INDENT + "return (%5$s)_buffer.%6$sGet%8$s(_offset + %7$d);\n" +
            indent + INDENT + INDENT + "}\n" +
            indent + INDENT + INDENT + "set\n" +
            indent + INDENT + INDENT + "{\n" +
            indent + INDENT + INDENT + INDENT + "_buffer.%6$sPut%8$s(_offset + %7$d, (%9$s)value);\n" +
            indent + INDENT + INDENT + "}\n" +
            indent + INDENT + "}\n",
            generateDocumentation(indent + INDENT, fieldToken),
            bitSetName,
            toUpperFirstChar(propertyName),
            generateBitSetNotPresentCondition(fieldToken.version(), indent, bitSetName),
            bitSetName,
            typePrefix,
            offset,
            byteOrderStr,
            typeName);
    }
private Object generateCompositeProperty(
final String propertyName, final Token fieldToken, final Token typeToken, final String indent)
{
final String compositeName = CSharpUtil.formatClassName(typeToken.applicableTypeName());
final int offset = typeToken.offset();
final StringBuilder sb = new StringBuilder();
sb.append(String.format("\n" +
indent + INDENT + "private readonly %1$s _%2$s = new %3$s();\n",
compositeName,
toLowerFirstChar(propertyName),
compositeName));
sb.append(String.format("\n" +
"%1$s" +
indent + INDENT + "public %2$s %3$s\n" +
indent + INDENT + "{\n" +
indent + INDENT + INDENT + "get\n" +
indent + INDENT + INDENT + "{\n" +
"%4$s" +
indent + INDENT + INDENT + INDENT + "_%5$s.Wrap(_buffer, _offset + %6$d, _actingVersion);\n" +
indent + INDENT + INDENT + INDENT + "return _%5$s;\n" +
indent + INDENT + INDENT + "}\n" +
indent + INDENT + "}\n",
generateDocumentation(indent + INDENT, fieldToken),
compositeName,
toUpperFirstChar(propertyName),
generateTypeFieldNotPresentCondition(fieldToken.version(), indent),
toLowerFirstChar(propertyName),
offset));
return sb;
}
    /**
     * Emit the SinceVersion/Deprecated constants and the InActingVersion() check
     * for a field.
     *
     * @param sb           the builder to append the generated source to.
     * @param indent       the indentation prefix for the generated lines.
     * @param propertyName the formatted property name used as the constant prefix.
     * @param token        the field token carrying version and deprecation info.
     */
    private void generateSinceActingDeprecated(
        final StringBuilder sb,
        final String indent,
        final String propertyName,
        final Token token)
    {
        // NOTE(review): InActingVersion() only checks SinceVersion, not Deprecated —
        // presumably intentional (matches other SBE generators), but worth confirming.
        sb.append(String.format(
            indent + "public const int %1$sSinceVersion = %2$d;\n" +
            indent + "public const int %1$sDeprecated = %3$d;\n" +
            indent + "public bool %1$sInActingVersion()\n" +
            indent + "{\n" +
            indent + INDENT + "return _actingVersion >= %1$sSinceVersion;\n" +
            indent + "}\n",
            propertyName,
            token.version(),
            token.deprecated()));
    }
private String generateByteOrder(final ByteOrder byteOrder, final int primitiveTypeSize)
{
if (primitiveTypeSize == 1)
{
return "";
}
if ("BIG_ENDIAN".equals(byteOrder.toString()))
{
return "BigEndian";
}
return "LittleEndian";
}
private String generateLiteral(final PrimitiveType type, final String value)
{
String literal = "";
final String castType = cSharpTypeName(type);
switch (type)
{
case CHAR:
case UINT8:
case INT8:
case INT16:
case UINT16:
literal = "(" + castType + ")" + value;
break;
case INT32:
literal = value;
break;
case UINT32:
literal = value + "U";
break;
case FLOAT:
if (value.endsWith("NaN"))
{
literal = "float.NaN";
}
else
{
literal = value + "f";
}
break;
case UINT64:
literal = "0x" + Long.toHexString(Long.parseLong(value)) + "UL";
break;
case INT64:
literal = value + "L";
break;
case DOUBLE:
if (value.endsWith("NaN"))
{
literal = "double.NaN";
}
else
{
literal = value + "d";
}
break;
}
return literal;
}
}
| [Java] Add SchemaId and SchemaVersion to fix flyweight codecs for C#.
| sbe-tool/src/main/java/uk/co/real_logic/sbe/generation/csharp/CSharpGenerator.java | [Java] Add SchemaId and SchemaVersion to fix flyweight codecs for C#. |
|
Java | bsd-3-clause | 1bd739b1759bb52f2cde4680cd40edea987ade14 | 0 | Caleydo/caleydo,Caleydo/caleydo,Caleydo/caleydo,Caleydo/caleydo,Caleydo/caleydo,Caleydo/caleydo,Caleydo/caleydo | /*******************************************************************************
* Caleydo - visualization for molecular biology - http://caleydo.org
*
* Copyright(C) 2005, 2012 Graz University of Technology, Marc Streit, Alexander
* Lex, Christian Partl, Johannes Kepler University Linz </p>
*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>
*******************************************************************************/
package org.caleydo.core.event;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import org.caleydo.core.manager.GeneralManager;
/**
* utility class to hold a list of event listeners to register and remove them all in an convenient way
*
* @author Samuel Gratzl
*
*/
public class EventListenerManager {
	// shared publisher through which all listeners are added and removed
	private static final EventPublisher EVENT_PUBLISHER = GeneralManager.get().getEventPublisher();
	// every listener created via this manager, remembered for unregisterAll()
	private final Set<AEventListener<?>> listeners = new HashSet<>();
	// the object on whose behalf listeners are registered; used as their handler
	protected final IListenerOwner owner;

	EventListenerManager(IListenerOwner owner) {
		this.owner = owner;
	}
	/**
	 * registers the given listener for the given event type with the central publisher
	 * and remembers it so {@link #unregisterAll()} can remove it later
	 */
	public void register(Class<? extends AEvent> event, AEventListener<?> listener) {
		// listeners managed here must report to the owner of this manager
		assert listener.getHandler() == owner;
		listeners.add(listener);
		EVENT_PUBLISHER.addListener(event, listener);
	}
/**
* filter all methods of the listener object for <code>
*
* @ListenTo void xxx(<? extends AEvent> event); </code>
*
* and register an event listener for calling this method
*
* @param listener
*/
	public final void register(Object listener) {
		// delegate without any data domain restriction
		register(listener, null);
	}
/**
* filter all methods of the listener object for <code>
*
* @ListenTo void xxx(<? extends AEvent> event); </code>
*
* and register an event listener for calling this method
*
* @param listener
* @param dataDomainID
* if {@link ListenTo#restrictToDataDomain()} or {@link ListenTo#restrictExclusiveToDataDomain()} is used
* the dataDomainID to set
*/
	public final void register(Object listener, String dataDomainID) {
		Class<?> clazz = listener.getClass();
		// walk the whole class hierarchy; getDeclaredMethods() only returns methods
		// declared on the current class (including private ones), not inherited ones
		while (clazz != null) {
			for (Method m : clazz.getDeclaredMethods()) {
				if (!matches(m))
					continue;
				// matches() guarantees exactly one parameter assignable to AEvent
				Class<? extends AEvent> event = m.getParameterTypes()[0].asSubclass(AEvent.class);
				final ListenTo a = m.getAnnotation(ListenTo.class);
				// sendToMe only makes sense for directed events
				boolean toMe = a.sendToMe()
						&& ADirectedEvent.class.isAssignableFrom(event);
				final AnnotationBasedEventListener l = new AnnotationBasedEventListener(owner, listener, m, toMe);
				// apply optional data domain filtering; exclusive wins over plain restriction
				if (dataDomainID != null && (a.restrictExclusiveToDataDomain() || a.restrictToDataDomain())) {
					if (a.restrictExclusiveToDataDomain())
						l.setExclusiveDataDomainID(dataDomainID);
					else
						l.setDataDomainID(dataDomainID);
				}
				register(event, l);
			}
			clazz = clazz.getSuperclass();
		}
	}
private boolean matches(Method m) {
return m.isAnnotationPresent(ListenTo.class) && m.getParameterTypes().length == 1
&& AEvent.class.isAssignableFrom(m.getParameterTypes()[0]) && m.getReturnType() == void.class;
}
/**
* unregister all registered listeners by this listener container
*/
	public final void unregisterAll() {
		// remove every listener from the publisher, then forget them locally
		for (AEventListener<?> listener : listeners) {
			EVENT_PUBLISHER.removeListener(listener);
		}
		listeners.clear();
	}
/**
* marker annotation that the method is an event listener, !no DataDomain specific things are supported
*
* @author Samuel Gratzl
*
*/
	@Retention(RetentionPolicy.RUNTIME)
	@Target(ElementType.METHOD)
	public @interface ListenTo {
		/**
		 * determines if the {@link ADirectedEvent} has as its receiver our current listener object
		 *
		 * @return whether only events directed at this listener are delivered
		 */
		boolean sendToMe() default false;

		// if set (and a dataDomainID is supplied at registration), the listener only
		// reacts to events of that data domain
		boolean restrictToDataDomain() default false;

		// like restrictToDataDomain, but exclusive — see AEventListener for the
		// precise semantics of the exclusive variant
		boolean restrictExclusiveToDataDomain() default false;
	}
	/**
	 * event listener that forwards each event to a single annotated method on the
	 * target object via reflection
	 */
	private static class AnnotationBasedEventListener extends AEventListener<IListenerOwner> {
		// the annotated method to invoke
		private final Method method;
		// the object on which the method is invoked
		private final Object listener;
		// if true, only ADirectedEvents sent to the listener object are delivered
		private final boolean checkSendToListener;

		public AnnotationBasedEventListener(IListenerOwner handler, Object listener, Method method,
				boolean checkSendToHandler) {
			this.method = method;
			this.listener = listener;
			this.checkSendToListener = checkSendToHandler;
			this.setHandler(handler);
		}

		@Override
		public void handleEvent(AEvent event) {
			// filter out events not directed at our listener object, if requested
			if (checkSendToListener
					&& (!(event instanceof ADirectedEvent) || !((ADirectedEvent) event).sentTo(listener)))
				return;
			try {
				// allow invoking private listener methods
				method.setAccessible(true);
				method.invoke(listener, event);
			} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
				// NOTE(review): prints the stack trace AND the exception again to stderr —
				// the second println is redundant; consider a logger instead
				e.printStackTrace();
				System.err.println(e);
			}
		}

		// equality is based on (listener, method) so the same method on the same
		// object is not registered twice in the listeners set
		@Override
		public int hashCode() {
			return Objects.hash(method, listener);
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			AnnotationBasedEventListener other = (AnnotationBasedEventListener) obj;
			return Objects.equals(listener, other.listener) && Objects.equals(method, other.method);
		}
	}
} | org.caleydo.core/src/org/caleydo/core/event/EventListenerManager.java | /*******************************************************************************
* Caleydo - visualization for molecular biology - http://caleydo.org
*
* Copyright(C) 2005, 2012 Graz University of Technology, Marc Streit, Alexander
* Lex, Christian Partl, Johannes Kepler University Linz </p>
*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>
*******************************************************************************/
package org.caleydo.core.event;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import org.caleydo.core.manager.GeneralManager;
/**
* utility class to hold a list of event listeners to register and remove them all in an convenient way
*
* @author Samuel Gratzl
*
*/
public class EventListenerManager {
	// shared publisher through which all listeners are added and removed
	private static final EventPublisher EVENT_PUBLISHER = GeneralManager.get().getEventPublisher();
	// every listener created via this manager, remembered for unregisterAll()
	private final Set<AEventListener<?>> listeners = new HashSet<>();
	// the object on whose behalf listeners are registered; used as their handler
	protected final IListenerOwner owner;

	EventListenerManager(IListenerOwner owner) {
		this.owner = owner;
	}
	// registers the listener with the publisher and remembers it for unregisterAll()
	private void register(Class<? extends AEvent> event, AEventListener<?> listener) {
		listeners.add(listener);
		EVENT_PUBLISHER.addListener(event, listener);
	}
/**
* filter all methods of the listener object for <code>
*
* @ListenTo void xxx(<? extends AEvent> event); </code>
*
* and register an event listener for calling this method
*
* @param listener
*/
	public final void register(Object listener) {
		// delegate without any data domain restriction
		register(listener, null);
	}
/**
* filter all methods of the listener object for <code>
*
* @ListenTo void xxx(<? extends AEvent> event); </code>
*
* and register an event listener for calling this method
*
* @param listener
* @param dataDomainID
* if {@link ListenTo#restrictToDataDomain()} or {@link ListenTo#restrictExclusiveToDataDomain()} is used
* the dataDomainID to set
*/
	public final void register(Object listener, String dataDomainID) {
		Class<?> clazz = listener.getClass();
		// walk the class hierarchy since getDeclaredMethods() skips inherited methods
		while (clazz != null) {
			for (Method m : clazz.getDeclaredMethods()) {
				if (!matches(m))
					continue;
				// matches() guarantees exactly one parameter assignable to AEvent
				Class<? extends AEvent> event = m.getParameterTypes()[0].asSubclass(AEvent.class);
				final ListenTo a = m.getAnnotation(ListenTo.class);
				// sendToMe only makes sense for directed events
				boolean toMe = a.sendToMe()
						&& ADirectedEvent.class.isAssignableFrom(event);
				final AnnotationBasedEventListener l = new AnnotationBasedEventListener(owner, listener, m, toMe);
				// optional data domain filtering; exclusive wins over plain restriction
				if (dataDomainID != null && (a.restrictExclusiveToDataDomain() || a.restrictToDataDomain())) {
					if (a.restrictExclusiveToDataDomain())
						l.setExclusiveDataDomainID(dataDomainID);
					else
						l.setDataDomainID(dataDomainID);
				}
				register(event, l);
			}
			clazz = clazz.getSuperclass();
		}
	}
	// a listener target must be annotated, take exactly one AEvent and return void
	private boolean matches(Method m) {
		return m.isAnnotationPresent(ListenTo.class) && m.getParameterTypes().length == 1
				&& AEvent.class.isAssignableFrom(m.getParameterTypes()[0]) && m.getReturnType() == void.class;
	}
/**
* unregister all registered listeners by this listener container
*/
	public final void unregisterAll() {
		// remove every listener from the publisher, then forget them locally
		for (AEventListener<?> listener : listeners) {
			EVENT_PUBLISHER.removeListener(listener);
		}
		listeners.clear();
	}
/**
* marker annotation that the method is an event listener, !no DataDomain specific things are supported
*
* @author Samuel Gratzl
*
*/
	@Retention(RetentionPolicy.RUNTIME)
	@Target(ElementType.METHOD)
	public @interface ListenTo {
		/**
		 * determines if the {@link ADirectedEvent} has as its receiver our current listener object
		 *
		 * @return whether only events directed at this listener are delivered
		 */
		boolean sendToMe() default false;

		// if set (and a dataDomainID is supplied at registration), the listener only
		// reacts to events of that data domain
		boolean restrictToDataDomain() default false;

		// like restrictToDataDomain, but exclusive
		boolean restrictExclusiveToDataDomain() default false;
	}
	/**
	 * event listener that forwards each event to a single annotated method on the
	 * target object via reflection
	 */
	private static class AnnotationBasedEventListener extends AEventListener<IListenerOwner> {
		// the annotated method to invoke
		private final Method method;
		// the object on which the method is invoked
		private final Object listener;
		// if true, only ADirectedEvents sent to the listener object are delivered
		private final boolean checkSendToListener;

		public AnnotationBasedEventListener(IListenerOwner handler, Object listener, Method method,
				boolean checkSendToHandler) {
			this.method = method;
			this.listener = listener;
			this.checkSendToListener = checkSendToHandler;
			this.setHandler(handler);
		}

		@Override
		public void handleEvent(AEvent event) {
			// filter out events not directed at our listener object, if requested
			if (checkSendToListener
					&& (!(event instanceof ADirectedEvent) || !((ADirectedEvent) event).sentTo(listener)))
				return;
			try {
				// allow invoking private listener methods
				method.setAccessible(true);
				method.invoke(listener, event);
			} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
				// NOTE(review): prints the stack trace AND the exception again — redundant
				e.printStackTrace();
				System.err.println(e);
			}
		}

		// equality is based on (listener, method) so the same method on the same
		// object is not registered twice
		@Override
		public int hashCode() {
			return Objects.hash(method, listener);
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			AnnotationBasedEventListener other = (AnnotationBasedEventListener) obj;
			return Objects.equals(listener, other.listener) && Objects.equals(method, other.method);
		}
	}
} | bicluster: set X,L,Z via DVI
git-svn-id: 149221363d454b9399d51e0b24a857a738336ca8@5760 1f7349ae-fd9f-0d40-aeb8-9798e6c0fce3
| org.caleydo.core/src/org/caleydo/core/event/EventListenerManager.java | bicluster: set X,L,Z via DVI |
|
Java | bsd-3-clause | 173235ba2a8d82a9ed689f71ec0ff24b11f912cd | 0 | Beachbot330/Beachbot2014Java,Beachbot330/Beachbot2013Java,Beachbot330/Beachbot2013Java | // RobotBuilder Version: 0.0.2
//
// This file was generated by RobotBuilder. It contains sections of
// code that are automatically generated and assigned by robotbuilder.
// These sections will be updated in the future when you export to
// Java from RobotBuilder. Do not put any code or make any change in
// the blocks indicating autogenerated code or it will be lost on an
// update. Deleting the comments indicating the section will prevent
// it from being updated in th future.
package org.usfirst.frc330.Beachbot2013Java;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Preferences;
import edu.wpi.first.wpilibj.command.AutoSpreadsheet;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.command.Scheduler;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import org.usfirst.frc330.Beachbot2013Java.commands.*;
import org.usfirst.frc330.Beachbot2013Java.subsystems.*;
/*
* $Log: Robot.java,v $
* Revision 1.30 2013-03-30 02:32:05 jross
* make distances for actual, practice, red, and blue camera distances in vision table
*
* Revision 1.29 2013-03-28 03:49:59 jross
* remove shoot high
*
* Revision 1.28 2013-03-24 18:13:39 jross
* add action item
*
* Revision 1.27 2013-03-24 17:43:57 jross
* Disable output when disabled
*
* Revision 1.26 2013-03-21 02:07:43 echan
* changed no name to no check
*
* Revision 1.25 2013-03-20 05:05:09 dstation
* added autos launcg frisbee no name and arm variable shooting
*
* Revision 1.24 2013-03-18 06:53:48 jross
* add additional commands
*
* Revision 1.23 2013-03-17 20:26:34 jross
* count frisbees
*
* Revision 1.22 2013-03-16 18:00:48 echan
* added the readvisionfile to robot
*
* Revision 1.21 2013-03-15 02:50:16 echan
* added cvs log comments
*
*/
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Robot extends IterativeRobot {

    // the command scheduled for the autonomous period, chosen from the spreadsheet
    Command autonomousCommand;
    public static OI oi;
    // spreadsheet-driven autonomous script selector
    public static AutoSpreadsheet auto;
    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DECLARATIONS
    public static Chassis chassis;
    public static FrisbeePickup frisbeePickup;
    public static ShooterLow shooterLow;
    public static Vision vision;
    public static Arm arm;
    public static LCD lCD;
    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DECLARATIONS

    // true when running on the practice robot; read once from Preferences in robotInit()
    private static boolean practicerobot;

    public static boolean isPracticerobot() {
        return practicerobot;
    }

    /**
     * This function is run when the robot is first started up and should be
     * used for any initialization code: it reads the PracticeRobot preference,
     * constructs all subsystems and the OI, and registers every command that
     * autonomous scripts may reference.
     */
    public void robotInit() {
        // seed the PracticeRobot preference on first boot so it shows up for editing
        if (!Preferences.getInstance().containsKey("PracticeRobot"))
        {
            Preferences.getInstance().putBoolean("PracticeRobot", false);
            Preferences.getInstance().save();
        }
        practicerobot = Preferences.getInstance().getBoolean("PracticeRobot", false);

        RobotMap.init();
        // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
        chassis = new Chassis();
        frisbeePickup = new FrisbeePickup();
        shooterLow = new ShooterLow();
        vision = new Vision();
        arm = new Arm();
        lCD = new LCD();
        // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS

        // This MUST be here. If the OI creates Commands (which it very likely
        // will), constructing it during the construction of CommandBase (from
        // which commands extend), subsystems are not guaranteed to be
        // yet. Thus, their requires() statements may grab null pointers. Bad
        // news. Don't move it.
        oi = new OI();

        // initialize AutoSpreadsheet. Each auto command must be added here so the
        // script parser can instantiate it by name
        auto = new AutoSpreadsheet();
        auto.readScripts();
        // System.out.println("After Auto Read Scripts");
        auto.addCommand(new TurnGyroAbs(0));
        auto.addCommand(new TurnGyroRel(0));
        auto.addCommand(new Wait(0));
        auto.addCommand(new ShiftLow());
        auto.addCommand(new ShiftHigh());
        auto.addCommand(new DriveEncoder(0));
        auto.addCommand(new DriveEncoderRel());
        auto.addCommand(new DriveEncoderGyro(0,0));
        auto.addCommand(new DriveEncoderGyroRel());
        auto.addCommand(new DriveTime());
        auto.addCommand(new DriveEncoderGyroRamp(0,0));
        auto.addCommand(new DriveEncoderGyroRampRel());
        auto.addCommand(new DriveWaypoint(0,0,0,0,false));
        auto.addCommand(new TurnGyroWaypoint());
        auto.addCommand(new ArmClimbing());
        auto.addCommand(new ArmHighShooting());
        auto.addCommand(new ArmLowShooting());
        auto.addCommand(new ArmLowPickup());
        auto.addCommand(new HoldArm());
        auto.addCommand(new LaunchFrisbee());
//        auto.addCommand(new MarsRock());
        auto.addCommand(new PickupDown());
        auto.addCommand(new PickupFrisbeesOff());
        auto.addCommand(new PickupFrisbeesOn());
        auto.addCommand(new PickupFrisbeesOnBatteryCompensated());
        auto.addCommand(new PickupUp());
        auto.addCommand(new ShootLow());
        auto.addCommand(new ShootLowCommandGroup());
        auto.addCommand(new ShootLowBangBang());
        auto.addCommand(new TurnCamera());
        auto.addCommand(new DriveWaypointBackward(0,0,0,0,true));
        auto.addCommand(new TurnGyroWaypointBackward());
        auto.addCommand(new AutoWait());
        auto.addCommand(new TurnCameraIterative());
        auto.addCommand(new setGyroComp());
        auto.addCommand(new FullSpeedShootLow());
        auto.addCommand(new StopShootLow());
        auto.addCommand(new LaunchFrisbeeNoCheck());
        auto.addCommand(new ArmVariableShooting());
//        SmartDashboard.putData(Scheduler.getInstance());
    }

    public void autonomousInit() {
        // schedule the autonomous command selected from the spreadsheet
        chassis.resetPosition();
        autonomousCommand = auto.getSelected();
        if (autonomousCommand != null) autonomousCommand.start();
    }

    /**
     * This function is called periodically during autonomous
     */
    public void autonomousPeriodic() {
        chassis.calcPeriodic();
        frisbeePickup.calcPeriodic();
        Scheduler.getInstance().run();
    }

    public void teleopInit() {
        // This makes sure that the autonomous stops running when
        // teleop starts running. If you want the autonomous to
        // continue until interrupted by another command, remove
        // this line or comment it out.
        if (autonomousCommand != null) autonomousCommand.cancel();
    }

    /**
     * This function is called periodically during operator control
     */
    public void teleopPeriodic() {
        chassis.calcPeriodic();
        frisbeePickup.calcPeriodic();
        Scheduler.getInstance().run();
    }

    /**
     * This function called periodically during test mode
     */
    public void testPeriodic() {
        LiveWindow.run();
    }

    public void disabledPeriodic()
    {
        // keep odometry and the scheduler running while disabled
        chassis.calcPeriodic();
        Scheduler.getInstance().run();
    }

    /**
     * Called once on entering disabled: re-reads auto scripts and vision data,
     * and forces all outputs (arm, drive, shooter) off.
     */
    public void disabledInit()
    {
        // System.out.println("In Disabled Init");
        //TODO turn off pickup
        auto.readScripts();
        SmartDashboard.putBoolean("BrightCamera", false);
        Robot.vision.readVisionFile();
        Robot.vision.getVisionPosition();
        Robot.arm.disable();
        Robot.chassis.stopDrive();
        Robot.shooterLow.shoot(0);
    }
}
| src/org/usfirst/frc330/Beachbot2013Java/Robot.java | // RobotBuilder Version: 0.0.2
//
// This file was generated by RobotBuilder. It contains sections of
// code that are automatically generated and assigned by robotbuilder.
// These sections will be updated in the future when you export to
// Java from RobotBuilder. Do not put any code or make any change in
// the blocks indicating autogenerated code or it will be lost on an
// update. Deleting the comments indicating the section will prevent
// it from being updated in th future.
package org.usfirst.frc330.Beachbot2013Java;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Preferences;
import edu.wpi.first.wpilibj.command.AutoSpreadsheet;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.command.Scheduler;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import org.usfirst.frc330.Beachbot2013Java.commands.*;
import org.usfirst.frc330.Beachbot2013Java.subsystems.*;
/*
* $Log: Robot.java,v $
* Revision 1.29 2013-03-28 03:49:59 jross
* remove shoot high
*
* Revision 1.28 2013-03-24 18:13:39 jross
* add action item
*
* Revision 1.27 2013-03-24 17:43:57 jross
* Disable output when disabled
*
* Revision 1.26 2013-03-21 02:07:43 echan
* changed no name to no check
*
* Revision 1.25 2013-03-20 05:05:09 dstation
* added autos launcg frisbee no name and arm variable shooting
*
* Revision 1.24 2013-03-18 06:53:48 jross
* add additional commands
*
* Revision 1.23 2013-03-17 20:26:34 jross
* count frisbees
*
* Revision 1.22 2013-03-16 18:00:48 echan
* added the readvisionfile to robot
*
* Revision 1.21 2013-03-15 02:50:16 echan
* added cvs log comments
*
*/
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Robot extends IterativeRobot {
Command autonomousCommand;
public static OI oi;
public static AutoSpreadsheet auto;
// BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DECLARATIONS
public static Chassis chassis;
public static FrisbeePickup frisbeePickup;
public static ShooterLow shooterLow;
public static Vision vision;
public static Arm arm;
public static LCD lCD;
// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DECLARATIONS
/**
* This function is run when the robot is first started up and should be
* used for any initialization code.
*/
private static boolean practicerobot;
public static boolean isPracticerobot() {
return practicerobot;
}
public void robotInit() {
if (!Preferences.getInstance().containsKey("PracticeRobot"))
{
Preferences.getInstance().putBoolean("PracticeRobot", false);
Preferences.getInstance().save();
}
practicerobot = Preferences.getInstance().getBoolean("PracticeRobot", false);
RobotMap.init();
// BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
chassis = new Chassis();
frisbeePickup = new FrisbeePickup();
shooterLow = new ShooterLow();
vision = new Vision();
arm = new Arm();
lCD = new LCD();
// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
// This MUST be here. If the OI creates Commands (which it very likely
// will), constructing it during the construction of CommandBase (from
// which commands extend), subsystems are not guaranteed to be
// yet. Thus, their requires() statements may grab null pointers. Bad
// news. Don't move it.
oi = new OI();
//initialize AutoSpreadsheet. Each auto command must be added here
auto = new AutoSpreadsheet();
auto.readScripts();
// System.out.println("After Auto Read Scripts");
auto.addCommand(new TurnGyroAbs(0));
auto.addCommand(new TurnGyroRel(0));
auto.addCommand(new Wait(0));
auto.addCommand(new ShiftLow());
auto.addCommand(new ShiftHigh());
auto.addCommand(new DriveEncoder(0));
auto.addCommand(new DriveEncoderRel());
auto.addCommand(new DriveEncoderGyro(0,0));
auto.addCommand(new DriveEncoderGyroRel());
auto.addCommand(new DriveTime());
auto.addCommand(new DriveEncoderGyroRamp(0,0));
auto.addCommand(new DriveEncoderGyroRampRel());
auto.addCommand(new DriveWaypoint(0,0,0,0,false));
auto.addCommand(new TurnGyroWaypoint());
auto.addCommand(new ArmClimbing());
auto.addCommand(new ArmHighShooting());
auto.addCommand(new ArmLowShooting());
auto.addCommand(new ArmLowPickup());
auto.addCommand(new HoldArm());
auto.addCommand(new LaunchFrisbee());
// auto.addCommand(new MarsRock());
auto.addCommand(new PickupDown());
auto.addCommand(new PickupFrisbeesOff());
auto.addCommand(new PickupFrisbeesOnBatteryCompensated());
auto.addCommand(new PickupUp());
auto.addCommand(new ShootLow());
auto.addCommand(new ShootLowCommandGroup());
auto.addCommand(new TurnCamera());
auto.addCommand(new DriveWaypointBackward(0,0,0,0,true));
auto.addCommand(new TurnGyroWaypointBackward());
auto.addCommand(new AutoWait());
auto.addCommand(new TurnCameraIterative());
auto.addCommand(new setGyroComp());
auto.addCommand(new FullSpeedShootLow());
auto.addCommand(new StopShootLow());
auto.addCommand(new LaunchFrisbeeNoCheck());
auto.addCommand(new ArmVariableShooting());
// SmartDashboard.putData(Scheduler.getInstance());
}
public void autonomousInit() {
// schedule the autonomous command (example)
chassis.resetPosition();
autonomousCommand = auto.getSelected();
if (autonomousCommand != null) autonomousCommand.start();
}
/**
* This function is called periodically during autonomous
*/
public void autonomousPeriodic() {
chassis.calcPeriodic();
frisbeePickup.calcPeriodic();
Scheduler.getInstance().run();
}
public void teleopInit() {
// This makes sure that the autonomous stops running when
// teleop starts running. If you want the autonomous to
// continue until interrupted by another command, remove
// this line or comment it out.
if (autonomousCommand != null) autonomousCommand.cancel();
}
/**
* This function is called periodically during operator control
*/
public void teleopPeriodic() {
chassis.calcPeriodic();
frisbeePickup.calcPeriodic();
Scheduler.getInstance().run();
}
/**
* This function called periodically during test mode
*/
public void testPeriodic() {
LiveWindow.run();
}
public void disabledPeriodic()
{
chassis.calcPeriodic();
Scheduler.getInstance().run();
}
public void disabledInit()
{
// System.out.println("In Disabled Init");
//TODO turn off pickup
auto.readScripts();
SmartDashboard.putBoolean("BrightCamera", false);
Robot.vision.readVisionFile();
Robot.vision.getVisionPosition();
Robot.arm.disable();
Robot.chassis.stopDrive();
Robot.shooterLow.shoot(0);
}
}
| add shootLowBangBang to auto
| src/org/usfirst/frc330/Beachbot2013Java/Robot.java | add shootLowBangBang to auto |
|
Java | bsd-3-clause | 8d0c28446a6194f6ef33945a1814036e4c8c8414 | 0 | TestingTechnologies/Play-ITS-2015 | package com.testingtech.car2x.hmi;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.AnimationDrawable;
import android.os.AsyncTask;
import android.os.Build;
import android.speech.tts.TextToSpeech;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.ScrollView;
import android.widget.TableLayout;
import android.widget.TextView;
import com.testingtech.car2x.hmi.messages.ProgressMessage;
import com.testingtech.car2x.hmi.messages.TestCaseCommand;
import com.testingtech.car2x.hmi.messages.ControlMessage;
import com.testingtech.car2x.hmi.messages.Message;
import com.testingtech.car2x.hmi.messages.TestCase;
import com.testingtech.car2x.hmi.messages.VerdictMessage;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.util.Date;
public class SocketClient extends AsyncTask<Void, Message, Message> {
private Context context;
private TextView debugText, statusRunningText;
private ScrollView scrollview;
private ProgressBar progressBar;
private AnimationDrawable logoAnimation;
private Button btnStart, btnStop;
private TextToSpeech speech;
private int stageNum = 0;
private Socket mySocket = null;
public SocketClient(Context con, TextView tv, ScrollView sv, ProgressBar pb,
AnimationDrawable ad, TextView sr, Button start,
Button stop, TextToSpeech tts) {
this.context = con;
this.debugText = tv;
this.scrollview = sv;
this.logoAnimation = ad;
this.statusRunningText = sr;
this.progressBar = pb;
this.btnStart = start;
this.btnStop = stop;
this.speech = tts;
}
/**
* Initialize the test: reset the debug text, set the status text to loading, animate logo,
* disable start button, enable stop button.
*/
@Override
protected void onPreExecute(){
debugText.setText("");
debugText.setTextColor(Color.TRANSPARENT);
statusRunningText.setText(context.getString(R.string.textview_loading));
logoAnimation.start();
btnStart.setEnabled(false);
btnStop.setEnabled(true);
}
/**
* Start the test: Connect a Socket, send a start message
* @param params Contains nothing.
* @return The last received message from the socket.
*/
@Override
protected Message doInBackground(Void... params) {
ControlMessage controlMessage;
Message socketMessage;
// create a socket and try to connect it with a timeout of 2 seconds
mySocket = new Socket();
ObjectOutputStream oos;
ObjectInputStream ois;
try {
mySocket.connect(new InetSocketAddress("192.168.87.148", 30000), 2000);
oos = new ObjectOutputStream(mySocket.getOutputStream());
ois = new ObjectInputStream(mySocket.getInputStream());
}catch(SocketTimeoutException ste) {
handleError("Connection failed after timeout. Try again.");
return null;
}catch (IOException ioe) {
handleError("Connecting failed: " + ioe.getMessage());
return null;
}
statusRunningText.setText(context.getString(R.string.textview_running));
// define and send the start message
controlMessage = new ControlMessage(
TestCase.TC_VEHICLE_SPEED_OVER_50, // TODO send the right test case
new Date(),
TestCaseCommand.START
);
try {
oos.writeObject(controlMessage);
oos.flush();
}catch (IOException ioe) {
handleError("Sending the start message failed.");
return null;
}
try {
socketMessage = (Message) ois.readObject();
while (!(socketMessage instanceof VerdictMessage) && !isCancelled()) {
publishProgress(socketMessage);
socketMessage = (Message) ois.readObject();
}
}catch (IOException ioe) {
handleError("Receiving messages failed.");
return null;
}catch (ClassNotFoundException cnf) {
handleError("Message format was wrong.");
return null;
}
try{
if(isCancelled()){
controlMessage = new ControlMessage(
TestCase.TC_VEHICLE_SPEED_OVER_50, // TODO send the right test case
new Date(),
TestCaseCommand.STOP
);
oos.writeObject(controlMessage);
oos.flush();
}
} catch (IOException ioe){
handleError("Sending the stop message failed.");
}
return socketMessage;
}
/**
* Report an error message by displaying it on the GUI. Cancel the Thread.
* @param message The message which is displayed.
*/
private void handleError(String message){
// TODO change the asyncthread to a runnable and use runonuithread OR put this in onprogresupdate
/*
debugText.setText(message);
debugText.setTextColor(Color.RED);
*/
System.out.println(message);
cancel(true);
}
/**
* Is called by doInBackground for updating the GUI.
* @param progress The current received message.
*/
@Override
protected void onProgressUpdate(Message... progress) {
super.onProgressUpdate(progress);
String status = "";
if(progress[0] instanceof ProgressMessage) {
status = ((ProgressMessage) progress[0]).progress.toString();
} else if(progress[0] instanceof ControlMessage) {
status = ((ControlMessage) progress[0]).command.toString();
} else if(progress[0] instanceof VerdictMessage) {
status = ((VerdictMessage) progress[0]).verdict.toString();
}
debugText.setText(status);
String toSpeak;
switch(stageNum){
case 0:
toSpeak = context.getString(R.string.stage_start_engine);
break;
case 1:
toSpeak = context.getString(R.string.stage_drive_50);
break;
case 2:
toSpeak = context.getString(R.string.stage_down_to_30);
break;
case 3:
toSpeak = context.getString(R.string.stage_roll_halt);
break;
default:
toSpeak = "";
}
if(Build.VERSION.SDK_INT < 21){
speech.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
} else{
speech.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null, "speak");
}
// get the table as child of the scrollview
TableLayout table = (TableLayout) scrollview.getChildAt(0);
if(stageNum > 0) {
// get the textview from the position above as child of the table
TextView oldText = (TextView) table.getChildAt(stageNum - 1);
// change color back to white
oldText.setBackgroundColor(Color.TRANSPARENT);
}
// get the current textview as child of the table
TextView text = (TextView) table.getChildAt(stageNum);
// change color to red
text.setBackgroundResource(R.drawable.rectangle_border_red);
// scroll to current textview
scrollview.smoothScrollTo(0, text.getTop());
// update progress bar
progressBar.setProgress(((stageNum + 1) * 100) / 4);
// next stage
if(progress[0] instanceof ProgressMessage) {
stageNum = ((ProgressMessage)progress[0]).progress.ordinal() + 1;
}
}
/**
* Is called after doInBackground if the Thread was not cancelled.
* Shows the content of the last received message on the GUI.
* @param result The last received message from the socket.
*/
@Override
protected void onPostExecute(Message result) {
if(result != null) {
if (result instanceof VerdictMessage)
debugText.setText("Verdict: " + ((VerdictMessage) result).verdict.toString());
else
debugText.setText(result.toString());
}
finish();
}
/**
* Is called after runInBackground if this Thread was cancelled. Calls the finish method.
*/
@Override
protected void onCancelled(){
finish();
}
/**
* Finishes all operations, closes the socket and resets the GUI texts.
*/
private void finish(){
// get the table as child of the scrollview
TableLayout table = (TableLayout) scrollview.getChildAt(0);
if(stageNum > 0) {
// get the last textview as child of the table
TextView oldText = (TextView) table.getChildAt(stageNum - 1);
// change color back to white
oldText.setBackgroundColor(Color.TRANSPARENT);
}
statusRunningText.setText(context.getString(R.string.textview_not_running));
logoAnimation.stop();
btnStart.setEnabled(true);
btnStop.setEnabled(false);
closeSocket();
}
/**
* Calls shutdown and close on the socket (if not null).
*/
public void closeSocket(){
try {
if(mySocket != null) {
mySocket.shutdownInput();
mySocket.shutdownOutput();
mySocket.close();
}
}catch(IOException ioe){
System.out.println("Socket cannot be closed: " + ioe.getMessage());
}
}
} | Car2xHMI/app/src/main/java/com/testingtech/car2x/hmi/SocketClient.java | package com.testingtech.car2x.hmi;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.AnimationDrawable;
import android.os.AsyncTask;
import android.os.Build;
import android.speech.tts.TextToSpeech;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.ScrollView;
import android.widget.TableLayout;
import android.widget.TextView;
import com.testingtech.car2x.hmi.messages.ProgressMessage;
import com.testingtech.car2x.hmi.messages.TestCaseCommand;
import com.testingtech.car2x.hmi.messages.ControlMessage;
import com.testingtech.car2x.hmi.messages.Message;
import com.testingtech.car2x.hmi.messages.TestCase;
import com.testingtech.car2x.hmi.messages.VerdictMessage;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.Date;
public class SocketClient extends AsyncTask<Void, Message, Message> {
private Context context;
private TextView debugText, statusRunning;
private ScrollView scrollview;
private ProgressBar progressBar;
private AnimationDrawable logoAnimation;
private Button btnStart, btnStop;
private TextToSpeech ttobj;
private int stageNum = 0;
private Socket mySocket = null;
public SocketClient(Context con, TextView tv, ScrollView sv, ProgressBar pb,
AnimationDrawable ad, TextView sr, Button start,
Button stop, TextToSpeech tts) {
this.context = con;
this.debugText = tv;
this.scrollview = sv;
this.logoAnimation = ad;
this.statusRunning = sr;
this.progressBar = pb;
this.btnStart = start;
this.btnStop = stop;
this.ttobj = tts;
}
@Override
protected void onPreExecute(){
statusRunning.setText(context.getString(R.string.textview_loading));
logoAnimation.start();
btnStart.setEnabled(false);
btnStop.setEnabled(true);
}
@Override
protected Message doInBackground(Void... params) {
ControlMessage controlMessage;
Message message = null;
try {
mySocket = new Socket();
mySocket.connect(new InetSocketAddress("192.168.87.148", 30000), 2000);
statusRunning.setText(context.getString(R.string.textview_running));
ObjectOutputStream oos = new ObjectOutputStream(mySocket.getOutputStream());
ObjectInputStream ois = new ObjectInputStream(mySocket.getInputStream());
controlMessage = new ControlMessage(
TestCase.TC_VEHICLE_SPEED_OVER_50,
new Date(),
TestCaseCommand.START
);
oos.writeObject(controlMessage);
oos.flush();
message = (Message) ois.readObject();
while(!(message instanceof VerdictMessage) && !isCancelled()) {
publishProgress(message);
message = (Message) ois.readObject();
}
if(isCancelled()){
controlMessage = new ControlMessage(
TestCase.TC_VEHICLE_SPEED_OVER_50,
new Date(),
TestCaseCommand.STOP
);
oos.writeObject(controlMessage);
oos.flush();
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
mySocket.close();
}catch (IOException | NullPointerException ioe){
ioe.printStackTrace();
}
}
return message;
}
@Override
protected void onProgressUpdate(Message... progress) {
super.onProgressUpdate(progress);
String status = "";
if(progress[0] instanceof ProgressMessage) {
status = ((ProgressMessage) progress[0]).progress.toString();
} else if(progress[0] instanceof ControlMessage) {
status = ((ControlMessage) progress[0]).command.toString();
} else if(progress[0] instanceof VerdictMessage) {
status = ((VerdictMessage) progress[0]).verdict.toString();
}
debugText.setText(status);
String toSpeak;
switch(stageNum){
case 0:
toSpeak = context.getString(R.string.stage_start_engine);
break;
case 1:
toSpeak = context.getString(R.string.stage_drive_50);
break;
case 2:
toSpeak = context.getString(R.string.stage_down_to_30);
break;
case 3:
toSpeak = context.getString(R.string.stage_roll_halt);
break;
default:
toSpeak = "";
}
if(Build.VERSION.SDK_INT < 21){
ttobj.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
} else{
ttobj.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null, "speak");
}
// get the table as child of the scrollview
TableLayout table = (TableLayout) scrollview.getChildAt(0);
if(stageNum > 0) {
// get the textview from the position above as child of the table
TextView oldText = (TextView) table.getChildAt(stageNum - 1);
// change color back to white
oldText.setBackgroundColor(Color.TRANSPARENT);
}
// get the current textview as child of the table
TextView text = (TextView) table.getChildAt(stageNum);
// change color to red
text.setBackgroundResource(R.drawable.rectangle_border_red);
// scroll to current textview
scrollview.smoothScrollTo(0, text.getTop());
// update progress bar
progressBar.setProgress(((stageNum + 1) * 100) / 4);
// next stage
if(progress[0] instanceof ProgressMessage) {
stageNum = ((ProgressMessage)progress[0]).progress.ordinal() + 1;
}
}
@Override
protected void onPostExecute(Message result) {
if(result != null) {
if (result instanceof VerdictMessage)
debugText.setText(((VerdictMessage) result).verdict.toString());
else
debugText.setText(result.toString());
}
finish();
}
@Override
protected void onCancelled(){
finish();
}
private void finish(){
// get the table as child of the scrollview
TableLayout table = (TableLayout) scrollview.getChildAt(0);
if(stageNum > 0) {
// get the last textview as child of the table
TextView oldText = (TextView) table.getChildAt(stageNum - 1);
// change color back to white
oldText.setBackgroundColor(Color.TRANSPARENT);
}
statusRunning.setText(context.getString(R.string.textview_not_running));
logoAnimation.stop();
btnStart.setEnabled(true);
btnStop.setEnabled(false);
closeSocket();
}
public void closeSocket(){
try {
if(mySocket != null) {
mySocket.shutdownInput();
mySocket.shutdownOutput();
mySocket.close();
}
}catch(IOException ioe){
ioe.printStackTrace();
}
}
} | Refactored the SocketClient and added documentation
| Car2xHMI/app/src/main/java/com/testingtech/car2x/hmi/SocketClient.java | Refactored the SocketClient and added documentation |
|
Java | bsd-3-clause | 9227b5e64cc6e4d8fe5167c9c5645d1ebe46b626 | 0 | Eljah/nanohttpd,Eljah/nanohttpd | package fi.iki.elonen;
import java.util.Date;
import java.util.Map;
/**
* Created by Ilya Evlampiev on 05.03.2015.
*/
public class CrawlerTest1 extends NanoHTTPD {
public CrawlerTest1() {
super(8181);
}
@Override
public Response serve(IHTTPSession session) {
int generation = 0;
Method method = session.getMethod();
String uri = session.getUri();
System.out.println(method + " '" + uri + "' ");
String msg = "<html><body><h1>Crawler Test1</h1>\n";
Map<String, String> parms = session.getParms();
if (parms.get("generation") == null)
msg +=
"<a href='?generation=1'>" + "Generation " + generation + "</a>";
else {
generation = Integer.parseInt(parms.get("generation"));
System.out.println(generation);
if (generation < 5) {
msg += (new Date()).toString() + ": <a href='?generation=" + (generation + 1) + "'>" + "Generation " + (generation) + "</a>";
} else {
msg += (new Date()).toString() + ": <a href='?generation=" + (generation) + "'>" + "Generation " + (generation) + "</a>";
}
}
msg += "</body></html>\n";
try {
Thread.sleep(3000);
} catch (InterruptedException e) {
e.printStackTrace();
}
return new NanoHTTPD.Response(msg);
}
public static void main(String[] args) {
ServerRunner.run(CrawlerTest1.class);
}
}
| samples/src/main/java/fi/iki/elonen/CrawlerTest1.java | package fi.iki.elonen;
import java.util.Date;
import java.util.Map;
/**
* Created by Ilya Evlampiev on 05.03.2015.
*/
public class CrawlerTest1 extends NanoHTTPD {
public CrawlerTest1() {
super(8181);
}
@Override public Response serve(IHTTPSession session) {
int generation=0;
Method method = session.getMethod();
String uri = session.getUri();
System.out.println(method + " '" + uri + "' ");
String msg = "<html><body><h1>Crawler Test1</h1>\n";
Map<String, String> parms = session.getParms();
if (parms.get("generation") == null)
msg +=
"<a href='?generation=1'>"+"Generation "+generation+"</a>";
else {
generation = Integer.parseInt(parms.get("generation"));
System.out.println(generation);
msg += (new Date()).toString()+": <a href='?generation=" + (generation+1) + "'>" + "Generation " + (generation) + "</a>";
}
msg += "</body></html>\n";
try {
Thread.sleep(30000);
} catch (InterruptedException e) {
e.printStackTrace();
}
return new NanoHTTPD.Response(msg);
}
public static void main(String[] args) {
ServerRunner.run(CrawlerTest1.class);
}
}
| crawler test 1 added-2
| samples/src/main/java/fi/iki/elonen/CrawlerTest1.java | crawler test 1 added-2 |
|
Java | mit | c3f266371406e6a6a6932cb38b181dbac8ba654c | 0 | TeamTotemic/Totemic | package pokefenn.totemic.ceremony;
import net.minecraft.block.Block;
import net.minecraft.block.BlockSapling;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.ai.EntityAIBase;
import net.minecraft.entity.passive.EntityAnimal;
import net.minecraft.entity.passive.EntityVillager;
import net.minecraft.util.EnumParticleTypes;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import pokefenn.totemic.api.ceremony.Ceremony;
import pokefenn.totemic.api.ceremony.CeremonyEffectContext;
import pokefenn.totemic.api.music.MusicInstrument;
import pokefenn.totemic.init.ModBlocks;
import pokefenn.totemic.util.EntityUtil;
public class CeremonyFertility extends Ceremony
{
public CeremonyFertility(String name, int musicNeeded, int maxStartupTime, MusicInstrument... instruments)
{
super(name, musicNeeded, maxStartupTime, instruments);
}
@Override
public void effect(World world, BlockPos pos, CeremonyEffectContext context)
{
int radius = 8;
if(!world.isRemote && context.getTime() % 30 == 0)
{
for(Entity entity: EntityUtil.getEntitiesInRange(EntityLiving.class, world, pos, radius, radius, e -> e instanceof EntityAnimal || e instanceof EntityVillager))
{
if(entity instanceof EntityAnimal)
{
EntityAnimal animal = (EntityAnimal) entity;
if(animal.getGrowingAge() == 0 && !animal.isInLove())
{
animal.setInLove(null);
break;
}
}
else
{
EntityVillager villager = (EntityVillager) entity;
if(villager.getGrowingAge() == 0 && !villager.getIsWillingToMate(false) && !villager.isMating())
{
villager.setIsWillingToMate(true);
villager.tasks.addTask(0, new EntityAIVillagerFertility(villager));
break;
}
}
}
}
if(context.getTime() % 20 == 0)
{
for(BlockPos p: BlockPos.getAllInBoxMutable(pos.add(-radius, -radius, -radius), pos.add(radius, radius, radius)))
{
IBlockState state = world.getBlockState(p);
Block block = state.getBlock();
if(block instanceof BlockSapling && block != ModBlocks.cedar_sapling)
{
world.setBlockState(p, ModBlocks.cedar_sapling.getDefaultState(), 3);
spawnParticles(world, p.getX() + 0.5, p.getY() + 0.5, p.getZ() + 0.5);
}
}
}
}
@Override
public int getEffectTime()
{
return SHORT;
}
@Override
public int getMusicPer5()
{
return 6;
}
private void spawnParticles(World world, double x, double y, double z)
{
if(world.isRemote)
{
double dx = world.rand.nextGaussian();
double dy = world.rand.nextGaussian() * 0.5;
double dz = world.rand.nextGaussian();
double velY = world.rand.nextGaussian();
for(int i = 0; i < 10; i++)
world.spawnParticle(EnumParticleTypes.VILLAGER_HAPPY, x + dx, y + dy, z + dz, 0, velY, 0);
}
}
//Replacement for EntityAIVillagerMate
private static class EntityAIVillagerFertility extends EntityAIBase
{
private final EntityVillager villager;
private EntityVillager mate;
private final World world;
private int matingTimeout;
public EntityAIVillagerFertility(EntityVillager villagerIn)
{
this.villager = villagerIn;
this.world = villagerIn.world;
this.setMutexBits(3);
}
@Override
public boolean shouldExecute()
{
if(villager.getIsWillingToMate(true))
{
EntityVillager mate = this.world.findNearestEntityWithinAABB(EntityVillager.class, this.villager.getEntityBoundingBox().grow(8.0D, 3.0D, 8.0D), this.villager);
if (mate == null)
return false;
else
{
this.mate = mate;
return mate.getGrowingAge() == 0 && mate.getIsWillingToMate(true);
}
}
else
return false;
}
@Override
public void startExecuting()
{
this.matingTimeout = 300;
this.villager.setMating(true);
}
@Override
public void resetTask()
{
this.mate = null;
this.villager.setMating(false);
}
@Override
public boolean shouldContinueExecuting()
{
return this.matingTimeout >= 0 && this.villager.getGrowingAge() == 0 && this.villager.getIsWillingToMate(false);
}
@Override
public void updateTask()
{
--this.matingTimeout;
this.villager.getLookHelper().setLookPositionWithEntity(this.mate, 10.0F, 30.0F);
if (this.villager.getDistanceSq(this.mate) > 2.25D)
{
this.villager.getNavigator().tryMoveToEntityLiving(this.mate, 0.25D);
}
else if (this.matingTimeout == 0 && this.mate.isMating())
{
this.giveBirth();
}
if (this.villager.getRNG().nextInt(35) == 0)
{
this.world.setEntityState(this.villager, (byte)12);
}
}
private void giveBirth()
{
net.minecraft.entity.EntityAgeable entityvillager = this.villager.createChild(this.mate);
this.mate.setGrowingAge(6000);
this.villager.setGrowingAge(6000);
this.mate.setIsWillingToMate(false);
this.villager.setIsWillingToMate(false);
final net.minecraftforge.event.entity.living.BabyEntitySpawnEvent event = new net.minecraftforge.event.entity.living.BabyEntitySpawnEvent(villager, mate, entityvillager);
if (net.minecraftforge.common.MinecraftForge.EVENT_BUS.post(event) || event.getChild() == null) { return; }
entityvillager = event.getChild();
entityvillager.setGrowingAge(-24000);
entityvillager.setLocationAndAngles(this.villager.posX, this.villager.posY, this.villager.posZ, 0.0F, 0.0F);
this.world.spawnEntity(entityvillager);
this.world.setEntityState(entityvillager, (byte)12);
}
}
}
| src/main/java/pokefenn/totemic/ceremony/CeremonyFertility.java | package pokefenn.totemic.ceremony;
import net.minecraft.block.Block;
import net.minecraft.block.BlockSapling;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.passive.EntityAnimal;
import net.minecraft.entity.passive.EntityVillager;
import net.minecraft.util.EnumParticleTypes;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import pokefenn.totemic.api.ceremony.Ceremony;
import pokefenn.totemic.api.ceremony.CeremonyEffectContext;
import pokefenn.totemic.api.music.MusicInstrument;
import pokefenn.totemic.init.ModBlocks;
import pokefenn.totemic.util.EntityUtil;
public class CeremonyFertility extends Ceremony
{
public CeremonyFertility(String name, int musicNeeded, int maxStartupTime, MusicInstrument... instruments)
{
super(name, musicNeeded, maxStartupTime, instruments);
}
@Override
public void effect(World world, BlockPos pos, CeremonyEffectContext context)
{
int radius = 8;
if(!world.isRemote && context.getTime() % 30 == 0)
{
for(Entity entity: EntityUtil.getEntitiesInRange(EntityLiving.class, world, pos, radius, radius, e -> e instanceof EntityAnimal || e instanceof EntityVillager))
{
if(entity instanceof EntityAnimal && ((EntityAnimal) entity).getGrowingAge() == 0 && !((EntityAnimal) entity).isInLove())
{
EntityAnimal animal = (EntityAnimal) entity;
animal.setInLove(null);
break;
}
else
{
//TODO: figure out how to make villagers mate
/*
EntityVillager villager = (EntityVillager) entity;
villager.setIsWillingToMate(true);
villager.tasks.addTask(0, new EntityAIVillagerMate(villager));
villager.setMating(true);
*/
}
}
}
if(context.getTime() % 20 == 0)
{
for(BlockPos p: BlockPos.getAllInBoxMutable(pos.add(-radius, -radius, -radius), pos.add(radius, radius, radius)))
{
IBlockState state = world.getBlockState(p);
Block block = state.getBlock();
if(block instanceof BlockSapling && block != ModBlocks.cedar_sapling)
{
world.setBlockState(p, ModBlocks.cedar_sapling.getDefaultState(), 3);
spawnParticles(world, p.getX() + 0.5, p.getY() + 0.5, p.getZ() + 0.5);
}
}
}
}
@Override
public int getEffectTime()
{
return SHORT;
}
@Override
public int getMusicPer5()
{
return 6;
}
private void spawnParticles(World world, double x, double y, double z)
{
if(world.isRemote)
{
double dx = world.rand.nextGaussian();
double dy = world.rand.nextGaussian() * 0.5;
double dz = world.rand.nextGaussian();
double velY = world.rand.nextGaussian();
for(int i = 0; i < 10; i++)
world.spawnParticle(EnumParticleTypes.VILLAGER_HAPPY, x + dx, y + dy, z + dz, 0, velY, 0);
}
}
}
| Villager breeding in Rite of Spring now works | src/main/java/pokefenn/totemic/ceremony/CeremonyFertility.java | Villager breeding in Rite of Spring now works |
|
Java | mit | 0ee5f5ff92e9ae2644a9933d0545e296facb4fbb | 0 | douglascarlos/MuseumVisualizr,douglascarlos/MuseumVisualizr,douglascarlos/MuseumVisualizr | package douglas_carlos.museu.feevale.br.museumvisualizr.utils;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.os.AsyncTask;
import android.provider.Settings;
import android.util.Log;
import android.widget.Toast;
import org.json.JSONArray;
import org.json.JSONException;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.List;
import douglas_carlos.museu.feevale.br.museumvisualizr.VisitsActivity;
import douglas_carlos.museu.feevale.br.museumvisualizr.utils.DBHelper;
import douglas_carlos.museu.feevale.br.museumvisualizr.utils.Visit;
public class Requester extends AsyncTask<Void, Void, Void> {
private ProgressDialog progress;
private Context context;
private String data;
protected final static String SERVICE_DOMAIN = "museumvisualizr.esy.es";
public Requester(Context context) {
this.progress = new ProgressDialog(context);;
this.context = context;
this.syncVisits();
}
private static JSONArray toJsonArray(List<Visit> visits) throws JSONException {
JSONArray json = new JSONArray();
for (Visit visit : visits) json.put(visit.toJson());
return json;
}
public void syncVisits(){
DBHelper helper = new DBHelper(context);
helper.open();
List<Visit> visits = Visit.allNotSynced(helper);
if(visits.size()>0) {
try {
data = toJsonArray(visits).toString();
Visit.updateAsSynchronized(helper, visits);
Log.d("JSON", data);
this.execute();
} catch (JSONException e) {
e.printStackTrace();
}
}else{
Toast.makeText(context, "As visitas já estão sincronizadas!", Toast.LENGTH_LONG).show();
}
helper.close();
}
@Override
protected void onPreExecute() {
super.onPreExecute();
progress.setMessage("Sincronizando...");
progress.show();
}
@Override
protected Void doInBackground(Void... params) {
try{
StringBuilder values = new StringBuilder();
values.append(URLEncoder.encode("json", "UTF-8"));
values.append("=");
values.append(URLEncoder.encode(data, "UTF-8"));
values.append("&");
values.append(URLEncoder.encode("android_id", "UTF-8"));
values.append("=");
values.append(URLEncoder.encode(Settings.Secure.ANDROID_ID, "UTF-8"));
URL url = new URL("http://" + SERVICE_DOMAIN + "/sync.php");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("POST");
conn.setReadTimeout(10000);
conn.setConnectTimeout(15000);
conn.setDoInput(true);
conn.setDoOutput(true);
OutputStream os = conn.getOutputStream();
BufferedWriter out = new BufferedWriter(new OutputStreamWriter(os, "UTF-8"));
out.write(values.toString());
out.flush();
BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()));
Log.d("POST_DATA", values.toString());
Log.d("CODE", conn.getResponseCode() + "");
Log.d("IN", in.readLine());
out.close();
os.close();
}catch (Exception e){
e.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
progress.dismiss();
}
}
| app/src/main/java/douglas_carlos/museu/feevale/br/museumvisualizr/utils/Requester.java | package douglas_carlos.museu.feevale.br.museumvisualizr.utils;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.os.AsyncTask;
import android.provider.Settings;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.List;
import douglas_carlos.museu.feevale.br.museumvisualizr.VisitsActivity;
import douglas_carlos.museu.feevale.br.museumvisualizr.utils.DBHelper;
import douglas_carlos.museu.feevale.br.museumvisualizr.utils.Visit;
public class Requester extends AsyncTask<Void, Void, Void> {
private ProgressDialog progress;
private Context context;
private String data;
protected final static String SERVICE_DOMAIN = "museumvisualizr.esy.es";
public Requester(Context context) {
this.progress = new ProgressDialog(context);;
this.context = context;
this.syncVisits();
Log.d("JSON", data);
this.execute();
}
private static JSONArray toJsonArray(List<Visit> visits) throws JSONException {
JSONArray json = new JSONArray();
for (Visit visit : visits) json.put(visit.toJson());
return json;
}
public void syncVisits(){
DBHelper helper = new DBHelper(context);
helper.open();
List<Visit> visits = Visit.allNotSynced(helper);
try {
data = toJsonArray(visits).toString();
Visit.updateAsSynchronized(helper, visits);
} catch (JSONException e) {
e.printStackTrace();
}
helper.close();
}
@Override
protected void onPreExecute() {
super.onPreExecute();
progress.setMessage("Sincronizando...");
progress.show();
}
@Override
protected Void doInBackground(Void... params) {
try{
StringBuilder values = new StringBuilder();
values.append(URLEncoder.encode("json", "UTF-8"));
values.append("=");
values.append(URLEncoder.encode(data, "UTF-8"));
values.append("&");
values.append(URLEncoder.encode("android_id", "UTF-8"));
values.append("=");
values.append(URLEncoder.encode(Settings.Secure.ANDROID_ID, "UTF-8"));
URL url = new URL("http://" + SERVICE_DOMAIN + "/sync.php");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("POST");
conn.setReadTimeout(10000);
conn.setConnectTimeout(15000);
conn.setDoInput(true);
conn.setDoOutput(true);
OutputStream os = conn.getOutputStream();
BufferedWriter out = new BufferedWriter(new OutputStreamWriter(os, "UTF-8"));
out.write(values.toString());
out.flush();
BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()));
Log.d("POST_DATA", values.toString());
Log.d("CODE", conn.getResponseCode() + "");
Log.d("IN", in.readLine());
out.close();
os.close();
}catch (Exception e){
e.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
progress.dismiss();
}
}
| checking if the list is synchronized
| app/src/main/java/douglas_carlos/museu/feevale/br/museumvisualizr/utils/Requester.java | checking if the list is synchronized |
|
Java | mit | 9ffbb529a3e6e42d3601a1276481d29e4ff72b1b | 0 | Cnly/GitHup | package io.github.Cnly.GitHup;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Timer;
import java.util.TimerTask;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Lightweight update checker that polls a project's "githup-updates"
 * directory on raw.githubusercontent.com for an info.txt describing the
 * latest published version, and notifies a listener when it differs from
 * the version this application was built with.
 */
public class GitHup
{
    /** Milliseconds between automatic update checks. */
    private final long checkInterval;
    /** Version currently installed; updates are detected relative to this. */
    private final VersionInfo originalVersion;
    /** Callback invoked when a different version is published. */
    private final GitHupListener listener;
    /** Base URL of the project's githup-updates directory. */
    private final URL baseUrl;
    /** URL of the info.txt describing the latest version. */
    private final URL infoUrl;

    private boolean started = false;
    private Timer timer = null;

    // Patterns that extract the three fields from info.txt. DESCRIPTION uses
    // DOTALL so a multi-line description is captured in full.
    private static final Pattern VERSION_PATTERN = Pattern
            .compile("version: (.*)");
    private static final Pattern RELEASE_DATE_PATTERN = Pattern
            .compile("releaseDate: (.*)");
    private static final Pattern DESCRIPTION_PATTERN = Pattern.compile(
            "description: (.*)", Pattern.DOTALL);

    /**
     * Create an instance of GitHup and start a timer task to check for updates.
     *
     * @param username
     *            GitHub username
     * @param repoName
     *            GitHub repo name
     * @param projectName
     *            project name in the repo
     * @param checkInterval
     *            check interval in milliseconds; must be positive
     * @param originalVersion
     *            original version information
     * @param listener
     *            the GitHup listener called back when an update is available
     * @throws NullPointerException if listener is null
     * @throws IllegalArgumentException if checkInterval is not positive or the
     *             repo parameters cannot form a valid URL
     */
    public GitHup(String username, String repoName, String projectName,
            long checkInterval, VersionInfo originalVersion,
            GitHupListener listener)
    {
        if (listener == null)
            throw new NullPointerException("listener cannot be null!");
        if (checkInterval <= 0L)
            throw new IllegalArgumentException(
                    "checkInterval must be positive!");

        this.checkInterval = checkInterval;
        this.originalVersion = originalVersion;
        this.listener = listener;

        try
        {
            this.baseUrl = new URL(new StringBuilder(
                    "https://raw.githubusercontent.com/").append(username)
                    .append('/').append(repoName).append("/githup-updates/")
                    .append(projectName).append("/").toString());
        }
        catch (MalformedURLException e)
        {
            throw new IllegalArgumentException("Cannot create URL properly!", e);
        }
        try
        {
            this.infoUrl = new URL(baseUrl, "info.txt");
        }
        catch (MalformedURLException e)
        {
            throw new IllegalArgumentException("Cannot create URL properly!", e);
        }
    }

    /**
     * Start checking for updates. The first check runs immediately, then
     * repeats every {@code checkInterval} milliseconds on a background Timer.
     *
     * @return false if it's already started. Otherwise true.
     */
    public boolean start()
    {
        if (started) return false;

        TimerTask timerTask = new TimerTask()
        {
            @Override
            public void run()
            {
                checkForUpdates();
            }
        };
        this.timer = new Timer();
        timer.schedule(timerTask, 0L, this.checkInterval);

        started = true;
        return true;
    }

    /**
     * Stop checking for updates. Once a GitHup is stopped, it cannot be started
     * again ({@code started} intentionally remains true so start() refuses).
     *
     * @return false if it's not started or it's been stopped once.
     */
    public boolean stop()
    {
        if (!started) return false;

        timer.cancel();
        return true;
    }

    /**
     * Checks for updates synchronously on the calling thread: downloads
     * info.txt, parses it, and notifies the listener if the published version
     * differs from the original version. Network failures are reported and
     * the check is skipped.
     */
    public void checkForUpdates()
    {
        BufferedReader reader = null;
        StringBuilder sb = new StringBuilder();
        try
        {
            reader = new BufferedReader(new InputStreamReader(
                    infoUrl.openStream(), "utf-8"));
            char[] buffer = new char[1024];
            int charsRead;
            while ((charsRead = reader.read(buffer)) != -1)
            {
                sb.append(buffer, 0, charsRead);
            }
        }
        catch (IOException e)
        {
            // BUG FIX: the previous version caught the open failure, printed it,
            // then continued with a null reader and crashed with an NPE. Report
            // and skip this check instead. (UnsupportedEncodingException is an
            // IOException, so one catch covers both original cases.)
            new IOException("An error occured while checking for update!", e)
                    .printStackTrace();
            return;
        }
        finally
        {
            if (reader != null)
            {
                try
                {
                    reader.close(); // previously leaked on every call
                }
                catch (IOException ignored)
                {
                    // nothing sensible to do if close fails
                }
            }
        }

        VersionInfo info = toVersionInfo(sb.toString());
        if (!this.originalVersion.equals(info))
        {// An update is available!
            this.listener.onUpdateAvailable(info);
        }
    }

    /**
     * Create an instance of VersionInfo with the information from
     * infoFileContents.
     *
     * @param infoFileContents
     *            the contents of info.txt
     * @return the parsed version information
     * @throws AssertionError if any of the three required fields is missing
     */
    private static VersionInfo toVersionInfo(String infoFileContents)
    {
        String version = null;
        String releaseDate = null;
        String description = null;

        Matcher versionMatcher = VERSION_PATTERN.matcher(infoFileContents);
        if (!versionMatcher.find())
            throw new AssertionError(
                    "Cannot find the version field in the info.txt!");
        version = versionMatcher.group(1);

        Matcher releaseDateMatcher = RELEASE_DATE_PATTERN
                .matcher(infoFileContents);
        if (!releaseDateMatcher.find())
            throw new AssertionError(
                    "Cannot find the releaseDate field in the info.txt!");
        releaseDate = releaseDateMatcher.group(1);

        Matcher descriptionMatcher = DESCRIPTION_PATTERN
                .matcher(infoFileContents);
        if (!descriptionMatcher.find())
            throw new AssertionError(
                    "Cannot find the description field in the info.txt!");
        description = descriptionMatcher.group(1);

        return new VersionInfo(version, releaseDate, description);
    }

    /** @return whether start() has been called (remains true after stop()). */
    public boolean isStarted()
    {
        return started;
    }
}
| src/main/java/io/github/Cnly/GitHup/GitHup.java | package io.github.Cnly.GitHup;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Timer;
import java.util.TimerTask;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class GitHup
{
private final long checkInterval;
private final VersionInfo originalVersion;
private final GitHupListener listener;
private final URL baseUrl;
private final URL infoUrl;
private boolean started = false;
private Timer timer = null;
private static final Pattern VERSION_PATTERN = Pattern
.compile("version: (.*)");
private static final Pattern RELEASE_DATE_PATTERN = Pattern
.compile("releaseDate: (.*)");
private static final Pattern DESCRIPTION_PATTERN = Pattern.compile(
"description: (.*)", Pattern.DOTALL);
/**
* Create an instance of GitHup and start a timer task to check for updates.
*
* @param username
* GitHub username
* @param repoName
* GitHub repo name
* @param projectName
* project name in the repo
* @param checkInterval
* check interval in milliseconds
* @param originalVersion
* original version information
* @param listener
* The GitHup Listener for calling back when an update is
* available
* @throws MalformedURLException
*/
public GitHup(String username, String repoName, String projectName,
long checkInterval, VersionInfo originalVersion,
GitHupListener listener)
{
if (listener == null)
throw new NullPointerException("listener cannot be null!");
if (checkInterval <= 0L)
throw new IllegalArgumentException(
"checkInterval must be positive!");
this.checkInterval = checkInterval;
this.originalVersion = originalVersion;
this.listener = listener;
try
{
this.baseUrl = new URL(new StringBuilder(
"https://raw.githubusercontent.com/").append(username)
.append('/').append(repoName).append("/githup-updates/")
.append(projectName).append("/").toString());
}
catch (MalformedURLException e)
{
throw new IllegalArgumentException("Cannot create URL properly!", e);
}
try
{
this.infoUrl = new URL(baseUrl, "info.txt");
}
catch (MalformedURLException e)
{
throw new IllegalArgumentException("Cannot create URL properly!", e);
}
}
/**
* Start checking for updates.
*
* @return false if it's already started. Otherwise true.
*/
public boolean start()
{
if (started) return false;
TimerTask timerTask = new TimerTask()
{
@Override
public void run()
{
BufferedReader reader = null;
try
{
reader = new BufferedReader(new InputStreamReader(
infoUrl.openStream(), "utf-8"));
}
catch (UnsupportedEncodingException e1)
{
e1.printStackTrace();
}
catch (IOException e1)
{
e1.printStackTrace();
}
char[] buffer = new char[1024];
StringBuilder sb = new StringBuilder();
int charsRead = 0;
try
{
while ((charsRead = reader.read(buffer)) != -1)
{
sb.append(buffer, 0, charsRead);
}
}
catch (IOException e)
{
new IOException(
"An error occured while checking for update!", e)
.printStackTrace();
}
VersionInfo info = toVersionInfo(sb.toString());
if (!GitHup.this.originalVersion.equals(info))
{// An update is available!
GitHup.this.listener.onUpdateAvailable(info);
}
}
};
this.timer = new Timer();
timer.schedule(timerTask, 0L, this.checkInterval);
started = true;
return true;
}
/**
* Stop checking for updates. Once a GitHup is stopped, it cannot be started
* again.
*
* @return false if it's not started or it's been stopped once.
*/
public boolean stop()
{
if (!started) return false;
timer.cancel();
return true;
}
/**
* Create an instance of UpdateInfo with the information from
* infoFileContents
*
* @param infoFileContents
* the contents of info.txt
*/
private static VersionInfo toVersionInfo(String infoFileContents)
{
String version = null;
String releaseDate = null;
String description = null;
Matcher versionMatcher = VERSION_PATTERN.matcher(infoFileContents);
if (!versionMatcher.find())
throw new AssertionError(
"Cannot find the version field in the info.txt!");
version = versionMatcher.group(1);
Matcher releaseDateMatcher = RELEASE_DATE_PATTERN
.matcher(infoFileContents);
if (!releaseDateMatcher.find())
throw new AssertionError(
"Cannot find the releaseDate field in the info.txt!");
releaseDate = releaseDateMatcher.group(1);
Matcher descriptionMatcher = DESCRIPTION_PATTERN
.matcher(infoFileContents);
if (!descriptionMatcher.find())
throw new AssertionError(
"Cannot find the description field in the info.txt!");
description = descriptionMatcher.group(1);
return new VersionInfo(version, releaseDate, description);
}
public boolean isStarted()
{
return started;
}
}
| Add the ability of checking for updates manually | src/main/java/io/github/Cnly/GitHup/GitHup.java | Add the ability of checking for updates manually |
|
Java | mit | bc4a152a83e1955b582ebf36213472ab200fd02f | 0 | samarth-math/project-connect | package globalfunctions;
import java.net.*;
import org.apache.commons.net.util.*; // Depends on apache commons-net-3.3 library
/**
 * Static helpers for working with IP and MAC addresses: numeric comparison
 * of addresses, CIDR range membership, and MAC address formatting.
 */
public class IpAddress
{
    /**
     * Converts an IP address to a long so addresses can be compared and
     * range-checked arithmetically. Octets are packed big-endian.
     *
     * @param ip the address to convert
     * @return the address as an unsigned big-endian integer in a long
     */
    public static long ipToLong(InetAddress ip)
    {
        byte[] octets = ip.getAddress();
        long result = 0;
        for (byte octet : octets)
        {
            result <<= 8;            // make room for the next octet
            result |= octet & 0xff;  // 0xff masks off the byte's sign extension
        }
        return result;
    }

    /**
     * Checks whether the specified IP address falls inside the specified
     * network.
     *
     * @param ipadd  dotted-quad address, e.g. "192.168.1.5"
     * @param netadd network in CIDR form "xxx.xxx.xxx.xxx/xx"
     * @return true if ipadd lies within netadd's host range
     * @throws IllegalArgumentException if either argument is malformed
     */
    public static boolean iprangeverify(String ipadd, String netadd) throws IllegalArgumentException
    {
        SubnetUtils utils = new SubnetUtils(netadd);
        return utils.getInfo().isInRange(ipadd);
    }

    /**
     * Returns the MAC address of the given network interface as an
     * upper-case hex string with no separators (e.g. "0A1B2C3D4E5F").
     *
     * @param n the interface to query
     * @return the MAC address, or an empty string if the interface has no
     *         hardware address (loopback/virtual interfaces)
     * @throws SocketException if the hardware address cannot be read
     */
    public static String findmac(NetworkInterface n) throws SocketException
    {
        byte[] mac = n.getHardwareAddress();
        if (mac == null)
        {
            // BUG FIX: getHardwareAddress() returns null for loopback and
            // virtual interfaces; the previous code threw a NullPointerException.
            return "";
        }
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < mac.length; i++)
        {
            sb.append(String.format("%02X", mac[i]));
        }
        return sb.toString();
    }
}
| IPml/src/globalfunctions/IpAddress.java | package globalfunctions;
import java.net.*;
import org.apache.commons.net.util.*; // Depends on apache commons-net-3.3 library
public class IpAddress
{
public static long ipToLong(InetAddress ip) //Converts IP address to a long integer in order to make comparisons
{
byte[] octets = ip.getAddress();
long result = 0;
for (byte octet : octets) // for a variable octet defined by each element in the array octets
{
result <<= 8;//left shift by 8 (due to 8 bytes)
result |= octet & 0xff;// Or the byte with the octet anded with the number 1 (0xff is hexadecimal for 1)
}
return result;
}
//***checks if the specified ip address falls in the specified network address
public static boolean iprangeverify(String ipadd, String netadd)// Format of netadd : xxx.xxx.xxx.xxx/xx where /xx is network mask
{
SubnetUtils utils = new SubnetUtils(netadd);
return utils.getInfo().isInRange(ipadd);
}
}
| Added new overall useful functions | IPml/src/globalfunctions/IpAddress.java | Added new overall useful functions |
|
Java | mit | cb20d667c42bd8895b48d08d24d877433c35b300 | 0 | elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin | package com.elmakers.mine.bukkit.action.builtin;
import com.elmakers.mine.bukkit.action.CompoundAction;
import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.effect.EffectPlay;
import com.elmakers.mine.bukkit.api.effect.EffectPlayer;
import com.elmakers.mine.bukkit.api.spell.Spell;
import com.elmakers.mine.bukkit.api.spell.SpellResult;
import com.elmakers.mine.bukkit.spell.BaseSpell;
import com.elmakers.mine.bukkit.utility.BoundingBox;
import com.elmakers.mine.bukkit.utility.CompatibilityUtils;
import de.slikey.effectlib.util.DynamicLocation;
import org.bukkit.Location;
import org.bukkit.block.Block;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
import org.bukkit.util.Vector;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
/**
 * Spell action that simulates a custom projectile: on each scheduled update
 * it advances the cast context's target location along a velocity vector,
 * applying gravity and drag, and checks for collisions with blocks and
 * entities. On impact (or lifetime/drag expiry) it cancels the in-flight
 * effects, plays the hit effects, and runs its child actions via
 * super.perform().
 */
public class CustomProjectileAction extends CompoundAction
{
    // --- configuration, read once in prepare() ---
    private int interval;               // ms between simulation updates
    private int lifetime;               // ms before the projectile auto-hits
    private double speed;               // base speed, scaled by elapsed ticks (delta/50)
    private int startDistance;          // blocks to offset the start point along the aim direction
    private String projectileEffectKey; // effect set played while in flight
    private String hitEffectKey;        // effect set played on impact
    private boolean targetEntities;     // whether entities can be hit
    private boolean targetSelf;         // whether the caster itself can be hit
    private double radius;              // half the configured "size"; expands entity hitboxes
    private double gravity;             // downward velocity change per tick
    private double drag;                // speed lost per tick; projectile dies when speed hits 0

    // --- per-cast state, re-initialized in reset() ---
    private long lastUpdate;  // timestamp of the previous simulation step
    private long nextUpdate;  // earliest timestamp at which the next step may run
    private long deadline;    // timestamp at which lifetime expires
    private boolean hit = false;                   // true once the projectile has hit
    private Vector velocity = null;                // direction (initially normalized), mutated by gravity/drag
    private DynamicLocation effectLocation = null; // moving anchor for the EffectLib effects
    private Collection<EffectPlay> activeProjectileEffects; // in-flight effects to cancel on hit

    /** Pairs an entity with its radius-expanded hitbox for collision testing. */
    private class CandidateEntity {
        public final Entity entity;
        public final BoundingBox bounds;

        public CandidateEntity(Entity entity) {
            this.entity = entity;
            this.bounds = CompatibilityUtils.getHitbox(entity).expand(radius);
        }
    }

    /**
     * Reads the projectile's configuration from the spell parameters.
     */
    @Override
    public void prepare(CastContext context, ConfigurationSection parameters) {
        super.prepare(context, parameters);
        interval = parameters.getInt("interval", 200);
        lifetime = parameters.getInt("lifetime", 5000);
        speed = parameters.getDouble("speed", 0.1);
        startDistance = parameters.getInt("start", 0);
        projectileEffectKey = parameters.getString("projectile_effects", "projectile");
        hitEffectKey = parameters.getString("hit_effects", "hit");
        targetEntities = parameters.getBoolean("target_entities", true);
        targetSelf = parameters.getBoolean("target_self", false);
        radius = parameters.getDouble("size", 1) / 2;
        gravity = parameters.getDouble("gravity", 0);
        drag = parameters.getDouble("drag", 0);
    }

    /**
     * Resets the per-cast simulation state so the action can be re-cast.
     */
    @Override
    public void reset(CastContext context)
    {
        super.reset(context);
        long now = System.currentTimeMillis();
        nextUpdate = 0;
        lastUpdate = now;
        deadline = now + lifetime;
        hit = false;
        effectLocation = null;
        velocity = null;
        activeProjectileEffects = null;
    }

    /**
     * Runs one simulation step. Returns PENDING while the projectile is in
     * flight; after a hit, delegates to super.perform() to run child actions.
     */
    @Override
    public SpellResult perform(CastContext context) {
        long now = System.currentTimeMillis();
        // Throttle: wait until the configured interval has elapsed.
        if (now < nextUpdate)
        {
            return SpellResult.PENDING;
        }
        // Lifetime expired: treat as a hit where the projectile currently is.
        if (now > deadline)
        {
            return hit(context);
        }
        // Already hit: keep running the child action sequence.
        if (hit)
        {
            return super.perform(context);
        }
        nextUpdate = now + interval;

        // Check for initialization required
        Location targetLocation = context.getTargetLocation();
        if (targetLocation == null) {
            targetLocation = context.getWandLocation().clone();
            context.setTargetLocation(targetLocation);
        }
        if (velocity == null)
        {
            // First step: aim along the caster's look direction, optionally
            // offset the launch point forward by startDistance blocks.
            velocity = context.getDirection().clone().normalize();
            if (startDistance != 0) {
                targetLocation.add(velocity.clone().multiply(startDistance));
            }

            // Start up projectile FX
            Collection<EffectPlayer> projectileEffects = context.getEffects(projectileEffectKey);
            for (EffectPlayer apiEffectPlayer : projectileEffects)
            {
                if (effectLocation == null) {
                    effectLocation = new DynamicLocation(targetLocation);
                    effectLocation.setDirection(velocity);
                }
                if (activeProjectileEffects == null) {
                    activeProjectileEffects = new ArrayList<EffectPlay>();
                }
                // Hrm- this is ugly, but I don't want the API to depend on EffectLib.
                if (apiEffectPlayer instanceof com.elmakers.mine.bukkit.effect.EffectPlayer)
                {
                    com.elmakers.mine.bukkit.effect.EffectPlayer effectPlayer = (com.elmakers.mine.bukkit.effect.EffectPlayer)apiEffectPlayer;
                    effectPlayer.setEffectPlayList(activeProjectileEffects);
                    effectPlayer.startEffects(effectLocation, null);
                }
            }
        }
        else if (effectLocation != null)
        {
            // Keep the effect anchor tracking the projectile.
            effectLocation.updateFrom(targetLocation);
            effectLocation.setDirection(velocity);
        }

        // Advance position, checking for collisions
        long delta = now - lastUpdate;
        lastUpdate = now;

        // Apply gravity and drag (both scaled by elapsed ticks, delta/50).
        if (gravity > 0) {
            velocity.setY(velocity.getY() - gravity * delta / 50);
        }
        if (drag > 0) {
            double size = velocity.length();
            size = size - drag * delta / 50;
            if (size <= 0) {
                // Drag has stopped the projectile entirely — counts as a hit.
                return hit(context);
            }
            velocity.normalize().multiply(size);
        }

        // Compute incremental speed movement
        double remainingSpeed = speed * delta / 50;
        List<CandidateEntity> candidates = null;
        if (radius >= 0 && targetEntities) {
            // Gather potential entity targets near the whole travel segment,
            // pre-expanding their hitboxes by the projectile radius.
            Entity sourceEntity = context.getEntity();
            candidates = new ArrayList<CandidateEntity>();
            double boundSize = Math.ceil(remainingSpeed) * radius + 2;
            List<Entity> nearbyEntities = CompatibilityUtils.getNearbyEntities(targetLocation, boundSize, boundSize, boundSize);
            for (Entity entity : nearbyEntities)
            {
                if ((targetSelf || entity != sourceEntity) && context.canTarget(entity))
                {
                    candidates.add(new CandidateEntity(entity));
                }
            }
            if (candidates.isEmpty())
            {
                candidates = null;
            }
        }

        // Put a sane limit on the number of iterations here
        for (int i = 0; i < 256; i++) {
            // Check for entity collisions first
            Vector targetVector = targetLocation.toVector();
            if (candidates != null) {
                for (CandidateEntity candidate : candidates) {
                    if (candidate.bounds.contains(targetVector)) {
                        context.setTargetEntity(candidate.entity);
                        return hit(context);
                    }
                }
            }
            // Leaving the vertical bounds of the world counts as a hit.
            int y = targetLocation.getBlockY();
            if (y >= targetLocation.getWorld().getMaxHeight() || y <= 0) {
                return hit(context);
            }

            // Stop at unloaded chunks rather than forcing them to load.
            Block block = targetLocation.getBlock();
            if (!block.getChunk().isLoaded()) {
                return hit(context);
            }
            if (!context.isTransparent(block.getType())) {
                return hit(context);
            }

            // Step forward in sub-block increments (at most 0.5 of the speed
            // vector) so fast projectiles cannot tunnel through blocks.
            double partialSpeed = Math.min(0.5, remainingSpeed);
            Vector speedVector = velocity.clone().multiply(partialSpeed);
            remainingSpeed -= 0.5;
            Vector newLocation = targetLocation.toVector().add(speedVector);

            // Skip over same blocks, we increment by 0.5 to try and catch diagonals
            if (newLocation.getBlockX() == targetLocation.getBlockX()
                    && newLocation.getBlockY() == targetLocation.getBlockY()
                    && newLocation.getBlockZ() == targetLocation.getBlockZ()) {
                remainingSpeed -= 0.5;
                newLocation = newLocation.add(speedVector);
                targetLocation.setX(newLocation.getX());
                targetLocation.setY(newLocation.getY());
                targetLocation.setZ(newLocation.getZ());
            } else {
                targetLocation.setX(newLocation.getX());
                targetLocation.setY(newLocation.getY());
                targetLocation.setZ(newLocation.getZ());
            }
            if (remainingSpeed <= 0) break;
        }

        return SpellResult.PENDING;
    }

    /**
     * Marks the projectile as hit: cancels in-flight effects, plays the hit
     * effects, and immediately starts the child action sequence.
     */
    protected SpellResult hit(CastContext context) {
        hit = true;
        if (activeProjectileEffects != null) {
            for (EffectPlay play : activeProjectileEffects) {
                play.cancel();
            }
        }
        context.playEffects(hitEffectKey);
        return super.perform(context);
    }

    /** Lists this action's configurable parameter names for tab completion. */
    @Override
    public void getParameterNames(Spell spell, Collection<String> parameters)
    {
        super.getParameterNames(spell, parameters);
        parameters.add("interval");
        parameters.add("lifetime");
        parameters.add("speed");
        parameters.add("start");
        parameters.add("gravity");
        parameters.add("drag");
        parameters.add("target_entities");
        parameters.add("target_self");
    }

    /** Provides example values for each parameter for tab completion. */
    @Override
    public void getParameterOptions(Spell spell, String parameterKey, Collection<String> examples)
    {
        super.getParameterOptions(spell, parameterKey, examples);

        if (parameterKey.equals("speed") || parameterKey.equals("lifetime") ||
            parameterKey.equals("interval") || parameterKey.equals("start") || parameterKey.equals("size") ||
            parameterKey.equals("gravity") || parameterKey.equals("drag")) {
            examples.addAll(Arrays.asList(BaseSpell.EXAMPLE_SIZES));
        } else if (parameterKey.equals("target_entities") || parameterKey.equals("target_self")) {
            examples.addAll(Arrays.asList(BaseSpell.EXAMPLE_BOOLEANS));
        }
    }
}
| src/main/java/com/elmakers/mine/bukkit/action/builtin/CustomProjectileAction.java | package com.elmakers.mine.bukkit.action.builtin;
import com.elmakers.mine.bukkit.action.CompoundAction;
import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.effect.EffectPlay;
import com.elmakers.mine.bukkit.api.effect.EffectPlayer;
import com.elmakers.mine.bukkit.api.spell.Spell;
import com.elmakers.mine.bukkit.api.spell.SpellResult;
import com.elmakers.mine.bukkit.spell.BaseSpell;
import com.elmakers.mine.bukkit.utility.BoundingBox;
import com.elmakers.mine.bukkit.utility.CompatibilityUtils;
import de.slikey.effectlib.util.DynamicLocation;
import org.bukkit.Location;
import org.bukkit.block.Block;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
import org.bukkit.util.Vector;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
public class CustomProjectileAction extends CompoundAction
{
private int interval;
private int lifetime;
private double speed;
private int startDistance;
private String projectileEffectKey;
private String hitEffectKey;
private boolean targetEntities;
private boolean targetSelf;
private double radius;
private double gravity;
private double drag;
private long lastUpdate;
private long nextUpdate;
private long deadline;
private boolean hit = false;
private Vector velocity = null;
private DynamicLocation effectLocation = null;
private Collection<EffectPlay> activeProjectileEffects;
private class CandidateEntity {
public final Entity entity;
public final BoundingBox bounds;
public CandidateEntity(Entity entity) {
this.entity = entity;
this.bounds = CompatibilityUtils.getHitbox(entity).expand(radius);
}
}
@Override
public void prepare(CastContext context, ConfigurationSection parameters) {
super.prepare(context, parameters);
interval = parameters.getInt("interval", 200);
lifetime = parameters.getInt("lifetime", 5000);
speed = parameters.getDouble("speed", 0.1);
startDistance = parameters.getInt("start", 0);
projectileEffectKey = parameters.getString("projectile_effects", "projectile");
hitEffectKey = parameters.getString("hit_effects", "hit");
targetEntities = parameters.getBoolean("target_entities", true);
targetSelf = parameters.getBoolean("target_self", false);
radius = parameters.getDouble("size", 1) / 2;
gravity = parameters.getDouble("gravity", 0);
drag = parameters.getDouble("drag", 0);
}
@Override
public void reset(CastContext context)
{
super.reset(context);
long now = System.currentTimeMillis();
nextUpdate = 0;
lastUpdate = now;
deadline = now + lifetime;
hit = false;
effectLocation = null;
velocity = null;
activeProjectileEffects = null;
}
@Override
public SpellResult perform(CastContext context) {
long now = System.currentTimeMillis();
if (now < nextUpdate)
{
return SpellResult.PENDING;
}
if (now > deadline)
{
hit(context);
}
if (hit)
{
return super.perform(context);
}
nextUpdate = now + interval;
// Check for initialization required
Location targetLocation = context.getTargetLocation();
if (targetLocation == null) {
targetLocation = context.getWandLocation().clone();
context.setTargetLocation(targetLocation);
}
if (velocity == null)
{
velocity = context.getDirection().clone().normalize();
if (startDistance != 0) {
targetLocation.add(velocity.clone().multiply(startDistance));
}
// Start up projectile FX
Collection<EffectPlayer> projectileEffects = context.getEffects(projectileEffectKey);
for (EffectPlayer apiEffectPlayer : projectileEffects)
{
if (effectLocation == null) {
effectLocation = new DynamicLocation(targetLocation);
effectLocation.setDirection(velocity);
}
if (activeProjectileEffects == null) {
activeProjectileEffects = new ArrayList<EffectPlay>();
}
// Hrm- this is ugly, but I don't want the API to depend on EffectLib.
if (apiEffectPlayer instanceof com.elmakers.mine.bukkit.effect.EffectPlayer)
{
com.elmakers.mine.bukkit.effect.EffectPlayer effectPlayer = (com.elmakers.mine.bukkit.effect.EffectPlayer)apiEffectPlayer;
effectPlayer.setEffectPlayList(activeProjectileEffects);
effectPlayer.startEffects(effectLocation, null);
}
}
}
else if (effectLocation != null)
{
effectLocation.updateFrom(targetLocation);
effectLocation.setDirection(velocity);
}
// Advance position, checking for collisions
long delta = now - lastUpdate;
lastUpdate = now;
// Apply gravity and drag
if (gravity > 0) {
velocity.setY(velocity.getY() - gravity * delta / 50);
}
if (drag > 0) {
double size = velocity.length();
size = size - drag * delta / 50;
if (size <= 0) {
hit(context);
return SpellResult.PENDING;
}
velocity.normalize().multiply(size);
}
// Compute incremental speed movement
double remainingSpeed = speed * delta / 50;
List<CandidateEntity> candidates = null;
if (radius >= 0 && targetEntities) {
Entity sourceEntity = context.getEntity();
candidates = new ArrayList<CandidateEntity>();
double boundSize = Math.ceil(remainingSpeed) * radius + 2;
List<Entity> nearbyEntities = CompatibilityUtils.getNearbyEntities(targetLocation, boundSize, boundSize, boundSize);
for (Entity entity : nearbyEntities)
{
if ((targetSelf || entity != sourceEntity) && context.canTarget(entity))
{
candidates.add(new CandidateEntity(entity));
}
}
if (candidates.isEmpty())
{
candidates = null;
}
}
// Put a sane limit on the number of iterations here
for (int i = 0; i < 256; i++) {
// Check for entity collisions first
Vector targetVector = targetLocation.toVector();
if (candidates != null) {
for (CandidateEntity candidate : candidates) {
if (candidate.bounds.contains(targetVector)) {
context.setTargetEntity(candidate.entity);
hit(context);
return SpellResult.PENDING;
}
}
}
int y = targetLocation.getBlockY();
if (y >= targetLocation.getWorld().getMaxHeight() || y <= 0) {
hit(context);
return SpellResult.PENDING;
}
Block block = targetLocation.getBlock();
if (!block.getChunk().isLoaded()) {
hit(context);
return SpellResult.PENDING;
}
if (!context.isTransparent(block.getType())) {
hit(context);
return SpellResult.PENDING;
}
double partialSpeed = Math.min(0.5, remainingSpeed);
Vector speedVector = velocity.clone().multiply(partialSpeed);
remainingSpeed -= 0.5;
Vector newLocation = targetLocation.toVector().add(speedVector);
// Skip over same blocks, we increment by 0.5 to try and catch diagonals
if (newLocation.getBlockX() == targetLocation.getBlockX()
&& newLocation.getBlockY() == targetLocation.getBlockY()
&& newLocation.getBlockZ() == targetLocation.getBlockZ()) {
remainingSpeed -= 0.5;
newLocation = newLocation.add(speedVector);
targetLocation.setX(newLocation.getX());
targetLocation.setY(newLocation.getY());
targetLocation.setZ(newLocation.getZ());
} else {
targetLocation.setX(newLocation.getX());
targetLocation.setY(newLocation.getY());
targetLocation.setZ(newLocation.getZ());
}
if (remainingSpeed <= 0) break;
}
return SpellResult.PENDING;
}
protected void hit(CastContext context) {
hit = true;
if (activeProjectileEffects != null) {
for (EffectPlay play : activeProjectileEffects) {
play.cancel();
}
}
context.playEffects(hitEffectKey);
}
@Override
public void getParameterNames(Spell spell, Collection<String> parameters)
{
super.getParameterNames(spell, parameters);
parameters.add("interval");
parameters.add("lifetime");
parameters.add("speed");
parameters.add("start");
parameters.add("gravity");
parameters.add("drag");
parameters.add("target_entities");
parameters.add("target_self");
}
@Override
public void getParameterOptions(Spell spell, String parameterKey, Collection<String> examples)
{
super.getParameterOptions(spell, parameterKey, examples);
if (parameterKey.equals("speed") || parameterKey.equals("lifetime") ||
parameterKey.equals("interval") || parameterKey.equals("start") || parameterKey.equals("size") ||
parameterKey.equals("gravity") || parameterKey.equals("drag")) {
examples.addAll(Arrays.asList(BaseSpell.EXAMPLE_SIZES));
} else if (parameterKey.equals("target_entities") || parameterKey.equals("target_self")) {
examples.addAll(Arrays.asList(BaseSpell.EXAMPLE_BOOLEANS));
}
}
}
| CustomProjectile will immediately start to process its hit actions on hit, rather than waiting a tick
| src/main/java/com/elmakers/mine/bukkit/action/builtin/CustomProjectileAction.java | CustomProjectile will immediately start to process its hit actions on hit, rather than waiting a tick |
|
Java | mit | 1b972e965e6e33080ec2089c54a3859c212f878a | 0 | Heigvd/Wegas,Heigvd/Wegas,Heigvd/Wegas,Heigvd/Wegas,Heigvd/Wegas | /*
* Wegas
* http://wegas.albasim.ch
*
* Copyright (c) 2013-2018 School of Business and Engineering Vaud, Comem, MEI
* Licensed under the MIT License
*/
package com.wegas.core.persistence.game;
import com.fasterxml.jackson.annotation.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.wegas.core.Helper;
import com.wegas.core.exception.client.WegasIncompatibleType;
import com.wegas.core.jcr.page.Page;
import com.wegas.core.jcr.page.Pages;
import com.wegas.core.persistence.AbstractEntity;
import com.wegas.core.persistence.Broadcastable;
import com.wegas.core.persistence.EntityComparators;
import com.wegas.core.persistence.InstanceOwner;
import com.wegas.core.persistence.NamedEntity;
import com.wegas.core.persistence.variable.DescriptorListI;
import com.wegas.core.persistence.variable.VariableDescriptor;
import com.wegas.core.persistence.variable.VariableInstance;
import com.wegas.core.rest.util.Views;
import com.wegas.core.security.persistence.User;
import com.wegas.core.security.util.WegasEntityPermission;
import com.wegas.core.security.util.WegasMembership;
import com.wegas.core.security.util.WegasPermission;
import java.util.*;
import java.util.Map.Entry;
import javax.jcr.RepositoryException;
import javax.persistence.*;
import javax.validation.constraints.Pattern;
import org.apache.shiro.SecurityUtils;
/**
* @author Francois-Xavier Aeberhard (fx at red-agent.com)
*/
@Entity
//@Table(uniqueConstraints =
// @UniqueConstraint(columnNames = "name"))
@JsonIgnoreProperties(ignoreUnknown = true)
@NamedQueries({
@NamedQuery(name = "GameModel.findIdById", query = "SELECT gm.id FROM GameModel gm WHERE gm.id = :gameModelId"),
@NamedQuery(name = "GameModel.findByStatus", query = "SELECT a FROM GameModel a WHERE a.status = :status ORDER BY a.name ASC"),
@NamedQuery(name = "GameModel.findDistinctChildrenLabels", query = "SELECT DISTINCT(child.label) FROM VariableDescriptor child WHERE child.root.id = :containerId"),
@NamedQuery(name = "GameModel.findByName", query = "SELECT a FROM GameModel a WHERE a.name = :name"),
@NamedQuery(name = "GameModel.countByName", query = "SELECT count(gm.id) FROM GameModel gm WHERE gm.name = :name"),
@NamedQuery(name = "GameModel.findAll", query = "SELECT gm FROM GameModel gm"),
@NamedQuery(name = "GameModel.findAllInstantiations", query = "SELECT gm FROM GameModel gm where gm.basedOn.id = :id")
})
@Table(
indexes = {
@Index(columnList = "createdby_id"),
@Index(columnList = "basedon_id")
}
)
public class GameModel extends AbstractEntity implements DescriptorListI<VariableDescriptor>, InstanceOwner, Broadcastable, NamedEntity {
private static final long serialVersionUID = 1L;
@Transient
private Boolean canView = null;
@Transient
private Boolean canEdit = null;
@Transient
private Boolean canInstantiate = null;
@Transient
private Boolean canDuplicate = null;
/**
*
*/
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE)
@JsonView(Views.IndexI.class)
private Long id;
/**
*
*/
@Basic(optional = false)
@Pattern(regexp = "^.*\\S+.*$", message = "GameModel name cannot be empty")// must at least contains one non-whitespace character
private String name;
@Basic(optional = false)
private Integer UIVersion = 1;
@OneToMany(mappedBy = "gameModel", cascade = {CascadeType.ALL}, orphanRemoval = true)
private List<GameModelLanguage> languages = new ArrayList<>();
/**
*
*/
@Lob
//@Basic(fetch = FetchType.LAZY)
@JsonView(Views.ExtendedI.class)
private String description;
/**
*
*/
@Enumerated(value = EnumType.STRING)
@Column(length = 24, columnDefinition = "character varying(24) default 'LIVE'::character varying")
private Status status = Status.LIVE;
/**
*
*/
@Lob
//@Basic(fetch = FetchType.LAZY)
@JsonView(Views.ExtendedI.class)
private String comments;
/**
*
*/
@Temporal(TemporalType.TIMESTAMP)
@Column(columnDefinition = "timestamp with time zone")
private Date createdTime = new Date();
/**
*
*/
@ManyToOne(fetch = FetchType.LAZY)
@JsonIgnore
private User createdBy;
/**
* Link to original gameModel for "PLAY" gameModel
*/
@ManyToOne(fetch = FetchType.LAZY)
@JsonIgnore
private GameModel basedOn;
/*
*
*
* @JsonIgnore private Boolean template = true;
*/
/**
*
*/
@OneToMany(mappedBy = "gameModel", cascade = {CascadeType.ALL}, orphanRemoval = true, fetch = FetchType.LAZY)
@JsonIgnore
private Set<VariableDescriptor> variableDescriptors = new HashSet<>();
/**
* A list of Variable Descriptors that are at the root level of the
* hierarchy (other VariableDescriptor can be placed inside of a
* ListDescriptor's items List).
*/
@OneToMany(mappedBy = "root", cascade = {CascadeType.ALL}, fetch = FetchType.LAZY)
@OrderColumn(name = "gm_items_order")
//@JsonManagedReference
private List<VariableDescriptor> items = new ArrayList<>();
/**
* All gameModelScoped instances
*/
@JsonIgnore
@OneToMany(mappedBy = "gameModel", cascade = CascadeType.ALL)
private List<VariableInstance> privateInstances = new ArrayList<>();
/**
*
*/
@OneToMany(mappedBy = "gameModel", cascade = {CascadeType.ALL}, orphanRemoval = true, fetch = FetchType.LAZY)
@JsonManagedReference
@JsonIgnore
//@JsonView(Views.ExportI.class)
private List<Game> games = new ArrayList<>();
/**
* Holds all the scripts contained in current game model.
*/
@OneToMany(mappedBy = "scriptlibrary_GameModel", cascade = CascadeType.ALL, orphanRemoval = true)
@JsonView({Views.ExportI.class})
private List<GameModelContent> scriptLibrary = new ArrayList<>();
/**
*
*/
@OneToMany(mappedBy = "csslibrary_GameModel", cascade = CascadeType.ALL, orphanRemoval = true)
@JsonView({Views.ExportI.class})
private List<GameModelContent> cssLibrary = new ArrayList<>();
/**
*
*/
@OneToMany(mappedBy = "clientscriptlibrary_GameModel", cascade = CascadeType.ALL, orphanRemoval = true)
@JsonView({Views.ExportI.class})
private List<GameModelContent> clientScriptLibrary = new ArrayList<>();
/**
*
*/
@Embedded
private GameModelProperties properties = new GameModelProperties();
/**
* Holds a reference to the pages, used to serialize page and game model at
* the same time.
*/
@Transient
@JsonView({Views.ExportI.class})
private Map<String, JsonNode> pages;
/**
*
*/
public GameModel() {
}
/**
* @param name
*/
public GameModel(String name) {
this.name = name;
}
/**
* @param pageMap
*
* @throws RepositoryException
*/
    @JsonCreator
    public GameModel(@JsonProperty("pages") JsonNode pageMap) throws RepositoryException {
        Map<String, JsonNode> map = new HashMap<>();
        // No "pages" property in the incoming JSON: leave this.pages unset.
        if (pageMap == null) {
            return;
        }
        String curKey;
        // Copy each page node of the JSON object into a plain map keyed by page name.
        Iterator<String> iterator = pageMap.fieldNames();
        while (iterator.hasNext()) {
            curKey = iterator.next();
            map.put(curKey, pageMap.get(curKey));
        }
        // setPages stores the map and, when an id already exists, persists it to the repository.
        this.setPages(map);
    }
/**
* Set the gameModel this PLAY gameModel is based on
*
* @param srcGameModel the original game model this gameModel is a duplicata of
*/
public void setBasedOn(GameModel srcGameModel) {
this.basedOn = srcGameModel;
}
/**
* Returns the original game model this gameModel is a duplicata of
*
* @return the original game model
*/
public GameModel getBasedOn() {
return this.basedOn;
}
/**
*
*/
public void propagateGameModel() {
//this.variableDescriptors.clear();
this.propagateGameModel(this);
}
/**
* Register new descriptor within the main descriptor list
* Method do nothing id descriptor is already registered
*
* @param vd the new descriptor to register
*/
public void addToVariableDescriptors(VariableDescriptor vd) {
if (!this.getVariableDescriptors().contains(vd)) {
this.getVariableDescriptors().add(vd);
vd.setGameModel(this);
}
}
/**
* Remove
*
* @param vd
*/
public void removeFromVariableDescriptors(VariableDescriptor vd) {
this.getVariableDescriptors().remove(vd);
}
/**
* Make sur all descriptor (in the given list, deep) a registered within the main descriptor list
*
* @param list base list to fetch new descriptor from
*/
    private void propagateGameModel(final DescriptorListI<? extends VariableDescriptor> list) {
        for (VariableDescriptor vd : list.getItems()) {
            // Register the child itself within the main descriptor set...
            this.addToVariableDescriptors(vd);
            // ...and recurse into any descriptor that is itself a container of descriptors.
            if (vd instanceof DescriptorListI) {
                this.propagateGameModel((DescriptorListI<? extends VariableDescriptor>) vd);
            }
        }
    }
    /**
     * Copy the basic fields (name, description, comments, UI version, properties)
     * of another GameModel into this one.
     *
     * @param n the entity to merge from; must be a GameModel
     */
    @Override
    public void merge(AbstractEntity n) {
        if (n instanceof GameModel) {
            GameModel other = (GameModel) n;
            this.setName(other.getName());
            this.setDescription(other.getDescription()); // Set description early, since fetching this lazy loaded attribute will cause an entity refresh
            this.setComments(other.getComments());
            this.setUIVersion(other.getUIVersion());
            this.getProperties().merge(other.getProperties());
            //this.setLanguages(ListUtils.mergeLists(this.getLanguages(), other.getLanguages())); // Note For Modeler-> not in default merge
        } else {
            // Merging from a non-GameModel entity is a programming error.
            throw new WegasIncompatibleType(this.getClass().getSimpleName() + ".merge (" + n.getClass().getSimpleName() + ") is not possible");
        }
    }
/**
*
*/
@PrePersist
public void prePersist() {
this.setCreatedTime(new Date());
}
/**
* For serialization
*
* @return true if current user has view permission on this
*/
@JsonView(Views.LobbyI.class)
public Boolean getCanView() {
if (canView != null) {
return canView;
} else {
Helper.printWegasStackTrace(new Exception());
return true; // by design, non readable gameModel will throws an exception
}
}
/**
* @return true if current user has edit permission on this
*/
@JsonView(Views.LobbyI.class)
public Boolean getCanEdit() {
if (canEdit != null) {
return canEdit;
} else {
// I DO NOT LIKE VERY MUCH USING SHIRO WITHIN ENTITIES...
Helper.printWegasStackTrace(new Exception());
return SecurityUtils.getSubject().isPermitted("GameModel:Edit:gm" + this.id);
}
}
/**
* @return true if current user has duplicate permission on this
*/
@JsonView(Views.LobbyI.class)
public Boolean getCanDuplicate() {
if (canDuplicate != null) {
return canDuplicate;
} else {
// I DO NOT LIKE VERY MUCH USING SHIRO WITHIN ENTITIES...
Helper.printWegasStackTrace(new Exception());
return SecurityUtils.getSubject().isPermitted("GameModel:Duplicate:gm" + this.id);
}
}
/**
* @return true if current user has instantiate permission on this
*/
@JsonView(Views.LobbyI.class)
public Boolean getCanInstantiate() {
if (canInstantiate != null) {
return canInstantiate;
} else {
// I DO NOT LIKE VERY MUCH USING SHIRO WITHIN ENTITIES...
Helper.printWegasStackTrace(new Exception());
return SecurityUtils.getSubject().isPermitted("GameModel:Instantiate:gm" + this.id);
}
}
public void setCanView(Boolean canView) {
this.canView = canView;
}
public void setCanEdit(Boolean canEdit) {
this.canEdit = canEdit;
}
public void setCanInstantiate(Boolean canInstantiate) {
this.canInstantiate = canInstantiate;
}
public void setCanDuplicate(Boolean canDuplicate) {
this.canDuplicate = canDuplicate;
}
@Override
public Long getId() {
return id;
}
/**
* @param id
*/
public void setId(Long id) {
this.id = id;
}
@Override
public String getName() {
return name;
}
@Override
public void setName(String name) {
this.name = name;
}
public Integer getUIVersion() {
return UIVersion;
}
public void setUIVersion(Integer UIVersion) {
this.UIVersion = UIVersion;
}
/**
* @return Current GameModel's status
*/
@JsonIgnore
public Status getStatus() {
return status;
}
/**
* Change the status of the gameModel.
*
* @param status status to set
*/
@JsonIgnore
public void setStatus(Status status) {
this.status = status;
}
/**
* get the set of all descriptor from the game model
*
* @return all variable descriptors
*/
@JsonIgnore
public Set<VariableDescriptor> getVariableDescriptors() {
return variableDescriptors;
}
/**
* @param variableDescriptors
*/
public void setVariableDescriptors(Set<VariableDescriptor> variableDescriptors) {
this.variableDescriptors = new HashSet<>();
for (VariableDescriptor vd : variableDescriptors) {
this.addToVariableDescriptors(vd);
}
}
@Override
public List<VariableInstance> getPrivateInstances() {
return privateInstances;
}
@Override
public List<VariableInstance> getAllInstances() {
List<VariableInstance> instances = new ArrayList<>();
instances.addAll(getPrivateInstances());
for (Game g : getGames()) {
for (Team t : g.getTeams()) {
instances.addAll(t.getAllInstances());
}
}
return instances;
}
public void setPrivateInstances(List<VariableInstance> privateInstances) {
this.privateInstances = privateInstances;
}
/**
* @return a list of Variable Descriptors that are at the root level of the
* hierarchy (other VariableDescriptor can be placed inside of a
* ListDescriptor's items List)
*/
@JsonIgnore
public List<VariableDescriptor> getChildVariableDescriptors() {
return this.getItems();
}
/**
* @param variableDescriptors
*/
@JsonProperty
public void setChildVariableDescriptors(List<VariableDescriptor> variableDescriptors) {
this.setItems(variableDescriptors);
}
@Override
public void setChildParent(VariableDescriptor child) {
child.setRoot(this);
}
/**
* @return the games
*/
@JsonIgnore
public List<Game> getGames() {
return games;
}
/**
* @param games the games to set
*/
public void setGames(List<Game> games) {
this.games = games;
for (Game g : games) {
g.setGameModel(this);
}
}
/**
* @param game
*/
public void addGame(Game game) {
this.getGames().add(game);
game.setGameModel(this);
//game.setGameModelId(this.getId());
}
/**
* @return the scriptLibrary
*/
@JsonIgnore
public List<GameModelContent> getScriptLibraryList() {
return scriptLibrary;
}
/**
* @param scriptLibrary the scriptLibrary to set
*/
@JsonIgnore
public void setScriptLibraryList(List<GameModelContent> scriptLibrary) {
this.scriptLibrary = scriptLibrary;
}
/**
* @return all players from all teams and all games
*/
@JsonIgnore
@Override
public List<Player> getPlayers() {
List<Player> players = new ArrayList<>();
for (Game g : this.getGames()) {
players.addAll(g.getPlayers());
}
return players;
}
@Override
@JsonIgnore
public Player getAnyLivePlayer() {
for (Game game : this.getGames()) {
Player p = game.getAnyLivePlayer();
if (p != null) {
return p;
}
}
return null;
}
/**
* Return a test player.
* It may be a player in any team of a DebugGame or a player in a DebugTeam
*
* @return testPlayer
*/
@JsonIgnore
public Player findTestPlayer() {
Player p = null;
for (Game game : this.getGames()) {
if (game instanceof DebugGame) {
p = game.getAnyLivePlayer();
if (p != null) {
return p;
}
} else {
for (Team team : game.getTeams()) {
if (team instanceof DebugTeam) {
p = team.getAnyLivePlayer();
if (p != null) {
return p;
}
}
}
}
}
return null;
}
/**
* @return the createdTime
*/
public Date getCreatedTime() {
return (createdTime != null ? new Date(createdTime.getTime()) : null);
}
/**
* @param createdTime the createdTime to set
*/
public void setCreatedTime(Date createdTime) {
this.createdTime = createdTime != null ? new Date(createdTime.getTime()) : null;
}
/**
* @return the properties
*/
public GameModelProperties getProperties() {
return this.properties;
}
/**
* @param properties the properties to set
*/
public void setProperties(GameModelProperties properties) {
this.properties = properties;
}
/**
* @return the cssLibrary
*/
@JsonIgnore
public List<GameModelContent> getCssLibraryList() {
return cssLibrary;
}
/**
* @param cssLibrary the cssLibrary to set
*/
@JsonIgnore
public void setCssLibraryList(List<GameModelContent> cssLibrary) {
this.cssLibrary = cssLibrary;
}
private Map<String, GameModelContent> getLibraryAsMap(List<GameModelContent> library) {
Map<String, GameModelContent> map = new HashMap<>();
for (GameModelContent gmc : library) {
map.put(gmc.getContentKey(), gmc);
}
return map;
}
public Map<String, GameModelContent> getCssLibrary() {
return getLibraryAsMap(cssLibrary);
}
public void setCssLibrary(Map<String, GameModelContent> library) {
this.cssLibrary = new ArrayList<>();
for (Entry<String, GameModelContent> entry : library.entrySet()) {
String key = entry.getKey();
GameModelContent gmc = entry.getValue();
gmc.setCsslibrary_GameModel(this);
gmc.setContentKey(key);
cssLibrary.add(gmc);
}
}
/**
* @return the clientScriptLibrary
*/
@JsonIgnore
public List<GameModelContent> getClientScriptLibraryList() {
return clientScriptLibrary;
}
public Map<String, GameModelContent> getScriptLibrary() {
return getLibraryAsMap(scriptLibrary);
}
public Map<String, GameModelContent> getClientScriptLibrary() {
return getLibraryAsMap(clientScriptLibrary);
}
public void setScriptLibrary(Map<String, GameModelContent> library) {
this.scriptLibrary = new ArrayList<>();
for (Entry<String, GameModelContent> entry : library.entrySet()) {
String key = entry.getKey();
GameModelContent gmc = entry.getValue();
gmc.setScriptlibrary_GameModel(this);
gmc.setContentKey(key);
scriptLibrary.add(gmc);
}
}
public void setClientScriptLibrary(Map<String, GameModelContent> library) {
this.clientScriptLibrary = new ArrayList<>();
for (Entry<String, GameModelContent> entry : library.entrySet()) {
String key = entry.getKey();
GameModelContent gmc = entry.getValue();
gmc.setClientscriptlibrary_GameModel(this);
gmc.setContentKey(key);
clientScriptLibrary.add(gmc);
}
}
/**
* @param key
*
* @return the clientScript matching the key or null
*/
public GameModelContent getClientScript(String key) {
return this.getGameModelContent(clientScriptLibrary, key);
}
/**
* @param key
*
* @return the clientScript matching the key or null
*/
public GameModelContent getScript(String key) {
return this.getGameModelContent(scriptLibrary, key);
}
/**
* @param key
*
* @return the clientScript matching the key or null
*/
public GameModelContent getCss(String key) {
return this.getGameModelContent(cssLibrary, key);
}
public GameModelContent getGameModelContent(List<GameModelContent> list, String key) {
for (GameModelContent gmc : list) {
if (gmc.getContentKey().equals(key)) {
return gmc;
}
}
return null;
}
/**
* @param clientScriptLibrary the clientScriptLibrary to set
*/
@JsonIgnore
public void setClientScriptLibraryList(List<GameModelContent> clientScriptLibrary) {
this.clientScriptLibrary = clientScriptLibrary;
}
    /**
     * Fetch the pages of this game model from the page repository.
     * Best-effort: a repository failure yields an empty map rather than an error.
     *
     * @return the pages, or an empty map when a pagesUri is configured or on repository failure
     */
    public Map<String, JsonNode> getPages() {
        // do not even try to fetch pages from repository if the gamemodel define a pagesURI
        if (Helper.isNullOrEmpty(getProperties().getPagesUri())) {
            try (final Pages pagesDAO = new Pages(this.id)) {
                return pagesDAO.getPagesContent();
            } catch (RepositoryException ex) {
                // best-effort: swallow the failure and report no pages
                return new HashMap<>();
            }
        } else {
            return new HashMap<>();
        }
    }
/**
* @param pageMap
*/
public final void setPages(Map<String, JsonNode> pageMap) {
this.pages = pageMap;
if (this.id != null) {
this.storePages();
}
}
@Override
@JsonView(Views.ExportI.class)
public List<VariableDescriptor> getItems() {
return this.items;
}
@Override
public void setItems(List<VariableDescriptor> items) {
this.items = new ArrayList<>();
for (VariableDescriptor vd : items) {
this.addItem(vd);
}
}
@Override
@JsonIgnore
public GameModel getGameModel() {
return this;
}
@PostPersist
private void storePages() {
if (this.pages != null) {
try (final Pages pagesDAO = new Pages(this.id)) {
pagesDAO.delete(); // Remove existing pages
// Pay Attention: this.pages != this.getPages() !
// this.pages contains deserialized pages, getPages() fetchs them from the jackrabbit repository
for (Entry<String, JsonNode> p : this.pages.entrySet()) { // Add all pages
pagesDAO.store(new Page(p.getKey(), p.getValue()));
}
} catch (RepositoryException ex) {
System.err.println("Failed to create repository for GameModel " + this.id);
}
}
}
/**
* @return the description
*/
public String getDescription() {
return description;
}
/**
* @param description the description to set
*/
public void setDescription(String description) {
this.description = description;
}
public String getComments() {
return comments;
}
public void setComments(String comments) {
this.comments = comments;
}
/**
* @return the createdBy
*/
@JsonIgnore
public User getCreatedBy() {
return createdBy;
}
/**
* @param createdBy the createdBy to set
*/
public void setCreatedBy(User createdBy) {
this.createdBy = createdBy;
}
/**
* @return name of the user who created this or null if user no longer
* exists
*/
public String getCreatedByName() {
if (this.getCreatedBy() != null) {
return this.getCreatedBy().getName();
}
return null;
}
/**
* @param createdByName
*/
public void setCreatedByName(String createdByName) {
// Here so game deserialization works
}
@JsonIgnore
public List<GameModelLanguage> getRawLanguages() {
return this.languages;
}
public List<GameModelLanguage> getLanguages() {
return Helper.copyAndSort(this.languages, new EntityComparators.OrderComparator<>());
}
public void setLanguages(List<GameModelLanguage> languages) {
this.languages = languages;
int i = 0;
for (GameModelLanguage lang : this.languages) {
lang.setIndexOrder(i++);
lang.setGameModel(this);
}
}
/**
*
* @param code
*
* @return
*/
public GameModelLanguage getLanguageByCode(String code) {
if (code != null) {
for (GameModelLanguage lang : this.getLanguages()) {
if (code.equals(lang.getCode())){
return lang;
}
}
}
return null;
}
/**
* get list of language refName, sorted according to player preferences if such a player is provided;
*
* @param player may be null
*
* @return list
*/
public List<String> getPreferredLanguagesRefName(Player player) {
List<GameModelLanguage> sortedLanguages = getLanguages();
ArrayList<String> langs = new ArrayList<>(sortedLanguages.size());
for (GameModelLanguage gml : sortedLanguages) {
if (player != null && gml.getRefName().equals(player.getRefName())) {
langs.add(0, gml.getRefName());
} else {
langs.add(gml.getRefName());
}
}
return langs;
}
/**
* get list of language code, the given one first
*
*
* @param preferredRefName preferred refName, may be null or empty
*
* @return list
*/
public List<String> getPreferredLanguagesCode(String preferredRefName) {
List<GameModelLanguage> sortedLanguages = getLanguages();
ArrayList<String> langs = new ArrayList<>(sortedLanguages.size());
for (GameModelLanguage gml : sortedLanguages) {
if (gml.getRefName().equals(preferredRefName)) {
langs.add(0, gml.getCode());
} else {
langs.add(gml.getCode());
}
}
return langs;
}
/**
* get list of language refName, the given one first
*
*
* @param preferredRefName preferred refName, may be null or empty
*
* @return list
*/
public List<String> getPreferredLanguagesRefName(String preferredRefName) {
List<GameModelLanguage> sortedLanguages = getLanguages();
ArrayList<String> langs = new ArrayList<>(sortedLanguages.size());
for (GameModelLanguage gml : sortedLanguages) {
if (gml.getRefName().equals(preferredRefName)) {
langs.add(0, gml.getRefName());
} else {
langs.add(gml.getRefName());
}
}
return langs;
}
/**
* @return the template
*/
public Boolean getTemplate() {
return status != Status.PLAY;
}
/**
* @param template the template to set public void setTemplate(Boolean
* template) { this.template = template; }
*/
/**
* TODO: select game.* FROM GAME where dtype like 'DEBUGGAME' and
* gamemodelid = this.getId()
*
* @return true if the gameModel has a DebugGame
*/
public boolean hasDebugGame() {
for (Game g : getGames()) {
if (g instanceof DebugGame) {
return true;
}
}
return false;
}
@Override
@JsonIgnore
public String getChannel() {
return Helper.GAMEMODEL_CHANNEL_PREFIX + getId();
}
@Override
public Collection<WegasPermission> getRequieredUpdatePermission() {
return WegasPermission.getAsCollection(this.getAssociatedWritePermission());
}
@Override
public Collection<WegasPermission> getRequieredReadPermission() {
return WegasPermission.getAsCollection(this.getAssociatedReadPermission());
}
@Override
public Collection<WegasPermission> getRequieredCreatePermission() {
if (this.getStatus() == Status.PLAY) {
return WegasMembership.TRAINER;
} else {
return WegasMembership.SCENARIST;
}
}
@Override
public WegasPermission getAssociatedReadPermission() {
return new WegasEntityPermission(this.getId(), WegasEntityPermission.Level.READ, WegasEntityPermission.EntityType.GAMEMODEL);
}
@Override
public WegasPermission getAssociatedWritePermission() {
return new WegasEntityPermission(this.getId(), WegasEntityPermission.Level.WRITE, WegasEntityPermission.EntityType.GAMEMODEL);
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, List<AbstractEntity>> getEntities() {
Map<String, List<AbstractEntity>> map = new HashMap<>();
ArrayList<AbstractEntity> entities = new ArrayList<>();
entities.add(this);
map.put(this.getChannel(), entities);
return map;
}
/**
* <ul>
* <li>PLAY: {@link Status#PLAY}
* <li>LIVE: {@link Status#LIVE}</li>
* <li>BIN: {@link Status#BIN}</li>
* <li>DELETE: {@link Status#DELETE}</li>
* <li>SUPPRESSED: {@link Status#SUPPRESSED}</li>
* </ul>
*/
public enum Status {
/**
* Not a template game model but one linked to an effective game
*/
PLAY,
/**
* Template GameModel
*/
LIVE,
/**
* Template GameModel in the wast bin
*/
BIN,
/**
* Template GameModel Scheduled for deletion
*/
DELETE,
/**
* Does not exist anymore. Actually, this status should never persist.
* Used internally as game's missing.
*/
SUPPRESSED
}
/* try transient anotation on field "pages". Problem with anotation mixin'
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
this.pages = new HashMap<>();
}*/
}
| wegas-core/src/main/java/com/wegas/core/persistence/game/GameModel.java | /*
* Wegas
* http://wegas.albasim.ch
*
* Copyright (c) 2013-2018 School of Business and Engineering Vaud, Comem, MEI
* Licensed under the MIT License
*/
package com.wegas.core.persistence.game;
import com.fasterxml.jackson.annotation.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.wegas.core.Helper;
import com.wegas.core.exception.client.WegasIncompatibleType;
import com.wegas.core.jcr.page.Page;
import com.wegas.core.jcr.page.Pages;
import com.wegas.core.persistence.AbstractEntity;
import com.wegas.core.persistence.Broadcastable;
import com.wegas.core.persistence.EntityComparators;
import com.wegas.core.persistence.InstanceOwner;
import com.wegas.core.persistence.NamedEntity;
import com.wegas.core.persistence.variable.DescriptorListI;
import com.wegas.core.persistence.variable.VariableDescriptor;
import com.wegas.core.persistence.variable.VariableInstance;
import com.wegas.core.rest.util.Views;
import com.wegas.core.security.persistence.User;
import com.wegas.core.security.util.WegasEntityPermission;
import com.wegas.core.security.util.WegasMembership;
import com.wegas.core.security.util.WegasPermission;
import java.util.*;
import java.util.Map.Entry;
import javax.jcr.RepositoryException;
import javax.persistence.*;
import javax.validation.constraints.Pattern;
import org.apache.shiro.SecurityUtils;
/**
* @author Francois-Xavier Aeberhard (fx at red-agent.com)
*/
@Entity
//@Table(uniqueConstraints =
// @UniqueConstraint(columnNames = "name"))
@JsonIgnoreProperties(ignoreUnknown = true)
@NamedQueries({
@NamedQuery(name = "GameModel.findIdById", query = "SELECT gm.id FROM GameModel gm WHERE gm.id = :gameModelId"),
@NamedQuery(name = "GameModel.findByStatus", query = "SELECT a FROM GameModel a WHERE a.status = :status ORDER BY a.name ASC"),
@NamedQuery(name = "GameModel.findDistinctChildrenLabels", query = "SELECT DISTINCT(child.label) FROM VariableDescriptor child WHERE child.root.id = :containerId"),
@NamedQuery(name = "GameModel.findByName", query = "SELECT a FROM GameModel a WHERE a.name = :name"),
@NamedQuery(name = "GameModel.countByName", query = "SELECT count(gm.id) FROM GameModel gm WHERE gm.name = :name"),
@NamedQuery(name = "GameModel.findAll", query = "SELECT gm FROM GameModel gm"),
@NamedQuery(name = "GameModel.findAllInstantiations", query = "SELECT gm FROM GameModel gm where gm.basedOn.id = :id")
})
@Table(
indexes = {
@Index(columnList = "createdby_id"),
@Index(columnList = "basedon_id")
}
)
public class GameModel extends AbstractEntity implements DescriptorListI<VariableDescriptor>, InstanceOwner, Broadcastable, NamedEntity {
private static final long serialVersionUID = 1L;
@Transient
private Boolean canView = null;
@Transient
private Boolean canEdit = null;
@Transient
private Boolean canInstantiate = null;
@Transient
private Boolean canDuplicate = null;
/**
*
*/
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE)
@JsonView(Views.IndexI.class)
private Long id;
/**
*
*/
@Basic(optional = false)
@Pattern(regexp = "^.*\\S+.*$", message = "GameModel name cannot be empty")// must at least contains one non-whitespace character
private String name;
@Basic(optional = false)
private Integer UIVersion = 2;
@OneToMany(mappedBy = "gameModel", cascade = {CascadeType.ALL}, orphanRemoval = true)
private List<GameModelLanguage> languages = new ArrayList<>();
/**
*
*/
@Lob
//@Basic(fetch = FetchType.LAZY)
@JsonView(Views.ExtendedI.class)
private String description;
/**
*
*/
@Enumerated(value = EnumType.STRING)
@Column(length = 24, columnDefinition = "character varying(24) default 'LIVE'::character varying")
private Status status = Status.LIVE;
/**
*
*/
@Lob
//@Basic(fetch = FetchType.LAZY)
@JsonView(Views.ExtendedI.class)
private String comments;
/**
*
*/
@Temporal(TemporalType.TIMESTAMP)
@Column(columnDefinition = "timestamp with time zone")
private Date createdTime = new Date();
/**
*
*/
@ManyToOne(fetch = FetchType.LAZY)
@JsonIgnore
private User createdBy;
/**
* Link to original gameModel for "PLAY" gameModel
*/
@ManyToOne(fetch = FetchType.LAZY)
@JsonIgnore
private GameModel basedOn;
/*
*
*
* @JsonIgnore private Boolean template = true;
*/
/**
*
*/
@OneToMany(mappedBy = "gameModel", cascade = {CascadeType.ALL}, orphanRemoval = true, fetch = FetchType.LAZY)
@JsonIgnore
private Set<VariableDescriptor> variableDescriptors = new HashSet<>();
/**
* A list of Variable Descriptors that are at the root level of the
* hierarchy (other VariableDescriptor can be placed inside of a
* ListDescriptor's items List).
*/
@OneToMany(mappedBy = "root", cascade = {CascadeType.ALL}, fetch = FetchType.LAZY)
@OrderColumn(name = "gm_items_order")
//@JsonManagedReference
private List<VariableDescriptor> items = new ArrayList<>();
/**
* All gameModelScoped instances
*/
@JsonIgnore
@OneToMany(mappedBy = "gameModel", cascade = CascadeType.ALL)
private List<VariableInstance> privateInstances = new ArrayList<>();
/**
*
*/
@OneToMany(mappedBy = "gameModel", cascade = {CascadeType.ALL}, orphanRemoval = true, fetch = FetchType.LAZY)
@JsonManagedReference
@JsonIgnore
//@JsonView(Views.ExportI.class)
private List<Game> games = new ArrayList<>();
/**
* Holds all the scripts contained in current game model.
*/
@OneToMany(mappedBy = "scriptlibrary_GameModel", cascade = CascadeType.ALL, orphanRemoval = true)
@JsonView({Views.ExportI.class})
private List<GameModelContent> scriptLibrary = new ArrayList<>();
/**
*
*/
@OneToMany(mappedBy = "csslibrary_GameModel", cascade = CascadeType.ALL, orphanRemoval = true)
@JsonView({Views.ExportI.class})
private List<GameModelContent> cssLibrary = new ArrayList<>();
/**
*
*/
@OneToMany(mappedBy = "clientscriptlibrary_GameModel", cascade = CascadeType.ALL, orphanRemoval = true)
@JsonView({Views.ExportI.class})
private List<GameModelContent> clientScriptLibrary = new ArrayList<>();
/**
*
*/
@Embedded
private GameModelProperties properties = new GameModelProperties();
/**
* Holds a reference to the pages, used to serialize page and game model at
* the same time.
*/
@Transient
@JsonView({Views.ExportI.class})
private Map<String, JsonNode> pages;
/**
*
*/
public GameModel() {
}
/**
* @param name
*/
public GameModel(String name) {
this.name = name;
}
/**
* @param pageMap
*
* @throws RepositoryException
*/
@JsonCreator
public GameModel(@JsonProperty("pages") JsonNode pageMap) throws RepositoryException {
// Jackson entry point: builds a GameModel from a serialized form that may
// carry a "pages" tree (see the transient 'pages' field).
Map<String, JsonNode> map = new HashMap<>();
if (pageMap == null) {
// no pages in the payload: leave the transient map unset
return;
}
String curKey;
// copy each top-level field of the JSON object into a plain map, keyed by page id
Iterator<String> iterator = pageMap.fieldNames();
while (iterator.hasNext()) {
curKey = iterator.next();
map.put(curKey, pageMap.get(curKey));
}
// setPages() stores to the repository only when this.id is already set;
// here id is still null, so pages are kept in memory until @PostPersist
this.setPages(map);
}
/**
* Set the gameModel this PLAY gameModel is based on
*
* @param srcGameModel the original game model this gameModel is a duplicata of
*/
public void setBasedOn(GameModel srcGameModel) {
this.basedOn = srcGameModel;
}
/**
* Returns the original game model this gameModel is a duplicata of
*
* @return the original game model
*/
public GameModel getBasedOn() {
return this.basedOn;
}
/**
*
*/
public void propagateGameModel() {
//this.variableDescriptors.clear();
this.propagateGameModel(this);
}
/**
* Register new descriptor within the main descriptor list
* Method do nothing id descriptor is already registered
*
* @param vd the new descriptor to register
*/
/**
 * Register the given descriptor within this game model's global descriptor
 * set and link it back to this game model. No-op when already registered.
 *
 * @param vd the descriptor to register
 */
public void addToVariableDescriptors(VariableDescriptor vd) {
    Set<VariableDescriptor> descriptors = this.getVariableDescriptors();
    if (descriptors.contains(vd)) {
        return;
    }
    descriptors.add(vd);
    vd.setGameModel(this);
}
/**
* Remove
*
* @param vd
*/
public void removeFromVariableDescriptors(VariableDescriptor vd) {
this.getVariableDescriptors().remove(vd);
}
/**
* Make sur all descriptor (in the given list, deep) a registered within the main descriptor list
*
* @param list base list to fetch new descriptor from
*/
private void propagateGameModel(final DescriptorListI<? extends VariableDescriptor> list) {
// Depth-first walk: register every descriptor reachable from 'list' in the
// game model's flat descriptor set (addToVariableDescriptors is idempotent).
for (VariableDescriptor vd : list.getItems()) {
this.addToVariableDescriptors(vd);
if (vd instanceof DescriptorListI) {
// descriptor is itself a container (e.g. a ListDescriptor): recurse into its children
this.propagateGameModel((DescriptorListI<? extends VariableDescriptor>) vd);
}
}
}
@Override
public void merge(AbstractEntity n) {
// Copies the mergeable scalar state of another GameModel onto this one.
// Only name/description/comments/UIVersion/properties take part in the
// default merge; languages are deliberately excluded (see commented line).
if (n instanceof GameModel) {
GameModel other = (GameModel) n;
this.setName(other.getName());
this.setDescription(other.getDescription()); // Set description first, since fetching this lazy loaded attribute will cause an entity refresh
this.setComments(other.getComments());
this.setUIVersion(other.getUIVersion());
this.getProperties().merge(other.getProperties());
//this.setLanguages(ListUtils.mergeLists(this.getLanguages(), other.getLanguages())); // Note For Modeler-> not in default merge
} else {
// merging across entity types is a programming error
throw new WegasIncompatibleType(this.getClass().getSimpleName() + ".merge (" + n.getClass().getSimpleName() + ") is not possible");
}
}
/**
*
*/
@PrePersist
public void prePersist() {
this.setCreatedTime(new Date());
}
/**
* For serialization
*
* @return true if current user has view permission on this
*/
@JsonView(Views.LobbyI.class)
public Boolean getCanView() {
if (canView != null) {
return canView;
} else {
Helper.printWegasStackTrace(new Exception());
return true; // by design, non readable gameModel will throws an exception
}
}
/**
* @return true if current user has edit permission on this
*/
@JsonView(Views.LobbyI.class)
public Boolean getCanEdit() {
if (canEdit != null) {
return canEdit;
} else {
// I DO NOT LIKE VERY MUCH USING SHIRO WITHIN ENTITIES...
Helper.printWegasStackTrace(new Exception());
return SecurityUtils.getSubject().isPermitted("GameModel:Edit:gm" + this.id);
}
}
/**
* @return true if current user has duplicate permission on this
*/
@JsonView(Views.LobbyI.class)
public Boolean getCanDuplicate() {
if (canDuplicate != null) {
return canDuplicate;
} else {
// I DO NOT LIKE VERY MUCH USING SHIRO WITHIN ENTITIES...
Helper.printWegasStackTrace(new Exception());
return SecurityUtils.getSubject().isPermitted("GameModel:Duplicate:gm" + this.id);
}
}
/**
* @return true if current user has instantiate permission on this
*/
@JsonView(Views.LobbyI.class)
public Boolean getCanInstantiate() {
if (canInstantiate != null) {
return canInstantiate;
} else {
// I DO NOT LIKE VERY MUCH USING SHIRO WITHIN ENTITIES...
Helper.printWegasStackTrace(new Exception());
return SecurityUtils.getSubject().isPermitted("GameModel:Instantiate:gm" + this.id);
}
}
public void setCanView(Boolean canView) {
this.canView = canView;
}
public void setCanEdit(Boolean canEdit) {
this.canEdit = canEdit;
}
public void setCanInstantiate(Boolean canInstantiate) {
this.canInstantiate = canInstantiate;
}
public void setCanDuplicate(Boolean canDuplicate) {
this.canDuplicate = canDuplicate;
}
@Override
public Long getId() {
return id;
}
/**
* @param id
*/
public void setId(Long id) {
this.id = id;
}
@Override
public String getName() {
return name;
}
@Override
public void setName(String name) {
this.name = name;
}
public Integer getUIVersion() {
return UIVersion;
}
public void setUIVersion(Integer UIVersion) {
this.UIVersion = UIVersion;
}
/**
* @return Current GameModel's status
*/
@JsonIgnore
public Status getStatus() {
return status;
}
/**
* Change the status of the gameModel.
*
* @param status status to set
*/
@JsonIgnore
public void setStatus(Status status) {
this.status = status;
}
/**
* get the set of all descriptor from the game model
*
* @return all variable descriptors
*/
@JsonIgnore
public Set<VariableDescriptor> getVariableDescriptors() {
return variableDescriptors;
}
/**
* @param variableDescriptors
*/
public void setVariableDescriptors(Set<VariableDescriptor> variableDescriptors) {
this.variableDescriptors = new HashSet<>();
for (VariableDescriptor vd : variableDescriptors) {
this.addToVariableDescriptors(vd);
}
}
@Override
public List<VariableInstance> getPrivateInstances() {
return privateInstances;
}
/**
 * Collect every variable instance attached to this game model: the
 * game-model-scoped (private) instances first, then all instances owned by
 * every team of every game.
 *
 * @return a fresh list of all instances
 */
@Override
public List<VariableInstance> getAllInstances() {
    List<VariableInstance> all = new ArrayList<>(getPrivateInstances());
    for (Game game : getGames()) {
        for (Team team : game.getTeams()) {
            all.addAll(team.getAllInstances());
        }
    }
    return all;
}
public void setPrivateInstances(List<VariableInstance> privateInstances) {
this.privateInstances = privateInstances;
}
/**
* @return a list of Variable Descriptors that are at the root level of the
* hierarchy (other VariableDescriptor can be placed inside of a
* ListDescriptor's items List)
*/
@JsonIgnore
public List<VariableDescriptor> getChildVariableDescriptors() {
return this.getItems();
}
/**
* @param variableDescriptors
*/
@JsonProperty
public void setChildVariableDescriptors(List<VariableDescriptor> variableDescriptors) {
this.setItems(variableDescriptors);
}
@Override
public void setChildParent(VariableDescriptor child) {
child.setRoot(this);
}
/**
* @return the games
*/
@JsonIgnore
public List<Game> getGames() {
return games;
}
/**
* @param games the games to set
*/
public void setGames(List<Game> games) {
this.games = games;
for (Game g : games) {
g.setGameModel(this);
}
}
/**
* @param game
*/
public void addGame(Game game) {
this.getGames().add(game);
game.setGameModel(this);
//game.setGameModelId(this.getId());
}
/**
* @return the scriptLibrary
*/
@JsonIgnore
public List<GameModelContent> getScriptLibraryList() {
return scriptLibrary;
}
/**
* @param scriptLibrary the scriptLibrary to set
*/
@JsonIgnore
public void setScriptLibraryList(List<GameModelContent> scriptLibrary) {
this.scriptLibrary = scriptLibrary;
}
/**
* @return all players from all teams and all games
*/
@JsonIgnore
@Override
public List<Player> getPlayers() {
List<Player> players = new ArrayList<>();
for (Game g : this.getGames()) {
players.addAll(g.getPlayers());
}
return players;
}
@Override
@JsonIgnore
public Player getAnyLivePlayer() {
for (Game game : this.getGames()) {
Player p = game.getAnyLivePlayer();
if (p != null) {
return p;
}
}
return null;
}
/**
* Return a test player.
* It may be a player in any team of a DebugGame or a player in a DebugTeam
*
* @return testPlayer
*/
/**
 * Return a test player: a live player from any DebugGame, or failing that,
 * a live player from a DebugTeam of a regular game. Teams of a DebugGame
 * are not scanned individually.
 *
 * @return a test player, or null if none exists
 */
@JsonIgnore
public Player findTestPlayer() {
    for (Game game : this.getGames()) {
        if (game instanceof DebugGame) {
            Player candidate = game.getAnyLivePlayer();
            if (candidate != null) {
                return candidate;
            }
        } else {
            for (Team team : game.getTeams()) {
                if (!(team instanceof DebugTeam)) {
                    continue;
                }
                Player candidate = team.getAnyLivePlayer();
                if (candidate != null) {
                    return candidate;
                }
            }
        }
    }
    return null;
}
/**
* @return the createdTime
*/
public Date getCreatedTime() {
return (createdTime != null ? new Date(createdTime.getTime()) : null);
}
/**
* @param createdTime the createdTime to set
*/
public void setCreatedTime(Date createdTime) {
this.createdTime = createdTime != null ? new Date(createdTime.getTime()) : null;
}
/**
* @return the properties
*/
public GameModelProperties getProperties() {
return this.properties;
}
/**
* @param properties the properties to set
*/
public void setProperties(GameModelProperties properties) {
this.properties = properties;
}
/**
* @return the cssLibrary
*/
@JsonIgnore
public List<GameModelContent> getCssLibraryList() {
return cssLibrary;
}
/**
* @param cssLibrary the cssLibrary to set
*/
@JsonIgnore
public void setCssLibraryList(List<GameModelContent> cssLibrary) {
this.cssLibrary = cssLibrary;
}
/**
 * Index a library (list of contents) by content key. If two entries share a
 * key, the later one wins, mirroring plain Map.put semantics.
 *
 * @param library the library entries to index
 * @return a mutable map from content key to entry
 */
private Map<String, GameModelContent> getLibraryAsMap(List<GameModelContent> library) {
    Map<String, GameModelContent> byKey = new HashMap<>();
    for (GameModelContent content : library) {
        byKey.put(content.getContentKey(), content);
    }
    return byKey;
}
public Map<String, GameModelContent> getCssLibrary() {
return getLibraryAsMap(cssLibrary);
}
public void setCssLibrary(Map<String, GameModelContent> library) {
this.cssLibrary = new ArrayList<>();
for (Entry<String, GameModelContent> entry : library.entrySet()) {
String key = entry.getKey();
GameModelContent gmc = entry.getValue();
gmc.setCsslibrary_GameModel(this);
gmc.setContentKey(key);
cssLibrary.add(gmc);
}
}
/**
* @return the clientScriptLibrary
*/
@JsonIgnore
public List<GameModelContent> getClientScriptLibraryList() {
return clientScriptLibrary;
}
public Map<String, GameModelContent> getScriptLibrary() {
return getLibraryAsMap(scriptLibrary);
}
public Map<String, GameModelContent> getClientScriptLibrary() {
return getLibraryAsMap(clientScriptLibrary);
}
public void setScriptLibrary(Map<String, GameModelContent> library) {
this.scriptLibrary = new ArrayList<>();
for (Entry<String, GameModelContent> entry : library.entrySet()) {
String key = entry.getKey();
GameModelContent gmc = entry.getValue();
gmc.setScriptlibrary_GameModel(this);
gmc.setContentKey(key);
scriptLibrary.add(gmc);
}
}
public void setClientScriptLibrary(Map<String, GameModelContent> library) {
this.clientScriptLibrary = new ArrayList<>();
for (Entry<String, GameModelContent> entry : library.entrySet()) {
String key = entry.getKey();
GameModelContent gmc = entry.getValue();
gmc.setClientscriptlibrary_GameModel(this);
gmc.setContentKey(key);
clientScriptLibrary.add(gmc);
}
}
/**
* @param key
*
* @return the clientScript matching the key or null
*/
public GameModelContent getClientScript(String key) {
return this.getGameModelContent(clientScriptLibrary, key);
}
/**
* @param key
*
* @return the clientScript matching the key or null
*/
public GameModelContent getScript(String key) {
return this.getGameModelContent(scriptLibrary, key);
}
/**
* @param key
*
* @return the clientScript matching the key or null
*/
public GameModelContent getCss(String key) {
return this.getGameModelContent(cssLibrary, key);
}
/**
 * Linear lookup of a library entry by its content key.
 *
 * @param list the library to search
 * @param key  the content key to match
 * @return the first entry whose key equals the given key, or null
 */
public GameModelContent getGameModelContent(List<GameModelContent> list, String key) {
for (GameModelContent gmc : list) {
if (gmc.getContentKey().equals(key)) {
return gmc;
}
}
return null;
}
/**
* @param clientScriptLibrary the clientScriptLibrary to set
*/
@JsonIgnore
public void setClientScriptLibraryList(List<GameModelContent> clientScriptLibrary) {
this.clientScriptLibrary = clientScriptLibrary;
}
/**
* @return the pages
*/
public Map<String, JsonNode> getPages() {
// Do not even try to fetch pages from the repository if the gameModel defines a pagesUri
// (an external pagesUri means pages live elsewhere, so an empty map is returned).
if (Helper.isNullOrEmpty(getProperties().getPagesUri())) {
// Pages is AutoCloseable: the repository session is released by try-with-resources
try (final Pages pagesDAO = new Pages(this.id)) {
return pagesDAO.getPagesContent();
} catch (RepositoryException ex) {
// repository unavailable or unreadable: degrade to an empty page set
return new HashMap<>();
}
} else {
return new HashMap<>();
}
}
/**
* @param pageMap
*/
public final void setPages(Map<String, JsonNode> pageMap) {
this.pages = pageMap;
if (this.id != null) {
this.storePages();
}
}
@Override
@JsonView(Views.ExportI.class)
public List<VariableDescriptor> getItems() {
return this.items;
}
@Override
public void setItems(List<VariableDescriptor> items) {
this.items = new ArrayList<>();
for (VariableDescriptor vd : items) {
this.addItem(vd);
}
}
@Override
@JsonIgnore
public GameModel getGameModel() {
return this;
}
@PostPersist
private void storePages() {
// JPA lifecycle callback: runs after the entity got its id, so the page
// repository (keyed by id) can finally be written. Also invoked directly
// from setPages() when the id already exists.
if (this.pages != null) {
try (final Pages pagesDAO = new Pages(this.id)) {
pagesDAO.delete(); // Remove existing pages
// Pay Attention: this.pages != this.getPages() !
// this.pages contains deserialized pages, getPages() fetchs them from the jackrabbit repository
for (Entry<String, JsonNode> p : this.pages.entrySet()) { // Add all pages
pagesDAO.store(new Page(p.getKey(), p.getValue()));
}
} catch (RepositoryException ex) {
// NOTE(review): failure is only reported to stderr — presumably intentional
// best-effort behavior; consider a proper logger. Do not rethrow from a
// lifecycle callback without confirming transaction semantics.
System.err.println("Failed to create repository for GameModel " + this.id);
}
}
}
/**
* @return the description
*/
public String getDescription() {
return description;
}
/**
* @param description the description to set
*/
public void setDescription(String description) {
this.description = description;
}
public String getComments() {
return comments;
}
public void setComments(String comments) {
this.comments = comments;
}
/**
* @return the createdBy
*/
@JsonIgnore
public User getCreatedBy() {
return createdBy;
}
/**
* @param createdBy the createdBy to set
*/
public void setCreatedBy(User createdBy) {
this.createdBy = createdBy;
}
/**
* @return name of the user who created this or null if user no longer
* exists
*/
public String getCreatedByName() {
if (this.getCreatedBy() != null) {
return this.getCreatedBy().getName();
}
return null;
}
/**
* @param createdByName
*/
public void setCreatedByName(String createdByName) {
// Here so game deserialization works
}
@JsonIgnore
public List<GameModelLanguage> getRawLanguages() {
return this.languages;
}
public List<GameModelLanguage> getLanguages() {
return Helper.copyAndSort(this.languages, new EntityComparators.OrderComparator<>());
}
public void setLanguages(List<GameModelLanguage> languages) {
this.languages = languages;
int i = 0;
for (GameModelLanguage lang : this.languages) {
lang.setIndexOrder(i++);
lang.setGameModel(this);
}
}
/**
*
* @param code
*
* @return
*/
public GameModelLanguage getLanguageByCode(String code) {
if (code != null) {
for (GameModelLanguage lang : this.getLanguages()) {
if (code.equals(lang.getCode())){
return lang;
}
}
}
return null;
}
/**
* get list of language refName, sorted according to player preferences if such a player is provided;
*
* @param player may be null
*
* @return list
*/
public List<String> getPreferredLanguagesRefName(Player player) {
List<GameModelLanguage> sortedLanguages = getLanguages();
ArrayList<String> langs = new ArrayList<>(sortedLanguages.size());
for (GameModelLanguage gml : sortedLanguages) {
if (player != null && gml.getRefName().equals(player.getRefName())) {
langs.add(0, gml.getRefName());
} else {
langs.add(gml.getRefName());
}
}
return langs;
}
/**
* get list of language code, the given one first
*
*
* @param preferredRefName preferred refName, may be null or empty
*
* @return list
*/
/**
 * List every language code in the game model's order, moving the language
 * whose refName matches the given one to the front.
 *
 * @param preferredRefName preferred refName, may be null or empty
 * @return language codes, preferred first
 */
public List<String> getPreferredLanguagesCode(String preferredRefName) {
    List<GameModelLanguage> ordered = getLanguages();
    ArrayList<String> codes = new ArrayList<>(ordered.size());
    for (GameModelLanguage language : ordered) {
        if (language.getRefName().equals(preferredRefName)) {
            codes.add(0, language.getCode());
        } else {
            codes.add(language.getCode());
        }
    }
    return codes;
}
/**
* get list of language refName, the given one first
*
*
* @param preferredRefName preferred refName, may be null or empty
*
* @return list
*/
public List<String> getPreferredLanguagesRefName(String preferredRefName) {
List<GameModelLanguage> sortedLanguages = getLanguages();
ArrayList<String> langs = new ArrayList<>(sortedLanguages.size());
for (GameModelLanguage gml : sortedLanguages) {
if (gml.getRefName().equals(preferredRefName)) {
langs.add(0, gml.getRefName());
} else {
langs.add(gml.getRefName());
}
}
return langs;
}
/**
* @return the template
*/
public Boolean getTemplate() {
return status != Status.PLAY;
}
/**
* @param template the template to set public void setTemplate(Boolean
* template) { this.template = template; }
*/
/**
* TODO: select game.* FROM GAME where dtype like 'DEBUGGAME' and
* gamemodelid = this.getId()
*
* @return true if the gameModel has a DebugGame
*/
public boolean hasDebugGame() {
for (Game g : getGames()) {
if (g instanceof DebugGame) {
return true;
}
}
return false;
}
@Override
@JsonIgnore
public String getChannel() {
return Helper.GAMEMODEL_CHANNEL_PREFIX + getId();
}
@Override
public Collection<WegasPermission> getRequieredUpdatePermission() {
return WegasPermission.getAsCollection(this.getAssociatedWritePermission());
}
@Override
public Collection<WegasPermission> getRequieredReadPermission() {
return WegasPermission.getAsCollection(this.getAssociatedReadPermission());
}
@Override
public Collection<WegasPermission> getRequieredCreatePermission() {
if (this.getStatus() == Status.PLAY) {
return WegasMembership.TRAINER;
} else {
return WegasMembership.SCENARIST;
}
}
@Override
public WegasPermission getAssociatedReadPermission() {
return new WegasEntityPermission(this.getId(), WegasEntityPermission.Level.READ, WegasEntityPermission.EntityType.GAMEMODEL);
}
@Override
public WegasPermission getAssociatedWritePermission() {
return new WegasEntityPermission(this.getId(), WegasEntityPermission.Level.WRITE, WegasEntityPermission.EntityType.GAMEMODEL);
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, List<AbstractEntity>> getEntities() {
Map<String, List<AbstractEntity>> map = new HashMap<>();
ArrayList<AbstractEntity> entities = new ArrayList<>();
entities.add(this);
map.put(this.getChannel(), entities);
return map;
}
/**
* <ul>
* <li>PLAY: {@link Status#PLAY}
* <li>LIVE: {@link Status#LIVE}</li>
* <li>BIN: {@link Status#BIN}</li>
* <li>DELETE: {@link Status#DELETE}</li>
* <li>SUPPRESSED: {@link Status#SUPPRESSED}</li>
* </ul>
*/
public enum Status {
/**
* Not a template game model but one linked to an effective game
*/
PLAY,
/**
* Template GameModel
*/
LIVE,
/**
* Template GameModel in the waste bin
*/
BIN,
/**
* Template GameModel scheduled for deletion
*/
DELETE,
/**
* Does not exist anymore. Actually, this status should never persist.
* Used internally to mark a missing game.
*/
SUPPRESSED
}
/* try transient anotation on field "pages". Problem with anotation mixin'
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
this.pages = new HashMap<>();
}*/
}
| GameModel default to UI v1
Unknown versions are most certainly old GameModels
| wegas-core/src/main/java/com/wegas/core/persistence/game/GameModel.java | GameModel default to UI v1 |
|
Java | mit | b049ea18c96eab0c7dae2609f616b8a314b35d50 | 0 | david540/cpp-2a-info,matthieu637/cpp-2a-info,david540/cpp-2a-info,david540/cpp-2a-info,matthieu637/cpp-2a-info,matthieu637/cpp-2a-info | package core;
/**
 * A buy order. Orders by descending price (highest bid first), breaking
 * ties by ascending timestamp (earlier order wins).
 */
public class Achat extends Ordre {

    public Achat(int id_ordre, Action action, float prix, int volume, Joueur joueur, long temps) {
        super(id_ordre, action, prix, volume, joueur, temps);
    }

    @Override
    public int compareTo(Ordre o) {
        int byPrice = Float.compare(o.prix, this.prix);
        if (byPrice != 0) {
            return byPrice;
        }
        return Long.compare(this.temps, o.temps);
    }
}
| 2017/SimBourse/src/core/Achat.java | package core;
public class Achat extends Ordre {
public Achat(int id_ordre, Action action, float prix, int volume, Joueur joueur) {
super(id_ordre, action, prix, volume, joueur);
}
@Override
public int compareTo(Ordre o) {
int c = Float.compare(o.prix, this.prix);
if (c == 0)
return Long.compare(this.temps, o.temps);
return c;
}
}
| Update Achat.java | 2017/SimBourse/src/core/Achat.java | Update Achat.java |
|
Java | mit | ca6aeda3f71daf15c1729a72f58183e95da7b642 | 0 | oaplatform/oap,oaplatform/oap | /*
* The MIT License (MIT)
*
* Copyright (c) Open Application Platform Authors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package oap.json;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.fasterxml.jackson.datatype.joda.JodaModule;
import com.fasterxml.jackson.datatype.joda.cfg.JacksonJodaDateFormat;
import com.fasterxml.jackson.datatype.joda.deser.DateTimeDeserializer;
import com.fasterxml.jackson.datatype.joda.ser.DateTimeSerializer;
import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
import oap.io.Resources;
import oap.testng.AbstractPerformance;
import oap.util.Dates;
import org.joda.time.DateTime;
import org.joda.time.ReadableInstant;
import org.testng.annotations.Test;
import java.util.Map;
import java.util.Optional;
/**
 * Micro-benchmarks for JSON (de)serialization: compares a plain Jackson
 * ObjectMapper against one with the Afterburner module, and compares
 * parsing into nullable fields vs Optional fields.
 */
public class ParserPerformance extends AbstractPerformance {
// shared Joda date format used by both the serializer and deserializer below
private static final JacksonJodaDateFormat jodaDateFormat = new JacksonJodaDateFormat( Dates.FORMAT_MILLIS );
// benchmark fixture: a full year of data loaded from the classpath
public static String yearJson = Resources.readString( ParserPerformance.class, "year.json" ).get();
@SuppressWarnings( "unchecked" )
private static <T extends ReadableInstant> JsonDeserializer<T> forType( Class<T> cls ) {
// DateTimeDeserializer is raw-typed; the cast is safe for ReadableInstant subtypes
return ( JsonDeserializer<T> ) new DateTimeDeserializer( cls, jodaDateFormat );
}
@Test
public void performance() {
// baseline mapper: Jdk8 + Joda + Oap modules, field access, no nulls in output
final ObjectMapper mapper = new ObjectMapper();
mapper.registerModule( new Jdk8Module() );
final JodaModule module = new JodaModule();
module.addDeserializer( DateTime.class, forType( DateTime.class ) );
module.addSerializer( DateTime.class, new DateTimeSerializer( jodaDateFormat ) );
mapper.registerModule( module );
mapper.enable( DeserializationFeature.USE_LONG_FOR_INTS );
mapper.disable( DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES );
mapper.disable( SerializationFeature.WRITE_DATES_AS_TIMESTAMPS );
mapper.setVisibility( PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY );
mapper.setSerializationInclusion( JsonInclude.Include.NON_NULL );
mapper.registerModule( new OapJsonModule() );
// round-trip: parse fixture into a Map, then serialize it back
benchmark( "mapParser-jackson", 5000, 5,
i -> mapper.writeValueAsString( mapper.readValue( yearJson, Map.class ) ) );
// second mapper: identical configuration plus AfterburnerModule (bytecode-generated accessors)
final ObjectMapper mapper2 = new ObjectMapper();
mapper2.registerModule( new Jdk8Module() );
mapper2.registerModule( module );
mapper2.enable( DeserializationFeature.USE_LONG_FOR_INTS );
mapper2.disable( DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES );
mapper2.disable( SerializationFeature.WRITE_DATES_AS_TIMESTAMPS );
mapper2.setVisibility( PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY );
mapper2.setSerializationInclusion( JsonInclude.Include.NON_NULL );
mapper2.registerModule( new OapJsonModule() );
mapper2.registerModule( new AfterburnerModule() );
benchmark( "mapParser-jackson2", 5000, 5, i ->
mapper2.writeValueAsString( mapper2.readValue( yearJson, Map.class ) ) );
}
@Test
public void testNullVsOptional() {
// fixtures: the same structure serialized once empty, once fully populated (nested)
final String testEmpty = Binder.json.marshal( new TestNull( null, null, null ) );
final String testNotEmpty = Binder.json.marshal( new TestNull( "123", "567", new TestNull( "q", "w", new TestNull( null, null, null ) ) ) );
System.out.println( testEmpty );
System.out.println( testNotEmpty );
// parse into plain nullable fields
benchmark( "parse-null", 5000000, 5, ( i ) -> {
Binder.json.unmarshal( TestNull.class, testEmpty );
Binder.json.unmarshal( TestNull.class, testNotEmpty );
} );
// parse the same payloads into Optional fields to measure wrapping overhead
benchmark( "parse-optional-empty", 5000000, 5, ( i ) -> {
Binder.json.unmarshal( TestOptional.class, testEmpty );
Binder.json.unmarshal( TestOptional.class, testNotEmpty );
} );
}
// fixture type with plain nullable fields
public static class TestNull {
public String test1;
public String test2;
public TestNull test3;
public TestNull( String test1, String test2, TestNull test3 ) {
this.test1 = test1;
this.test2 = test2;
this.test3 = test3;
}
public TestNull() {
}
}
// fixture type mirroring TestNull with Optional fields
public static class TestOptional {
public Optional<String> test1;
public Optional<String> test2;
public Optional<TestOptional> test3;
public TestOptional( Optional<String> test1, Optional<String> test2, Optional<TestOptional> test3 ) {
this.test1 = test1;
this.test2 = test2;
this.test3 = test3;
}
public TestOptional() {
}
}
}
| oap-json/src/test/java/oap/json/ParserPerformance.java | /*
* The MIT License (MIT)
*
* Copyright (c) Open Application Platform Authors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package oap.json;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.fasterxml.jackson.datatype.joda.JodaModule;
import com.fasterxml.jackson.datatype.joda.cfg.JacksonJodaDateFormat;
import com.fasterxml.jackson.datatype.joda.deser.DateTimeDeserializer;
import com.fasterxml.jackson.datatype.joda.ser.DateTimeSerializer;
import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
import oap.io.Resources;
import oap.testng.AbstractPerformance;
import oap.util.Dates;
import org.joda.time.DateTime;
import org.joda.time.ReadableInstant;
import org.testng.annotations.Test;
import java.util.Map;
public class ParserPerformance extends AbstractPerformance {
public static String yearJson = Resources.readString( ParserPerformance.class, "year.json" ).get();
private static final JacksonJodaDateFormat jodaDateFormat = new JacksonJodaDateFormat( Dates.FORMAT_MILLIS );
@SuppressWarnings( "unchecked" )
private static <T extends ReadableInstant> JsonDeserializer<T> forType( Class<T> cls ) {
return (JsonDeserializer<T>) new DateTimeDeserializer( cls, jodaDateFormat );
}
@Test
public void performance() {
final ObjectMapper mapper = new ObjectMapper();
mapper.registerModule( new Jdk8Module() );
final JodaModule module = new JodaModule();
module.addDeserializer( DateTime.class, forType( DateTime.class ) );
module.addSerializer( DateTime.class, new DateTimeSerializer( jodaDateFormat ) );
mapper.registerModule( module );
mapper.enable( DeserializationFeature.USE_LONG_FOR_INTS );
mapper.disable( DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES );
mapper.disable( SerializationFeature.WRITE_DATES_AS_TIMESTAMPS );
mapper.setVisibility( PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY );
mapper.setSerializationInclusion( JsonInclude.Include.NON_NULL );
mapper.registerModule( new OapJsonModule() );
benchmark( "mapParser-jackson", 5000, 5,
i -> mapper.writeValueAsString( mapper.readValue( yearJson, Map.class ) ) );
final ObjectMapper mapper2 = new ObjectMapper();
mapper2.registerModule( new Jdk8Module() );
mapper2.registerModule( module );
mapper2.enable( DeserializationFeature.USE_LONG_FOR_INTS );
mapper2.disable( DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES );
mapper2.disable( SerializationFeature.WRITE_DATES_AS_TIMESTAMPS );
mapper2.setVisibility( PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY );
mapper2.setSerializationInclusion( JsonInclude.Include.NON_NULL );
mapper2.registerModule( new OapJsonModule() );
mapper2.registerModule( new AfterburnerModule() );
benchmark( "mapParser-jackson2", 5000, 5, i ->
mapper2.writeValueAsString( mapper2.readValue( yearJson, Map.class ) ) );
}
}
| add: null vs optional perf
| oap-json/src/test/java/oap/json/ParserPerformance.java | add: null vs optional perf |
|
Java | mit | 0a327f84be2601397b0eed0cd1939d874d7756eb | 0 | jukta/j-tahoe,jukta/j-tahoe | package com.jukta.jtahoe.taglib;
import com.jukta.jtahoe.BlockFactory;
import com.jukta.jtahoe.DataHandlerProvider;
import com.jukta.jtahoe.RuntimeBlockFactory;
import com.jukta.jtahoe.gen.xml.XmlBlockModelProvider;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
/**
* @since 1.0
*/
/**
 * Bootstraps jTahoe when the web application starts: builds a runtime block
 * factory from the configured blocks directory and, when configured,
 * instantiates the data handler provider. Both are published as servlet
 * context attributes for the tag library to pick up.
 *
 * @since 1.0
 */
public class ContextListener implements ServletContextListener {

    @Override
    public void contextInitialized(ServletContextEvent sce) {
        try {
            ServletContext context = sce.getServletContext();

            // block factory is always required
            BlockFactory factory = new RuntimeBlockFactory(
                    new XmlBlockModelProvider(context.getInitParameter("blocksDir")));
            context.setAttribute("_jTahoe_blockFactory", factory);

            // data handler provider is optional: only wired when configured
            String providerClass = context.getInitParameter("dataHandlerProviderClass");
            if (providerClass != null) {
                DataHandlerProvider provider =
                        (DataHandlerProvider) Class.forName(providerClass).newInstance();
                context.setAttribute("_jTahoe_dataHandlerProvider", provider);
            }
        } catch (Exception e) {
            // fail deployment: the application cannot work without its block factory
            throw new RuntimeException(e);
        }
    }

    @Override
    public void contextDestroyed(ServletContextEvent sce) {
        // nothing to clean up
    }
}
| j-tahoe-taglib/src/main/java/com/jukta/jtahoe/taglib/ContextListener.java | package com.jukta.jtahoe.taglib;
import com.jukta.jtahoe.BlockFactory;
import com.jukta.jtahoe.DataHandlerProvider;
import com.jukta.jtahoe.RuntimeBlockFactory;
import com.jukta.jtahoe.gen.xml.XmlBlockModelProvider;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
/**
* @since 1.0
*/
public class ContextListener implements ServletContextListener {

    /**
     * Bootstraps j-tahoe for this web application: builds the {@link BlockFactory} from the
     * "blocksDir" init parameter and publishes it as the "_jTahoe_blockFactory" context
     * attribute. If (and only if) a "dataHandlerProviderClass" init parameter is configured,
     * that class is instantiated reflectively and published as "_jTahoe_dataHandlerProvider".
     */
    @Override
    public void contextInitialized(ServletContextEvent sce) {
        try {
            ServletContext context = sce.getServletContext();
            String blocksDir = context.getInitParameter("blocksDir");
            BlockFactory blockFactory = new RuntimeBlockFactory(new XmlBlockModelProvider(blocksDir));
            context.setAttribute("_jTahoe_blockFactory", blockFactory);
            // The data handler provider is optional: Class.forName(null) would throw a
            // NullPointerException, so only instantiate it when the parameter is present.
            String dataHandlerProviderClass = context.getInitParameter("dataHandlerProviderClass");
            if (dataHandlerProviderClass != null) {
                DataHandlerProvider dataHandlerProvider = (DataHandlerProvider) Class.forName(dataHandlerProviderClass).newInstance();
                context.setAttribute("_jTahoe_dataHandlerProvider", dataHandlerProvider);
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public void contextDestroyed(ServletContextEvent sce) {
    }
}
| bugfixing
| j-tahoe-taglib/src/main/java/com/jukta/jtahoe/taglib/ContextListener.java | bugfixing |
|
Java | lgpl-2.1 | dfc37566876348e5326e2ad162a5f4e332795053 | 0 | lopescan/languagetool,jimregan/languagetool,jimregan/languagetool,jimregan/languagetool,lopescan/languagetool,languagetool-org/languagetool,lopescan/languagetool,lopescan/languagetool,languagetool-org/languagetool,jimregan/languagetool,languagetool-org/languagetool,lopescan/languagetool,jimregan/languagetool,languagetool-org/languagetool,languagetool-org/languagetool | /* LanguageTool, a natural language style checker
* Copyright (C) 2015 Daniel Naber (http://www.danielnaber.de)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package org.languagetool.rules.patterns;
import org.jetbrains.annotations.NotNull;
import org.languagetool.AnalyzedSentence;
import org.languagetool.Language;
import org.languagetool.rules.RuleMatch;
import org.languagetool.tools.StringTools;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Matches 'regexp' elements from XML rules against sentences.
* @since 3.2
*/
/**
 * Matches 'regexp' elements from XML rules against sentences.
 * @since 3.2
 */
class RegexPatternRule extends AbstractPatternRule implements RuleMatcher {

  // Extracts the content of <suggestion>...</suggestion> elements from the rule message.
  private static final Pattern SUGGESTION_PATTERN = Pattern.compile("<suggestion>(.*?)</suggestion>"); // TODO: this needs to be cleaned up, there should be no need to parse this?

  private final Pattern pattern;
  // regex group whose span is underlined in the match (0 = whole match)
  private final int markGroup;

  RegexPatternRule(String id, String description, String message, String suggestionsOutMsg, Language language, Pattern regex, int regexpMark) {
    super(id, description, language, regex, regexpMark);
    this.message = message;
    this.pattern = regex;
    this.suggestionsOutMsg = suggestionsOutMsg;
    markGroup = regexpMark;
  }

  public Pattern getPattern() {
    return pattern;
  }

  /**
   * Runs the rule's regex over the plain sentence text and creates one {@link RuleMatch}
   * per non-overlapping occurrence; the marked region is regex group {@code markGroup}.
   */
  @Override
  public RuleMatch[] match(AnalyzedSentence sentenceObj) throws IOException {
    String sentence = sentenceObj.getText();
    Matcher matcher = pattern.matcher(sentence);
    int startPos = 0;
    List<RuleMatch> matches = new ArrayList<>();
    while (matcher.find(startPos)) {
      String msg = replaceBackRefs(matcher, message);
      boolean sentenceStart = matcher.start(0) == 0;
      List<String> suggestions = extractSuggestions(matcher, msg);
      List<String> matchSuggestions = getMatchSuggestions(sentence, matcher);
      msg = replaceMatchElements(msg, matchSuggestions);
      int markStart = matcher.start(markGroup);
      int markEnd = matcher.end(markGroup);
      RuleMatch ruleMatch = new RuleMatch(this, markStart, markEnd, msg, null, sentenceStart, null);
      List<String> allSuggestions = new ArrayList<>();
      // <match>-based suggestions take precedence over the ones embedded in the message
      if (matchSuggestions.size() > 0) {
        allSuggestions.addAll(matchSuggestions);
      } else {
        allSuggestions.addAll(suggestions);
        List<String> extendedSuggestions = extractSuggestions(matcher, getSuggestionsOutMsg());
        allSuggestions.addAll(extendedSuggestions);
      }
      ruleMatch.setSuggestedReplacements(allSuggestions);
      matches.add(ruleMatch);
      startPos = matcher.end();
    }
    return matches.toArray(new RuleMatch[0]);
  }

  /**
   * Builds suggestions from the rule's &lt;match&gt; elements by applying each element's
   * regex replacement (and case conversion) to the matched error text.
   */
  @NotNull
  private List<String> getMatchSuggestions(String sentence, Matcher matcher) {
    List<String> matchSuggestions = new ArrayList<>();
    for (Match match : getSuggestionMatches()) {
      String errorText = sentence.substring(matcher.start(), matcher.end());
      String regexReplace = match.getRegexReplace();
      if (regexReplace != null) {
        String suggestion = match.getRegexMatch().matcher(errorText).replaceFirst(regexReplace);
        suggestion = CaseConversionHelper.convertCase(match.getCaseConversionType(), suggestion, errorText, getLanguage());
        matchSuggestions.add(suggestion);
      }
    }
    return matchSuggestions;
  }

  /**
   * Replaces the i-th {@code <suggestion>} element in {@code msg} with the i-th entry of
   * {@code suggestions}; surplus elements are left untouched.
   */
  private String replaceMatchElements(String msg, List<String> suggestions) {
    Matcher sMatcher = SUGGESTION_PATTERN.matcher(msg);
    // StringBuffer required: Matcher.appendReplacement(StringBuilder, ...) only exists since Java 9
    StringBuffer sb = new StringBuffer();
    int i = 0;
    while (sMatcher.find()) {
      if (i < suggestions.size()) {
        sMatcher.appendReplacement(sb, "<suggestion>" + suggestions.get(i++) + "</suggestion>");
      }
    }
    sMatcher.appendTail(sb);
    return sb.toString();
  }

  /**
   * Extracts the suggestion texts from {@code msg}, expanding back-references; the first
   * character is uppercased when the match starts at the beginning of the sentence.
   */
  private List<String> extractSuggestions(Matcher matcher, String msg) {
    Matcher sMatcher = SUGGESTION_PATTERN.matcher(msg);
    int startPos = 0;
    List<String> result = new ArrayList<>();
    while (sMatcher.find(startPos)) {
      String suggestion = sMatcher.group(1);
      if (matcher.start() == 0) {
        result.add(replaceBackRefs(matcher, StringTools.uppercaseFirstChar(suggestion)));
      } else {
        result.add(replaceBackRefs(matcher, suggestion));
      }
      startPos = sMatcher.end();
    }
    return result;
  }

  /**
   * Expands back-references like {@code \1} in {@code msg} with the corresponding regex
   * group contents; references to unmatched (optional) groups are removed.
   */
  private String replaceBackRefs(Matcher matcher, String msg) {
    String replacedMsg = msg;
    // iterate from the highest group down so that "\1" cannot clobber the prefix of "\10"
    for (int i = matcher.groupCount(); i >= 0; i--) {
      String replacement = matcher.group(i);
      if (replacement != null) {
        replacedMsg = replacedMsg.replace("\\" + i, replacement);
      }
    }
    replacedMsg = replacedMsg.replaceAll("\\\\\\d+", ""); // optional matches need to be replaced by empty string
    return replacedMsg;
  }

  @Override
  public String toString() {
    return pattern.toString() + "/flags:" + pattern.flags();
  }
}
| languagetool-core/src/main/java/org/languagetool/rules/patterns/RegexPatternRule.java | /* LanguageTool, a natural language style checker
* Copyright (C) 2015 Daniel Naber (http://www.danielnaber.de)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package org.languagetool.rules.patterns;
import org.jetbrains.annotations.NotNull;
import org.languagetool.AnalyzedSentence;
import org.languagetool.Language;
import org.languagetool.rules.RuleMatch;
import org.languagetool.tools.StringTools;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Matches 'regexp' elements from XML rules against sentences.
* @since 3.2
*/
/**
 * Matches 'regexp' elements from XML rules against sentences.
 * @since 3.2
 */
class RegexPatternRule extends AbstractPatternRule implements RuleMatcher {

  // Extracts the content of <suggestion>...</suggestion> elements from the rule message.
  private static final Pattern suggestionPattern = Pattern.compile("<suggestion>(.*?)</suggestion>"); // TODO: this needs to be cleaned up, there should be no need to parse this?

  private final Pattern pattern;
  // regex group whose span is underlined in the match (0 = whole match)
  private final int markGroup;

  RegexPatternRule(String id, String description, String message, String suggestionsOutMsg, Language language, Pattern regex, int regexpMark) {
    super(id, description, language, regex, regexpMark);
    this.message = message;
    this.pattern = regex;
    this.suggestionsOutMsg = suggestionsOutMsg;
    markGroup = regexpMark;
  }

  public Pattern getPattern() {
    return pattern;
  }

  /**
   * Runs the rule's regex over the plain sentence text and creates one {@link RuleMatch}
   * per non-overlapping occurrence; the marked region is regex group {@code markGroup}.
   * (Temporary stderr timing instrumentation removed; this is a hot path executed for
   * every sentence, so any diagnostics belong in a proper logger at debug level.)
   */
  @Override
  public RuleMatch[] match(AnalyzedSentence sentenceObj) throws IOException {
    String sentence = sentenceObj.getText();
    Matcher matcher = pattern.matcher(sentence);
    int startPos = 0;
    List<RuleMatch> matches = new ArrayList<>();
    while (matcher.find(startPos)) {
      String msg = replaceBackRefs(matcher, message);
      boolean sentenceStart = matcher.start(0) == 0;
      List<String> suggestions = extractSuggestions(matcher, msg);
      List<String> matchSuggestions = getMatchSuggestions(sentence, matcher);
      msg = replaceMatchElements(msg, matchSuggestions);
      int markStart = matcher.start(markGroup);
      int markEnd = matcher.end(markGroup);
      RuleMatch ruleMatch = new RuleMatch(this, markStart, markEnd, msg, null, sentenceStart, null);
      List<String> allSuggestions = new ArrayList<>();
      // <match>-based suggestions take precedence over the ones embedded in the message
      if (matchSuggestions.size() > 0) {
        allSuggestions.addAll(matchSuggestions);
      } else {
        allSuggestions.addAll(suggestions);
        List<String> extendedSuggestions = extractSuggestions(matcher, getSuggestionsOutMsg());
        allSuggestions.addAll(extendedSuggestions);
      }
      ruleMatch.setSuggestedReplacements(allSuggestions);
      matches.add(ruleMatch);
      startPos = matcher.end();
    }
    return matches.toArray(new RuleMatch[matches.size()]);
  }

  /**
   * Builds suggestions from the rule's &lt;match&gt; elements by applying each element's
   * regex replacement (and case conversion) to the matched error text.
   */
  @NotNull
  private List<String> getMatchSuggestions(String sentence, Matcher matcher) {
    List<String> matchSuggestions = new ArrayList<>();
    for (Match match : getSuggestionMatches()) {
      String errorText = sentence.substring(matcher.start(), matcher.end());
      String regexReplace = match.getRegexReplace();
      if (regexReplace != null) {
        String suggestion = match.getRegexMatch().matcher(errorText).replaceFirst(regexReplace);
        suggestion = CaseConversionHelper.convertCase(match.getCaseConversionType(), suggestion, errorText, getLanguage());
        matchSuggestions.add(suggestion);
      }
    }
    return matchSuggestions;
  }

  /**
   * Replaces the i-th {@code <suggestion>} element in {@code msg} with the i-th entry of
   * {@code suggestions}; surplus elements are left untouched.
   */
  private String replaceMatchElements(String msg, List<String> suggestions) {
    Matcher sMatcher = suggestionPattern.matcher(msg);
    StringBuffer sb = new StringBuffer();
    int i = 0;
    while (sMatcher.find()) {
      if (i < suggestions.size()) {
        sMatcher.appendReplacement(sb, "<suggestion>" + suggestions.get(i++) + "</suggestion>");
      }
    }
    sMatcher.appendTail(sb);
    return sb.toString();
  }

  /**
   * Extracts the suggestion texts from {@code msg}, expanding back-references; the first
   * character is uppercased when the match starts at the beginning of the sentence.
   */
  private List<String> extractSuggestions(Matcher matcher, String msg) {
    Matcher sMatcher = suggestionPattern.matcher(msg);
    int startPos = 0;
    List<String> result = new ArrayList<>();
    while (sMatcher.find(startPos)) {
      String suggestion = sMatcher.group(1);
      if (matcher.start() == 0) {
        result.add(replaceBackRefs(matcher, StringTools.uppercaseFirstChar(suggestion)));
      } else {
        result.add(replaceBackRefs(matcher, suggestion));
      }
      startPos = sMatcher.end();
    }
    return result;
  }

  /**
   * Expands back-references like {@code \1} in {@code msg} with the corresponding regex
   * group contents; references to unmatched (optional) groups are removed.
   */
  private String replaceBackRefs(Matcher matcher, String msg) {
    String replacedMsg = msg;
    for (int i = 0; i <= matcher.groupCount(); i++) {
      String replacement = matcher.group(i);
      if (replacement != null) {
        replacedMsg = replacedMsg.replace("\\" + i, replacement);
      }
    }
    replacedMsg = replacedMsg.replaceAll("\\\\[0-9]", ""); // optional matches need to be replaced by empty string
    return replacedMsg;
  }

  @Override
  public String toString() {
    return pattern.toString() + "/flags:" + pattern.flags();
  }
}
| remove temp logging again
| languagetool-core/src/main/java/org/languagetool/rules/patterns/RegexPatternRule.java | remove temp logging again |
|
Java | lgpl-2.1 | b15d7c31e07bf5a6ed6d1f76ddd3992d00bea3b3 | 0 | lucee/Lucee,lucee/Lucee,lucee/Lucee,lucee/Lucee | /**
*
* Copyright (c) 2014, the Railo Company Ltd. All rights reserved.
* Copyright (c) 2015, Lucee Assosication Switzerland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*
**/
package lucee.runtime.engine;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import lucee.commons.io.IOUtil;
import lucee.commons.io.SystemUtil;
import lucee.commons.io.log.Log;
import lucee.commons.io.log.LogUtil;
import lucee.commons.io.res.Resource;
import lucee.commons.io.res.filter.ExtensionResourceFilter;
import lucee.commons.io.res.filter.ResourceFilter;
import lucee.commons.io.res.util.ResourceUtil;
import lucee.commons.lang.ExceptionUtil;
import lucee.runtime.CFMLFactoryImpl;
import lucee.runtime.Mapping;
import lucee.runtime.MappingImpl;
import lucee.runtime.PageSource;
import lucee.runtime.PageSourcePool;
import lucee.runtime.config.ConfigImpl;
import lucee.runtime.config.ConfigServer;
import lucee.runtime.config.ConfigWeb;
import lucee.runtime.config.ConfigWebImpl;
import lucee.runtime.config.DeployHandler;
import lucee.runtime.config.XMLConfigAdmin;
import lucee.runtime.functions.system.PagePoolClear;
import lucee.runtime.lock.LockManagerImpl;
import lucee.runtime.net.smtp.SMTPConnectionPool;
import lucee.runtime.op.Caster;
import lucee.runtime.schedule.SchedulerImpl;
import lucee.runtime.type.scope.ScopeContext;
import lucee.runtime.type.scope.storage.StorageScopeFile;
import lucee.runtime.type.util.ArrayUtil;
/**
* own thread how check the main thread and his data
*/
/**
 * Engine-wide background maintenance thread. It wakes up every {@code interval} ms and
 * spawns a short-lived {@link ControlerThread} that performs periodic housekeeping for
 * every registered web context: request-timeout checks, deployments, config-change
 * detection, cache/temp-directory trimming and unused-scope/connection cleanup. Worker
 * threads that run longer than {@link #TIMEOUT} are forcibly stopped by the main loop.
 */
public final class Controler extends Thread {
	// maximum time (ms) a single housekeeping pass may run before it is forcibly stopped
	private static final long TIMEOUT = 50 * 1000;
	private static final ControllerState INACTIVE = new ControllerStateImpl(false);
	// pause between two housekeeping passes, in milliseconds
	private int interval;
	private long lastMinuteInterval = System.currentTimeMillis() - (1000 * 59); // first after a second
	private long last10SecondsInterval = System.currentTimeMillis() - (1000 * 9); // first after a second
	private long lastHourInterval = System.currentTimeMillis();
	// context name -> CFMLFactoryImpl; shared with the engine, snapshotted each pass via toFactories()
	private final Map contextes;
	// private ScheduleThread scheduleThread;
	private final ConfigServer configServer;
	// private final ShutdownHook shutdownHook;
	// shared liveness flag; close() swaps it for INACTIVE to end the run() loop
	private ControllerState state;

	/**
	 * @param contextes
	 * @param interval
	 * @param run
	 */
	public Controler(ConfigServer configServer, Map contextes, int interval, ControllerState state) {
		this.contextes = contextes;
		this.interval = interval;
		this.state = state;
		this.configServer = configServer;
		// shutdownHook=new ShutdownHook(configServer);
		// Runtime.getRuntime().addShutdownHook(shutdownHook);
	}

	/**
	 * One housekeeping pass, executed on its own thread so a hanging pass cannot block
	 * the main loop; the outer run() loop watches {@link #done}/{@link #t} to decide
	 * whether the pass finished, failed or must be stopped.
	 */
	private static class ControlerThread extends Thread {
		private Controler controler;
		private CFMLFactoryImpl[] factories;
		private boolean firstRun;
		private long done = -1; // duration of a successful pass in ms; -1 while still running
		private Throwable t; // set when the pass failed
		private Log log;
		private long start; // creation time; used by the watchdog to measure the pass age

		public ControlerThread(Controler controler, CFMLFactoryImpl[] factories, boolean firstRun, Log log) {
			this.start = System.currentTimeMillis();
			this.controler = controler;
			this.factories = factories;
			this.firstRun = firstRun;
			this.log = log;
		}

		@Override
		public void run() {
			long start = System.currentTimeMillis();
			try {
				controler.control(factories, firstRun);
				done = System.currentTimeMillis() - start;
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
				this.t = t;
			}
			// long time=System.currentTimeMillis()-start;
			// if(time>10000) {
			// log.info("controller", "["+hashCode()+"] controller was running for "+time+"ms");
			// }
		}
	}

	/**
	 * Main loop: sleep for {@code interval}, launch a ControlerThread, then reap earlier
	 * ones — logging slow passes (&gt;10s), logging failures, and force-stopping passes
	 * older than {@link #TIMEOUT}. Runs until close() flips the state to inactive.
	 */
	@Override
	public void run() {
		// scheduleThread.start();
		boolean firstRun = true;
		List<ControlerThread> threads = new ArrayList<ControlerThread>();
		CFMLFactoryImpl factories[] = null;
		while (state.active()) {
			// sleep
			SystemUtil.wait(this, interval);
			if (!state.active()) break;
			factories = toFactories(factories, contextes);
			// start the thread that calls control
			ControlerThread ct = new ControlerThread(this, factories, firstRun, configServer.getLog("application"));
			ct.start();
			threads.add(ct);
			// a growing backlog means passes are piling up faster than they complete
			if (threads.size() > 10 && lastMinuteInterval + 60000 < System.currentTimeMillis())
				configServer.getLog("application").info("controller", threads.size() + " active controller threads");
			// now we check all threads we have
			Iterator<ControlerThread> it = threads.iterator();
			long time;
			while (it.hasNext()) {
				ct = it.next();
				// print.e(ct.hashCode());
				time = System.currentTimeMillis() - ct.start;
				// done
				if (ct.done >= 0) {
					if (time > 10000) configServer.getLog("application").info("controller", "controller took " + ct.done + "ms to execute successfully.");
					it.remove();
				}
				// failed
				else if (ct.t != null) {
					configServer.getLog("application").log(Log.LEVEL_ERROR, "controler", ct.t);
					it.remove();
				}
				// stop it!
				else if (time > TIMEOUT) {
					SystemUtil.stop(ct);
					// print.e(ct.getStackTrace());
					if (!ct.isAlive()) {
						configServer.getLog("application").error("controller", "controller thread [" + ct.hashCode() + "] forced to stop after " + time + "ms");
						it.remove();
					}
					else {
						// stop attempt failed; log the stuck thread's stack trace and retry next cycle
						Throwable t = new Throwable();
						t.setStackTrace(ct.getStackTrace());
						configServer.getLog("application").log(Log.LEVEL_ERROR, "controler", "was not able to stop controller thread running for " + time + "ms", t);
					}
				}
			}
			if (factories.length > 0) firstRun = false;
		}
	}

	/**
	 * One housekeeping pass: decides which interval buckets (10s / minute / hour) are due,
	 * runs the server-level tasks, then the per-context tasks for every factory.
	 */
	private void control(CFMLFactoryImpl[] factories, boolean firstRun) {
		long now = System.currentTimeMillis();
		boolean do10Seconds = last10SecondsInterval + 10000 < now;
		if (do10Seconds) last10SecondsInterval = now;
		boolean doMinute = lastMinuteInterval + 60000 < now;
		if (doMinute) lastMinuteInterval = now;
		boolean doHour = (lastHourInterval + (1000 * 60 * 60)) < now;
		if (doHour) lastHourInterval = now;
		// broadcast cluster scope
		try {
			ScopeContext.getClusterScope(configServer, true).broadcast();
		}
		catch (Throwable t) {
			ExceptionUtil.rethrowIfNecessary(t);
		}
		// every 10 seconds
		if (do10Seconds) {
			// deploy extensions, archives ...
			// try{DeployHandler.deploy(configServer);}catch(Throwable t){ExceptionUtil.rethrowIfNecessary(t);}
		}
		// every minute
		if (doMinute) {
			// deploy extensions, archives ...
			try {
				DeployHandler.deploy(configServer);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			// pick up manual edits to the server config file
			try {
				XMLConfigAdmin.checkForChangesInConfigFile(configServer);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
		}
		// every hour
		if (doHour) {
			try {
				configServer.checkPermGenSpace(true);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
		}
		// per-context housekeeping
		for (int i = 0; i < factories.length; i++) {
			control(factories[i], do10Seconds, doMinute, doHour, firstRun);
		}
	}

	/**
	 * Per-context housekeeping for a single web context. Each task is wrapped in its own
	 * try/catch (Throwable is deliberately swallowed after rethrowIfNecessary) so one
	 * failing task cannot prevent the others from running. The context's config is
	 * registered as the thread-local config for the duration of the pass.
	 */
	private void control(CFMLFactoryImpl cfmlFactory, boolean do10Seconds, boolean doMinute, boolean doHour, boolean firstRun) {
		try {
			boolean isRunning = cfmlFactory.getUsedPageContextLength() > 0;
			if (isRunning) {
				// abort requests that exceeded their request timeout
				cfmlFactory.checkTimeout();
			}
			ConfigWeb config = null;
			// one-time initialization work on the very first pass
			if (firstRun) {
				config = cfmlFactory.getConfig();
				ThreadLocalConfig.register(config);
				config.reloadTimeServerOffset();
				checkOldClientFile(config);
				// try{checkStorageScopeFile(config,Session.SCOPE_CLIENT);}catch(Throwable t)
				// {ExceptionUtil.rethrowIfNecessary(t);}
				// try{checkStorageScopeFile(config,Session.SCOPE_SESSION);}catch(Throwable t)
				// {ExceptionUtil.rethrowIfNecessary(t);}
				try {
					config.reloadTimeServerOffset();
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				try {
					checkTempDirectorySize(config);
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				try {
					checkCacheFileSize(config);
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				try {
					cfmlFactory.getScopeContext().clearUnused();
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
			}
			if (config == null) {
				config = cfmlFactory.getConfig();
			}
			ThreadLocalConfig.register(config);
			if (do10Seconds) {
				// try{DeployHandler.deploy(config);}catch(Throwable t){ExceptionUtil.rethrowIfNecessary(t);}
			}
			// every Minute
			if (doMinute) {
				if (config == null) {
					config = cfmlFactory.getConfig();
				}
				ThreadLocalConfig.register(config);
				// make sure the scheduler of this context is running
				try {
					((SchedulerImpl) ((ConfigWebImpl) config).getScheduler()).startIfNecessary();
				}
				catch (Exception e) {
					LogUtil.log(ThreadLocalPageContext.getConfig(configServer), Controler.class.getName(), e);
				}
				// double check templates
				try {
					((ConfigWebImpl) config).getCompiler().checkWatched();
				}
				catch (Exception e) {
					LogUtil.log(ThreadLocalPageContext.getConfig(configServer), Controler.class.getName(), e);
				}
				// deploy extensions, archives ...
				try {
					DeployHandler.deploy(config);
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				// clear unused DB Connections
				try {
					((ConfigImpl) config).getDatasourceConnectionPool().clear(false);
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				// clear all unused scopes
				try {
					cfmlFactory.getScopeContext().clearUnused();
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				// Memory usage
				// clear Query Cache
				/*
				 * try{ ConfigWebUtil.getCacheHandlerFactories(config).query.clean(null);
				 * ConfigWebUtil.getCacheHandlerFactories(config).include.clean(null);
				 * ConfigWebUtil.getCacheHandlerFactories(config).function.clean(null);
				 * //cfmlFactory.getDefaultQueryCache().clearUnused(null); }catch(Throwable
				 * t){ExceptionUtil.rethrowIfNecessary(t);}
				 */
				// contract Page Pool
				try {
					doClearPagePools((ConfigWebImpl) config);
				}
				catch (Exception e) {}
				// try{checkPermGenSpace((ConfigWebImpl) config);}catch(Throwable t)
				// {ExceptionUtil.rethrowIfNecessary(t);}
				try {
					doCheckMappings(config);
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				try {
					doClearMailConnections();
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				// clean LockManager
				// only when no request is currently running in this context
				if (cfmlFactory.getUsedPageContextLength() == 0) try {
					((LockManagerImpl) config.getLockManager()).clean();
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				// pick up manual edits to this context's config file
				try {
					XMLConfigAdmin.checkForChangesInConfigFile(config);
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
			}
			// every hour
			if (doHour) {
				if (config == null) {
					config = cfmlFactory.getConfig();
				}
				ThreadLocalConfig.register(config);
				// time server offset
				try {
					config.reloadTimeServerOffset();
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				// check file based client/session scope
				// try{checkStorageScopeFile(config,Session.SCOPE_CLIENT);}catch(Throwable t)
				// {ExceptionUtil.rethrowIfNecessary(t);}
				// try{checkStorageScopeFile(config,Session.SCOPE_SESSION);}catch(Throwable t)
				// {ExceptionUtil.rethrowIfNecessary(t);}
				// check temp directory
				try {
					checkTempDirectorySize(config);
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
				// check cache directory
				try {
					checkCacheFileSize(config);
				}
				catch (Throwable t) {
					ExceptionUtil.rethrowIfNecessary(t);
				}
			}
			try {
				configServer.checkPermGenSpace(true);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
		}
		catch (Throwable t) {
			ExceptionUtil.rethrowIfNecessary(t);
		}
		finally {
			// always detach the thread-local config registered above
			ThreadLocalConfig.release();
		}
	}

	// drops compiled pages from all page pools of the context
	private void doClearPagePools(ConfigWebImpl config) {
		PagePoolClear.clear(null, config, true);
	}

	// refreshes the factory snapshot only when the number of contexts changed
	private CFMLFactoryImpl[] toFactories(CFMLFactoryImpl[] factories, Map contextes) {
		if (factories == null || factories.length != contextes.size()) factories = (CFMLFactoryImpl[]) contextes.values().toArray(new CFMLFactoryImpl[contextes.size()]);
		return factories;
	}

	// closes pooled SMTP sessions
	private void doClearMailConnections() {
		SMTPConnectionPool.closeSessions();
	}

	/**
	 * One-time migration: copies legacy flat client-scope files ("name-cfid.script")
	 * into the newer folder structure and deletes the originals. Failures are ignored.
	 */
	private void checkOldClientFile(ConfigWeb config) {
		ExtensionResourceFilter filter = new ExtensionResourceFilter(".script", false);
		// move old structured file in new structure
		try {
			Resource dir = config.getClientScopeDir(), trgres;
			Resource[] children = dir.listResources(filter);
			String src, trg;
			int index;
			for (int i = 0; i < children.length; i++) {
				src = children[i].getName();
				index = src.indexOf('-');
				trg = StorageScopeFile.getFolderName(src.substring(0, index), src.substring(index + 1), false);
				trgres = dir.getRealResource(trg);
				if (!trgres.exists()) {
					trgres.createFile(true);
					ResourceUtil.copy(children[i], trgres);
				}
				// children[i].moveTo(trgres);
				children[i].delete();
			}
		}
		catch (Throwable t) {
			ExceptionUtil.rethrowIfNecessary(t);
		}
	}

	// trims the cache directory to the configured maximum size (".cache" files only)
	private void checkCacheFileSize(ConfigWeb config) {
		checkSize(config, config.getCacheDir(), config.getCacheDirSize(), new ExtensionResourceFilter(".cache"));
	}

	// trims the temp directory to a fixed 1 GB limit
	private void checkTempDirectorySize(ConfigWeb config) {
		checkSize(config, config.getTempDirectory(), 1024 * 1024 * 1024, null);
	}

	/**
	 * While the directory holds more than 100000 entries or exceeds {@code maxSize} bytes,
	 * repeatedly deletes the oldest (least recently modified) matching resource.
	 */
	private void checkSize(ConfigWeb config, Resource dir, long maxSize, ResourceFilter filter) {
		if (!dir.exists()) return;
		Resource res = null;
		int count = ArrayUtil.size(filter == null ? dir.list() : dir.list(filter));
		long size = ResourceUtil.getRealSize(dir, filter);
		LogUtil.log(ThreadLocalPageContext.getConfig(config), Log.LEVEL_INFO, Controler.class.getName(),
				"check size of directory [" + dir + "]; current size [" + size + "];max size [" + maxSize + "]");
		int len = -1;
		while (count > 100000 || size > maxSize) {
			Resource[] files = filter == null ? dir.listResources() : dir.listResources(filter);
			if (len == files.length) break;// protect from infinite loop
			len = files.length;
			// find the oldest file
			for (int i = 0; i < files.length; i++) {
				if (res == null || res.lastModified() > files[i].lastModified()) {
					res = files[i];
				}
			}
			if (res != null) {
				size -= res.length();
				try {
					res.remove(true);
					count--;
				}
				catch (IOException e) {
					LogUtil.log(ThreadLocalPageContext.getConfig(config), Log.LEVEL_ERROR, Controler.class.getName(), "cannot remove resource " + res.getAbsolutePath());
					break;
				}
			}
			res = null;
		}
	}

	// lets every mapping re-validate itself (e.g. pick up archive changes)
	private void doCheckMappings(ConfigWeb config) {
		Mapping[] mappings = config.getMappings();
		for (int i = 0; i < mappings.length; i++) {
			Mapping mapping = mappings[i];
			mapping.check();
		}
	}

	private PageSourcePool[] getPageSourcePools(ConfigWeb config) {
		return getPageSourcePools(config.getMappings());
	}

	// collects the page-source pool of every given mapping
	private PageSourcePool[] getPageSourcePools(Mapping... mappings) {
		PageSourcePool[] pools = new PageSourcePool[mappings.length];
		// int size=0;
		for (int i = 0; i < mappings.length; i++) {
			pools[i] = ((MappingImpl) mappings[i]).getPageSourcePool();
			// size+=pools[i].size();
		}
		return pools;
	}

	// total number of page sources across all pools
	private int getPageSourcePoolSize(PageSourcePool[] pools) {
		int size = 0;
		for (int i = 0; i < pools.length; i++)
			size += pools[i].size();
		return size;
	}

	/**
	 * Evicts the least recently accessed page source across all pools; a pool that fails
	 * during inspection is cleared entirely.
	 */
	private void removeOldest(PageSourcePool[] pools) {
		PageSourcePool pool = null;
		String key = null;
		PageSource ps = null;
		long date = -1;
		for (int i = 0; i < pools.length; i++) {
			try {
				String[] keys = pools[i].keys();
				for (int y = 0; y < keys.length; y++) {
					ps = pools[i].getPageSource(keys[y], false);
					if (date == -1 || date > ps.getLastAccessTime()) {
						pool = pools[i];
						key = keys[y];
						date = ps.getLastAccessTime();
					}
				}
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
				pools[i].clear();
			}
		}
		if (pool != null) pool.remove(key);
	}

	// empties every given page-source pool
	private void clear(PageSourcePool[] pools) {
		for (int i = 0; i < pools.length; i++) {
			pools[i].clear();
		}
	}

	/** Stops the controller loop: marks the state inactive and wakes the sleeping thread. */
	public void close() {
		state = INACTIVE;
		SystemUtil.notify(this);
	}

	/*
	 * private void doLogMemoryUsage(ConfigWeb config) { if(config.logMemoryUsage()&&
	 * config.getMemoryLogger()!=null) config.getMemoryLogger().write(); }
	 */

	/**
	 * Resource filter accepting expired storage-scope files: the file content starts with
	 * "<expires-millis>:"; files without that prefix fall back to a last-modified check
	 * for backward compatibility.
	 */
	static class ExpiresFilter implements ResourceFilter {

		private long time;
		private boolean allowDir;

		public ExpiresFilter(long time, boolean allowDir) {
			this.allowDir = allowDir;
			this.time = time;
		}

		@Override
		public boolean accept(Resource res) {
			if (res.isDirectory()) return allowDir;
			// load content
			String str = null;
			try {
				str = IOUtil.toString(res, "UTF-8");
			}
			catch (IOException e) {
				return false;
			}
			int index = str.indexOf(':');
			if (index != -1) {
				long expires = Caster.toLongValue(str.substring(0, index), -1L);
				// check is for backward compatibility, old files have no expires date inside. they do ot expire
				if (expires != -1) {
					if (expires < System.currentTimeMillis()) {
						return true;
					}
					str = str.substring(index + 1);
					return false;
				}
			}
			// old files not having a timestamp inside
			else if (res.lastModified() <= time) {
				return true;
			}
			return false;
		}
	}
}
| core/src/main/java/lucee/runtime/engine/Controler.java | /**
*
* Copyright (c) 2014, the Railo Company Ltd. All rights reserved.
* Copyright (c) 2015, Lucee Assosication Switzerland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*
**/
package lucee.runtime.engine;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import lucee.commons.io.IOUtil;
import lucee.commons.io.SystemUtil;
import lucee.commons.io.log.Log;
import lucee.commons.io.log.LogUtil;
import lucee.commons.io.res.Resource;
import lucee.commons.io.res.filter.ExtensionResourceFilter;
import lucee.commons.io.res.filter.ResourceFilter;
import lucee.commons.io.res.util.ResourceUtil;
import lucee.commons.lang.ExceptionUtil;
import lucee.runtime.CFMLFactoryImpl;
import lucee.runtime.Mapping;
import lucee.runtime.MappingImpl;
import lucee.runtime.PageSource;
import lucee.runtime.PageSourcePool;
import lucee.runtime.config.ConfigImpl;
import lucee.runtime.config.ConfigServer;
import lucee.runtime.config.ConfigWeb;
import lucee.runtime.config.ConfigWebImpl;
import lucee.runtime.config.DeployHandler;
import lucee.runtime.config.XMLConfigAdmin;
import lucee.runtime.functions.system.PagePoolClear;
import lucee.runtime.lock.LockManagerImpl;
import lucee.runtime.net.smtp.SMTPConnectionPool;
import lucee.runtime.op.Caster;
import lucee.runtime.schedule.SchedulerImpl;
import lucee.runtime.type.scope.ScopeContext;
import lucee.runtime.type.scope.storage.StorageScopeFile;
import lucee.runtime.type.util.ArrayUtil;
/**
 * Background controller thread that periodically checks the engine's CFML factories
 * and cleans up their data (timeouts, scopes, pools, caches, directories).
 */
public final class Controler extends Thread {
// how long (ms) a single control pass may run before run() tries to force-stop its thread
private static final long TIMEOUT = 50 * 1000;
// shared sentinel assigned in close() to end the main loop
private static final ControllerState INACTIVE = new ControllerStateImpl(false);
// sleep time in ms between two control passes
private int interval;
// timestamps of the last minute/10s/hour passes; pre-dated so the first pass fires early
private long lastMinuteInterval = System.currentTimeMillis() - (1000 * 59); // first after a second
private long last10SecondsInterval = System.currentTimeMillis() - (1000 * 9); // first after a second
private long lastHourInterval = System.currentTimeMillis();
// all web contexts; values are expected to be CFMLFactoryImpl instances (see toFactories)
private final Map contextes;
// private ScheduleThread scheduleThread;
private final ConfigServer configServer;
// private final ShutdownHook shutdownHook;
// flag polled by run(); flipped to INACTIVE by close()
private ControllerState state;
/**
 * Creates the controller thread (it must still be started by the caller).
 *
 * @param configServer server configuration, used for logging and server-wide checks
 * @param contextes map of all web contexts; values must be {@code CFMLFactoryImpl} instances
 * @param interval sleep time in milliseconds between two control passes
 * @param state shared flag that keeps the loop alive while {@code state.active()} is true
 */
public Controler(ConfigServer configServer, Map contextes, int interval, ControllerState state) {
	this.contextes = contextes;
	this.interval = interval;
	this.state = state;
	this.configServer = configServer;
	// shutdownHook=new ShutdownHook(configServer);
	// Runtime.getRuntime().addShutdownHook(shutdownHook);
}
/**
 * Worker thread executing one control pass. The outer {@code Controler.run()} loop
 * inspects the fields below directly ({@code done}, {@code t}, {@code start}) to decide
 * whether a pass finished, failed, or must be force-stopped.
 */
private static class ControlerThread extends Thread {
	private Controler controler;
	private CFMLFactoryImpl[] factories;
	private boolean firstRun;
	// duration of a successful pass in ms; stays -1 while running or after a failure
	private long done = -1;
	// cause of failure, set when the pass throws
	private Throwable t;
	// currently unused here (logging is commented out below); kept for the outer loop's use
	private Log log;
	// creation timestamp; the outer loop measures thread age against this
	private long start;

	public ControlerThread(Controler controler, CFMLFactoryImpl[] factories, boolean firstRun, Log log) {
		this.start = System.currentTimeMillis();
		this.controler = controler;
		this.factories = factories;
		this.firstRun = firstRun;
		this.log = log;
	}

	@Override
	public void run() {
		// NOTE: this local deliberately shadows the field set at construction time,
		// so "done" measures execution only, excluding any scheduling delay
		long start = System.currentTimeMillis();
		try {
			controler.control(factories, firstRun);
			done = System.currentTimeMillis() - start;
		}
		catch (Throwable t) {
			ExceptionUtil.rethrowIfNecessary(t);
			this.t = t;
		}
		// long time=System.currentTimeMillis()-start;
		// if(time>10000) {
		// log.info("controller", "["+hashCode()+"] controller was running for "+time+"ms");
		// }
	}
}
/**
 * Main loop: sleeps for {@code interval} ms, spawns a {@link ControlerThread} to do one
 * control pass, then reaps previously spawned passes — removing finished/failed ones and
 * force-stopping any pass older than {@code TIMEOUT}.
 */
@Override
public void run() {
	// scheduleThread.start();
	boolean firstRun = true;
	List<ControlerThread> threads = new ArrayList<ControlerThread>();
	CFMLFactoryImpl factories[] = null;
	while (state.active()) {
		// sleep
		SystemUtil.wait(this, interval);
		if (!state.active()) break;
		// refresh the factory snapshot when the number of contexts changed
		factories = toFactories(factories, contextes);
		// start the thread that calls control
		ControlerThread ct = new ControlerThread(this, factories, firstRun, configServer.getLog("application"));
		ct.start();
		threads.add(ct);
		// report a backlog of still-running passes at most roughly once per minute
		if (threads.size() > 10 && lastMinuteInterval + 60000 < System.currentTimeMillis())
			configServer.getLog("application").info("controller", threads.size() + " active controller threads");
		// now we check all threads we have
		Iterator<ControlerThread> it = threads.iterator();
		long time;
		while (it.hasNext()) {
			ct = it.next();
			// print.e(ct.hashCode());
			// age of the pass, measured from thread construction
			time = System.currentTimeMillis() - ct.start;
			// done
			if (ct.done >= 0) {
				if (time > 10000) configServer.getLog("application").info("controller", "controler took " + ct.done + "ms to execute sucessfully.");
				it.remove();
			}
			// failed
			else if (ct.t != null) {
				configServer.getLog("application").log(Log.LEVEL_ERROR, "controler", ct.t);
				it.remove();
			}
			// stop it!
			else if (time > TIMEOUT) {
				SystemUtil.stop(ct);
				// print.e(ct.getStackTrace());
				if (!ct.isAlive()) {
					configServer.getLog("application").error("controller", "controler thread [" + ct.hashCode() + "] forced to stop after " + time + "ms");
					it.remove();
				}
				else {
					// could not stop it: log the hung pass's stack trace for diagnosis and keep it in the list
					Throwable t = new Throwable();
					t.setStackTrace(ct.getStackTrace());
					configServer.getLog("application").log(Log.LEVEL_ERROR, "controler", "was not able to stop controller thread running for " + time + "ms", t);
				}
			}
		}
		// firstRun stays true until at least one factory existed during a pass
		if (factories.length > 0) firstRun = false;
	}
}
/**
 * One server-wide control pass: decides which interval buckets (10s / minute / hour)
 * are due, performs the server-level maintenance for each bucket, then runs the
 * per-context control for every factory.
 *
 * @param factories snapshot of all web-context factories
 * @param firstRun true until the first pass that saw at least one factory completed
 */
private void control(CFMLFactoryImpl[] factories, boolean firstRun) {
	long now = System.currentTimeMillis();
	// interval flags are computed and their timestamps advanced up front,
	// so a long-running pass does not cause the same bucket to fire twice
	boolean do10Seconds = last10SecondsInterval + 10000 < now;
	if (do10Seconds) last10SecondsInterval = now;
	boolean doMinute = lastMinuteInterval + 60000 < now;
	if (doMinute) lastMinuteInterval = now;
	boolean doHour = (lastHourInterval + (1000 * 60 * 60)) < now;
	if (doHour) lastHourInterval = now;
	// broadcast cluster scope
	try {
		ScopeContext.getClusterScope(configServer, true).broadcast();
	}
	catch (Throwable t) {
		ExceptionUtil.rethrowIfNecessary(t);
	}
	// every 10 seconds
	if (do10Seconds) {
		// deploy extensions, archives ...
		// try{DeployHandler.deploy(configServer);}catch(Throwable t){ExceptionUtil.rethrowIfNecessary(t);}
	}
	// every minute
	if (doMinute) {
		// deploy extensions, archives ...
		try {
			DeployHandler.deploy(configServer);
		}
		catch (Throwable t) {
			ExceptionUtil.rethrowIfNecessary(t);
		}
		// pick up manual edits of the server configuration file
		try {
			XMLConfigAdmin.checkForChangesInConfigFile(configServer);
		}
		catch (Throwable t) {
			ExceptionUtil.rethrowIfNecessary(t);
		}
	}
	// every hour
	if (doHour) {
		try {
			configServer.checkPermGenSpace(true);
		}
		catch (Throwable t) {
			ExceptionUtil.rethrowIfNecessary(t);
		}
	}
	// per-context maintenance for every web context
	for (int i = 0; i < factories.length; i++) {
		control(factories[i], do10Seconds, doMinute, doHour, firstRun);
	}
}
/**
 * Per-context control pass: checks request timeouts, performs one-time startup work,
 * and runs the minute/hour maintenance tasks for a single web context. Each task is
 * individually guarded so one failure cannot abort the rest of the pass.
 *
 * @param cfmlFactory the factory of the web context to maintain
 * @param do10Seconds true when the 10-second bucket is due (currently no active task)
 * @param doMinute true when the per-minute tasks are due
 * @param doHour true when the hourly tasks are due
 * @param firstRun true on the first effective pass; triggers one-time startup cleanup
 */
private void control(CFMLFactoryImpl cfmlFactory, boolean do10Seconds, boolean doMinute, boolean doHour, boolean firstRun) {
	try {
		// check running requests for timeouts
		boolean isRunning = cfmlFactory.getUsedPageContextLength() > 0;
		if (isRunning) {
			cfmlFactory.checkTimeout();
		}
		ConfigWeb config = null;
		// one-time startup work for this context
		if (firstRun) {
			config = cfmlFactory.getConfig();
			ThreadLocalConfig.register(config);
			config.reloadTimeServerOffset();
			checkOldClientFile(config);
			// try{checkStorageScopeFile(config,Session.SCOPE_CLIENT);}catch(Throwable t)
			// {ExceptionUtil.rethrowIfNecessary(t);}
			// try{checkStorageScopeFile(config,Session.SCOPE_SESSION);}catch(Throwable t)
			// {ExceptionUtil.rethrowIfNecessary(t);}
			try {
				config.reloadTimeServerOffset();
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			try {
				checkTempDirectorySize(config);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			try {
				checkCacheFileSize(config);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			try {
				cfmlFactory.getScopeContext().clearUnused();
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
		}
		if (config == null) {
			config = cfmlFactory.getConfig();
		}
		// make the web config available to code running on this thread
		ThreadLocalConfig.register(config);
		if (do10Seconds) {
			// try{DeployHandler.deploy(config);}catch(Throwable t){ExceptionUtil.rethrowIfNecessary(t);}
		}
		// every Minute
		if (doMinute) {
			if (config == null) {
				config = cfmlFactory.getConfig();
			}
			ThreadLocalConfig.register(config);
			// make sure the scheduler of this context is running
			try {
				((SchedulerImpl) ((ConfigWebImpl) config).getScheduler()).startIfNecessary();
			}
			catch (Exception e) {
				LogUtil.log(ThreadLocalPageContext.getConfig(configServer), Controler.class.getName(), e);
			}
			// double check templates
			try {
				((ConfigWebImpl) config).getCompiler().checkWatched();
			}
			catch (Exception e) {
				LogUtil.log(ThreadLocalPageContext.getConfig(configServer), Controler.class.getName(), e);
			}
			// deploy extensions, archives ...
			try {
				DeployHandler.deploy(config);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			// clear unused DB Connections
			try {
				((ConfigImpl) config).getDatasourceConnectionPool().clear(false);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			// clear all unused scopes
			try {
				cfmlFactory.getScopeContext().clearUnused();
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			// Memory usage
			// clear Query Cache
			/*
			 * try{ ConfigWebUtil.getCacheHandlerFactories(config).query.clean(null);
			 * ConfigWebUtil.getCacheHandlerFactories(config).include.clean(null);
			 * ConfigWebUtil.getCacheHandlerFactories(config).function.clean(null);
			 * //cfmlFactory.getDefaultQueryCache().clearUnused(null); }catch(Throwable
			 * t){ExceptionUtil.rethrowIfNecessary(t);}
			 */
			// contract Page Pool
			try {
				doClearPagePools((ConfigWebImpl) config);
			}
			catch (Exception e) {}
			// try{checkPermGenSpace((ConfigWebImpl) config);}catch(Throwable t)
			// {ExceptionUtil.rethrowIfNecessary(t);}
			try {
				doCheckMappings(config);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			try {
				doClearMailConnections();
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			// clean LockManager - only when no request is currently using this context
			if (cfmlFactory.getUsedPageContextLength() == 0) try {
				((LockManagerImpl) config.getLockManager()).clean();
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			// pick up manual edits of this context's configuration file
			try {
				XMLConfigAdmin.checkForChangesInConfigFile(config);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
		}
		// every hour
		if (doHour) {
			if (config == null) {
				config = cfmlFactory.getConfig();
			}
			ThreadLocalConfig.register(config);
			// time server offset
			try {
				config.reloadTimeServerOffset();
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			// check file based client/session scope
			// try{checkStorageScopeFile(config,Session.SCOPE_CLIENT);}catch(Throwable t)
			// {ExceptionUtil.rethrowIfNecessary(t);}
			// try{checkStorageScopeFile(config,Session.SCOPE_SESSION);}catch(Throwable t)
			// {ExceptionUtil.rethrowIfNecessary(t);}
			// check temp directory
			try {
				checkTempDirectorySize(config);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
			// check cache directory
			try {
				checkCacheFileSize(config);
			}
			catch (Throwable t) {
				ExceptionUtil.rethrowIfNecessary(t);
			}
		}
		try {
			configServer.checkPermGenSpace(true);
		}
		catch (Throwable t) {
			ExceptionUtil.rethrowIfNecessary(t);
		}
	}
	catch (Throwable t) {
		ExceptionUtil.rethrowIfNecessary(t);
	}
	finally {
		// always unbind the config from this thread, even when a task failed
		ThreadLocalConfig.release();
	}
}
/** Contracts/clears the page (template) pools of the given web context. */
private void doClearPagePools(ConfigWebImpl config) {
	PagePoolClear.clear(null, config, true);
}
/**
 * Returns a snapshot array of all context factories, reusing the cached array
 * when the number of contexts has not changed since the last call.
 */
private CFMLFactoryImpl[] toFactories(CFMLFactoryImpl[] factories, Map contextes) {
	// cached snapshot is still valid: same number of contexts as last time
	if (factories != null && factories.length == contextes.size()) {
		return factories;
	}
	return (CFMLFactoryImpl[]) contextes.values().toArray(new CFMLFactoryImpl[contextes.size()]);
}
/** Closes all pooled SMTP sessions/connections. */
private void doClearMailConnections() {
	SMTPConnectionPool.closeSessions();
}
/**
 * One-time migration of legacy ".script" client-scope files from the flat layout
 * into the newer folder structure, then deletes the originals. Failures are
 * deliberately swallowed (best-effort migration).
 */
private void checkOldClientFile(ConfigWeb config) {
	ExtensionResourceFilter filter = new ExtensionResourceFilter(".script", false);
	// move old structured file in new structure
	try {
		Resource dir = config.getClientScopeDir(), trgres;
		Resource[] children = dir.listResources(filter);
		String src, trg;
		int index;
		for (int i = 0; i < children.length; i++) {
			src = children[i].getName();
			// legacy names are "<app>-<cfid>.script"; split on the first '-'
			// NOTE(review): a name without '-' would make index -1 and substring throw;
			// the surrounding catch swallows that per-directory, aborting the rest of the loop
			index = src.indexOf('-');
			trg = StorageScopeFile.getFolderName(src.substring(0, index), src.substring(index + 1), false);
			trgres = dir.getRealResource(trg);
			if (!trgres.exists()) {
				trgres.createFile(true);
				ResourceUtil.copy(children[i], trgres);
			}
			// children[i].moveTo(trgres);
			children[i].delete();
		}
	}
	catch (Throwable t) {
		ExceptionUtil.rethrowIfNecessary(t);
	}
}
/** Trims the cache directory (".cache" files only) to the configured maximum size. */
private void checkCacheFileSize(ConfigWeb config) {
	checkSize(config, config.getCacheDir(), config.getCacheDirSize(), new ExtensionResourceFilter(".cache"));
}
/** Trims the temp directory to a fixed 1 GB cap (all files, no filter). */
private void checkTempDirectorySize(ConfigWeb config) {
	// name the magic number: maximum allowed size of the temp directory (1 GB)
	final long maxTempDirSize = 1024L * 1024 * 1024;
	checkSize(config, config.getTempDirectory(), maxTempDirSize, null);
}
/**
 * Shrinks a directory until it holds at most 100000 entries and at most
 * {@code maxSize} bytes, repeatedly deleting the oldest (least recently modified)
 * matching file. Stops early when the file count stops changing or a delete fails.
 *
 * @param config web config, used only for logging
 * @param dir directory to trim; no-op when it does not exist
 * @param maxSize maximum allowed total size in bytes
 * @param filter optional filter restricting which files are counted/deleted (null = all)
 */
private void checkSize(ConfigWeb config, Resource dir, long maxSize, ResourceFilter filter) {
	if (!dir.exists()) return;
	Resource res = null;
	int count = ArrayUtil.size(filter == null ? dir.list() : dir.list(filter));
	long size = ResourceUtil.getRealSize(dir, filter);
	LogUtil.log(ThreadLocalPageContext.getConfig(config), Log.LEVEL_INFO, Controler.class.getName(),
			"check size of directory [" + dir + "]; current size [" + size + "];max size [" + maxSize + "]");
	int len = -1;
	while (count > 100000 || size > maxSize) {
		Resource[] files = filter == null ? dir.listResources() : dir.listResources(filter);
		if (len == files.length) break;// protect from infinite loop: nothing was actually removed last round
		len = files.length;
		// find the oldest file by last-modified timestamp
		for (int i = 0; i < files.length; i++) {
			if (res == null || res.lastModified() > files[i].lastModified()) {
				res = files[i];
			}
		}
		if (res != null) {
			// subtract before removing; length() is unavailable after deletion
			size -= res.length();
			try {
				res.remove(true);
				count--;
			}
			catch (IOException e) {
				LogUtil.log(ThreadLocalPageContext.getConfig(config), Log.LEVEL_ERROR, Controler.class.getName(), "cannot remove resource " + res.getAbsolutePath());
				break;
			}
		}
		res = null;
	}
}
/** Asks every mapping of the given web context to validate itself. */
private void doCheckMappings(ConfigWeb config) {
	for (Mapping mapping : config.getMappings()) {
		mapping.check();
	}
}
/** Collects the page-source pools of all mappings of the given web context. */
private PageSourcePool[] getPageSourcePools(ConfigWeb config) {
	return getPageSourcePools(config.getMappings());
}
/** Extracts one page-source pool per mapping, in the same order as the input. */
private PageSourcePool[] getPageSourcePools(Mapping... mappings) {
	PageSourcePool[] result = new PageSourcePool[mappings.length];
	int idx = 0;
	for (Mapping mapping : mappings) {
		result[idx++] = ((MappingImpl) mapping).getPageSourcePool();
	}
	return result;
}
/** Sums the sizes of all given page-source pools. */
private int getPageSourcePoolSize(PageSourcePool[] pools) {
	int total = 0;
	for (PageSourcePool pool : pools) {
		total += pool.size();
	}
	return total;
}
/**
 * Finds, across all pools, the page source with the oldest last-access time and
 * removes it from its pool. A pool that fails during inspection is cleared entirely
 * as a fallback.
 */
private void removeOldest(PageSourcePool[] pools) {
	PageSourcePool pool = null;
	String key = null;
	PageSource ps = null;
	// oldest last-access time seen so far; -1 means "none yet"
	long date = -1;
	for (int i = 0; i < pools.length; i++) {
		try {
			String[] keys = pools[i].keys();
			for (int y = 0; y < keys.length; y++) {
				ps = pools[i].getPageSource(keys[y], false);
				if (date == -1 || date > ps.getLastAccessTime()) {
					pool = pools[i];
					key = keys[y];
					date = ps.getLastAccessTime();
				}
			}
		}
		catch (Throwable t) {
			ExceptionUtil.rethrowIfNecessary(t);
			// inspection failed: drop everything in this pool rather than leak it
			pools[i].clear();
		}
	}
	if (pool != null) pool.remove(key);
}
/** Empties every given page-source pool. */
private void clear(PageSourcePool[] pools) {
	for (PageSourcePool pool : pools) {
		pool.clear();
	}
}
/** Signals the controller loop to stop and wakes it from its sleep. */
public void close() {
	state = INACTIVE;
	SystemUtil.notify(this);
}
/*
* private void doLogMemoryUsage(ConfigWeb config) { if(config.logMemoryUsage()&&
* config.getMemoryLogger()!=null) config.getMemoryLogger().write(); }
*/
/**
 * Filter accepting storage files that have expired. New-format files embed an
 * "expires:payload" timestamp; legacy files without one are judged by their
 * last-modified time against the {@code time} cutoff.
 */
static class ExpiresFilter implements ResourceFilter {
	// cutoff (epoch ms) for legacy files without an embedded expires timestamp
	private long time;
	// whether directories pass the filter
	private boolean allowDir;

	public ExpiresFilter(long time, boolean allowDir) {
		this.allowDir = allowDir;
		this.time = time;
	}

	@Override
	public boolean accept(Resource res) {
		if (res.isDirectory()) return allowDir;
		// load content
		String str = null;
		try {
			str = IOUtil.toString(res, "UTF-8");
		}
		catch (IOException e) {
			return false;
		}
		int index = str.indexOf(':');
		if (index != -1) {
			long expires = Caster.toLongValue(str.substring(0, index), -1L);
			// check is for backward compatibility, old files have no expires date inside; they do not expire
			if (expires != -1) {
				// FIX: removed a dead "str = str.substring(index + 1);" that immediately preceded return
				return expires < System.currentTimeMillis();
			}
		}
		// old files not having a timestamp inside
		else if (res.lastModified() <= time) {
			return true;
		}
		return false;
	}
}
} | fix spelling mistakes | core/src/main/java/lucee/runtime/engine/Controler.java | fix spelling mistakes |
|
Java | lgpl-2.1 | 1088eeb939ee15ce701b24cc2ff8980222bb21e5 | 0 | jolie/jolie,jolie/jolie,jolie/jolie | /***************************************************************************
* Copyright (C) by Fabrizio Montesi *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU Library General Public License as *
* published by the Free Software Foundation; either version 2 of the *
* License, or (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
* *
* For details about the authors of this software, see the AUTHORS file. *
***************************************************************************/
package jolie.net;
import cx.ath.matthew.unix.UnixSocket;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import jolie.Interpreter;
import jolie.net.protocols.CommProtocol;
/**
 * Streaming communication channel over a local Unix-domain socket.
 * Input is wrapped in a {@link PreBufferedInputStream} so that bytes probed by
 * {@link #isReady()} are not lost to the protocol layer.
 */
public class LocalSocketCommChannel extends StreamingCommChannel implements PollableCommChannel
{
	private final UnixSocket unixSocket;
	private final PreBufferedInputStream input;
	private final InputStream rawInput;
	private final OutputStream output;

	public LocalSocketCommChannel( UnixSocket socket, URI location, CommProtocol protocol )
		throws IOException
	{
		super( location, protocol );
		this.unixSocket = socket;
		this.rawInput = socket.getInputStream();
		this.output = socket.getOutputStream();
		this.input = new PreBufferedInputStream( rawInput );
		setToBeClosed( false ); // LocalSocket connections are kept open by default
	}

	protected void sendImpl( CommMessage message )
		throws IOException
	{
		protocol().send( output, message, input );
		output.flush();
	}

	protected CommMessage recvImpl()
		throws IOException
	{
		return protocol().recv( input, output );
	}

	protected void closeImpl()
		throws IOException
	{
		unixSocket.close();
	}

	public synchronized boolean isReady()
		throws IOException
	{
		// data probed earlier is already waiting in the pre-buffer
		if ( input.hasCachedData() ) {
			return true;
		}
		// probe the socket with a one-byte read and stash the byte for the protocol
		byte[] probe = new byte[1];
		if ( rawInput.read( probe ) > 0 ) {
			input.append( probe[0] );
			return true;
		}
		return false;
	}

	@Override
	public void disposeForInputImpl()
		throws IOException
	{
		Interpreter.getInstance().commCore().registerForPolling( this );
	}
}
| extensions/localsocket/src/jolie/net/LocalSocketCommChannel.java | /***************************************************************************
* Copyright (C) by Fabrizio Montesi *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU Library General Public License as *
* published by the Free Software Foundation; either version 2 of the *
* License, or (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
* *
* For details about the authors of this software, see the AUTHORS file. *
***************************************************************************/
package jolie.net;
import cx.ath.matthew.unix.UnixSocket;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import jolie.Interpreter;
import jolie.net.protocols.CommProtocol;
/**
 * Streaming communication channel over a local Unix-domain socket.
 * Input is wrapped in a {@link PreBufferedInputStream} so that bytes probed by
 * {@link #isReady()} are not lost to the protocol layer.
 */
public class LocalSocketCommChannel extends StreamingCommChannel implements PollableCommChannel
{
	private final UnixSocket socket;
	// final: the pre-buffer must stay the same object across isReady()/recvImpl()
	private final PreBufferedInputStream bufferedInputStream;
	private final InputStream socketInputStream;

	public LocalSocketCommChannel( UnixSocket socket, URI location, CommProtocol protocol )
		throws IOException
	{
		super( location, protocol );
		this.socket = socket;
		this.socketInputStream = socket.getInputStream();
		this.bufferedInputStream = new PreBufferedInputStream( socketInputStream );
		setToBeClosed( false ); // LocalSocket connections are kept open by default
	}

	protected void sendImpl( CommMessage message )
		throws IOException
	{
		java.io.OutputStream ostream = socket.getOutputStream();
		protocol().send( ostream, message, bufferedInputStream );
		// FIX: flush so the message actually reaches the peer instead of sitting in a buffer
		ostream.flush();
	}

	protected CommMessage recvImpl()
		throws IOException
	{
		return protocol().recv( bufferedInputStream, socket.getOutputStream() );
	}

	protected void closeImpl()
		throws IOException
	{
		socket.close();
	}

	public synchronized boolean isReady()
		throws IOException
	{
		boolean ret = false;
		// FIX: consult the pre-buffer first; a byte probed earlier means we are
		// ready without touching (and possibly blocking on) the socket again
		if ( bufferedInputStream.hasCachedData() ) {
			ret = true;
		} else {
			byte[] r = new byte[1];
			if ( socketInputStream.read( r ) > 0 ) {
				bufferedInputStream.append( r[0] );
				ret = true;
			}
		}
		return ret;
	}

	@Override
	public void disposeForInputImpl()
		throws IOException
	{
		Interpreter.getInstance().commCore().registerForPolling( this );
	}
}
| Use the shared prebuffered socket from jolie.
Former-commit-id: 73bc4312b3708766663e2f7b8394b5ec58c25d39 | extensions/localsocket/src/jolie/net/LocalSocketCommChannel.java | Use the shared prebuffered socket from jolie. |
|
Java | unlicense | 6e4abba026bb64cbc146eb11627c06e00972d35b | 0 | stefvanschie/buildinggame | package me.stefvanschie.buildinggame.utils.guis.buildmenu;
import java.util.ArrayList;
import java.util.List;
import me.stefvanschie.buildinggame.managers.files.SettingsManager;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.Player;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
/**
 * GUI for selecting the in-game time of a plot. All display names and lores are
 * read from the messages configuration under the "gui.time.*" keys, with '&'
 * color codes translated to section signs.
 */
public class TimeMenu {

	public TimeMenu() {}

	/**
	 * Opens the time selection inventory for the given player: twelve watch items
	 * (slots 0-11, midnight through 10 PM) plus a back button (slot 17).
	 *
	 * @param player the player the inventory is shown to
	 */
	public void show(Player player) {
		YamlConfiguration messages = SettingsManager.getInstance().getMessages();

		Inventory inventory = Bukkit.createInventory(null, 18, messages.getString("gui.time.title")
				.replaceAll("&", "§"));

		// the twelve selectable times of day, in slot order 0..11
		String[] timeKeys = {"midnight", "2am", "4am", "6am", "8am", "10am",
				"midday", "2pm", "4pm", "6pm", "8pm", "10pm"};
		for (int slot = 0; slot < timeKeys.length; slot++) {
			inventory.setItem(slot, createMenuItem(messages, Material.WATCH, "gui.time." + timeKeys[slot]));
		}

		// back button in the last slot
		inventory.setItem(17, createMenuItem(messages, Material.BOOK, "gui.time.back"));

		player.openInventory(inventory);
	}

	/**
	 * Builds one menu item whose display name and lores are read from the messages
	 * configuration at {@code path + ".name"} and {@code path + ".lores"}.
	 * Extracted to remove the thirteen near-identical construction stanzas.
	 */
	private ItemStack createMenuItem(YamlConfiguration messages, Material material, String path) {
		ItemStack item = new ItemStack(material, 1);
		ItemMeta meta = item.getItemMeta();
		meta.setDisplayName(messages.getString(path + ".name")
				.replaceAll("&", "§"));
		List<String> lores = new ArrayList<String>();
		for (String lore : messages.getStringList(path + ".lores")) {
			lores.add(lore
					.replaceAll("&", "§"));
		}
		meta.setLore(lores);
		item.setItemMeta(meta);
		return item;
	}
}
| me/stefvanschie/buildinggame/utils/guis/buildmenu/TimeMenu.java | package me.stefvanschie.buildinggame.utils.guis;
import java.util.ArrayList;
import java.util.List;
import me.stefvanschie.buildinggame.managers.arenas.ArenaManager;
import me.stefvanschie.buildinggame.utils.Time;
import me.stefvanschie.buildinggame.utils.plot.Plot;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
/**
 * GUI for selecting the in-game time of a plot. Twelve watch items (slots 0-11)
 * represent the selectable times; the one matching the plot's current time is
 * marked "Currently selected!". Slot 17 holds a back button.
 */
public class TimeMenu {

	public TimeMenu() {}

	/**
	 * Opens the time selection inventory for the given player.
	 *
	 * @param player the player the inventory is shown to
	 */
	public void show(Player player) {
		Inventory inventory = Bukkit.createInventory(null, 18, ChatColor.GREEN + "Time selection");

		Plot plot = ArenaManager.getInstance().getArena(player).getPlot(player);

		// display names and their matching Time constants, in slot order 0..11
		String[] names = {"Midnight", "2 AM", "4 AM", "6 AM", "8 AM", "10 AM",
				"Midday", "2 PM", "4 PM", "6 PM", "8 PM", "10 PM"};
		Time[] times = {Time.MIDNIGHT, Time.AM2, Time.AM4, Time.AM6, Time.AM8, Time.AM10,
				Time.MIDDAY, Time.PM2, Time.PM4, Time.PM6, Time.PM8, Time.PM10};
		for (int slot = 0; slot < names.length; slot++) {
			inventory.setItem(slot, createTimeItem(names[slot], plot.getTime() == times[slot]));
		}

		// back button
		ItemStack back = new ItemStack(Material.BOOK, 1);
		ItemMeta backMeta = back.getItemMeta();
		backMeta.setDisplayName(ChatColor.GREEN + "Back");
		List<String> backLores = new ArrayList<String>();
		backLores.add(ChatColor.GRAY + "Go back to the options menu");
		backMeta.setLore(backLores);
		back.setItemMeta(backMeta);
		inventory.setItem(17, back);

		player.openInventory(inventory);
	}

	/**
	 * Builds one time-selection watch item. Extracted to remove the twelve
	 * near-identical construction stanzas.
	 *
	 * @param name display name of the time ("Midnight", "2 AM", ...)
	 * @param selected whether this time is the plot's current time
	 */
	private ItemStack createTimeItem(String name, boolean selected) {
		ItemStack item = new ItemStack(Material.WATCH, 1);
		ItemMeta meta = item.getItemMeta();
		meta.setDisplayName(ChatColor.GREEN + name);
		List<String> lores = new ArrayList<String>();
		lores.add(ChatColor.GRAY + "Set the time of your build to " + name);
		if (selected) {
			lores.add(ChatColor.GOLD + "Currently selected!");
		}
		meta.setLore(lores);
		item.setItemMeta(meta);
		return item;
	}
}
| TimeMenu.java | me/stefvanschie/buildinggame/utils/guis/buildmenu/TimeMenu.java | TimeMenu.java |
|
Java | apache-2.0 | 37ccab62392f77afdbb2d1e9deba36cc61a8a43b | 0 | facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho | /**
* Copyright (c) 2014-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.litho;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Deque;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.annotation.VisibleForTesting;
import android.support.v4.util.LongSparseArray;
import android.support.v4.view.ViewCompat;
import android.text.TextUtils;
import android.util.SparseArray;
import android.view.View;
import android.view.View.MeasureSpec;
import android.view.ViewGroup;
import com.facebook.R;
import com.facebook.litho.config.ComponentsConfiguration;
import com.facebook.litho.reference.Reference;
import static android.support.v4.view.ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO;
import static android.view.View.MeasureSpec.makeMeasureSpec;
import static com.facebook.litho.Component.isHostSpec;
import static com.facebook.litho.Component.isMountViewSpec;
import static com.facebook.litho.ComponentHostUtils.maybeInvalidateAccessibilityState;
import static com.facebook.litho.ComponentHostUtils.maybeSetDrawableState;
import static com.facebook.litho.ComponentsLogger.ACTION_SUCCESS;
import static com.facebook.litho.ComponentsLogger.EVENT_MOUNT;
import static com.facebook.litho.ComponentsLogger.EVENT_PREPARE_MOUNT;
import static com.facebook.litho.ComponentsLogger.EVENT_SHOULD_UPDATE_REFERENCE_LAYOUT_MISMATCH;
import static com.facebook.litho.ComponentsLogger.PARAM_IS_DIRTY;
import static com.facebook.litho.ComponentsLogger.PARAM_LOG_TAG;
import static com.facebook.litho.ComponentsLogger.PARAM_MOUNTED_COUNT;
import static com.facebook.litho.ComponentsLogger.PARAM_MOVED_COUNT;
import static com.facebook.litho.ComponentsLogger.PARAM_NO_OP_COUNT;
import static com.facebook.litho.ComponentsLogger.PARAM_UNCHANGED_COUNT;
import static com.facebook.litho.ComponentsLogger.PARAM_UNMOUNTED_COUNT;
import static com.facebook.litho.ComponentsLogger.PARAM_UPDATED_COUNT;
import static com.facebook.litho.ThreadUtils.assertMainThread;
/**
* Encapsulates the mounted state of a {@link Component}. Provides APIs to update state
* by recycling existing UI elements e.g. {@link Drawable}s.
*
* @see #mount(LayoutState, Rect)
* @see ComponentView
* @see LayoutState
*/
class MountState {

  static final int ROOT_HOST_ID = 0;

  // Holds the current list of mounted items, keyed by layout-output id.
  // Should always be used within a draw lock.
  private final LongSparseArray<MountItem> mIndexToItemMap;

  // Holds a list with information about the components linked to the VisibilityOutputs that are
  // stored in LayoutState. An item is inserted in this map if its corresponding component is
  // visible. When the component exits the viewport, the item associated with it is removed from the
  // map.
  private final LongSparseArray<VisibilityItem> mVisibilityIdToItemMap;

  // Holds a list of MountItems that are currently mounted which can mount incrementally.
  private final LongSparseArray<MountItem> mCanMountIncrementallyMountItems;

  // A map from test key to a list of one or more `TestItem`s which is only allocated
  // and populated during test runs.
  private final Map<String, Deque<TestItem>> mTestItemMap;

  // Ids of the mountable outputs of the currently-mounted LayoutState, in mount order.
  private long[] mLayoutOutputsIds;

  // True if we are receiving a new LayoutState and we need to completely
  // refresh the content of the HostComponent. Always set from the main thread.
  private boolean mIsDirty;

  // Holds the list of known component hosts during a mount pass, keyed by layout-output id.
  private final LongSparseArray<ComponentHost> mHostsByMarker = new LongSparseArray<>();

  // Shared scratch rect for bounds computations; safe because mounting is main-thread only.
  private static final Rect sTempRect = new Rect();

  private final ComponentContext mContext;
  private final ComponentView mComponentView;
  // Visible rect used by the previous incremental-mount pass.
  private final Rect mPreviousLocalVisibleRect = new Rect();
  private final PrepareMountStats mPrepareMountStats = new PrepareMountStats();
  private final MountStats mMountStats = new MountStats();
  private TransitionManager mTransitionManager;
  // Indices into LayoutState's sorted tops/bottoms lists recorded by the previous pass.
  private int mPreviousTopsIndex;
  private int mPreviousBottomsIndex;
  private int mLastMountedComponentTreeId;
  // Mount item representing the top-level ComponentView itself; always implicitly mounted.
  private final MountItem mRootHostMountItem;
  /**
   * Creates a MountState bound to the given {@link ComponentView}. The state starts dirty so the
   * first {@link #mount(LayoutState, Rect)} performs a full pass.
   *
   * @param view the host view whose component tree will be mounted by this state
   */
  public MountState(ComponentView view) {
    mIndexToItemMap = new LongSparseArray<>();
    mVisibilityIdToItemMap = new LongSparseArray<>();
    mCanMountIncrementallyMountItems = new LongSparseArray<>();
    mContext = (ComponentContext) view.getContext();
    mComponentView = view;
    mIsDirty = true;

    // Only track test items when running end-to-end tests; otherwise skip the allocation.
    mTestItemMap = ComponentsConfiguration.isEndToEndTestRun
        ? new HashMap<String, Deque<TestItem>>()
        : null;

    // The mount item representing the top-level ComponentView which
    // is always automatically mounted.
    mRootHostMountItem = ComponentsPools.acquireRootHostMountItem(
        HostComponent.create(),
        mComponentView,
        mComponentView);
  }
/**
* To be called whenever the components needs to start the mount process from scratch
* e.g. when the component's props or layout change or when the components
* gets attached to a host.
*/
void setDirty() {
assertMainThread();
mIsDirty = true;
mPreviousLocalVisibleRect.setEmpty();
}
  /** Returns true when a full (dirty) mount pass is pending. Must be called on the main thread. */
  boolean isDirty() {
    assertMainThread();
    return mIsDirty;
  }
  /**
   * Mount the layoutState on the pre-set HostView.
   *
   * @param layoutState the layout to reflect in the mounted hierarchy
   * @param localVisibleRect If this variable is null, then mount everything, since incremental
   *                         mount is not enabled. Otherwise mount only what the rect (in local
   *                         coordinates) contains.
   */
  void mount(LayoutState layoutState, Rect localVisibleRect) {
    assertMainThread();

    ComponentsSystrace.beginSection("mount");

    final ComponentTree componentTree = mComponentView.getComponent();
    final ComponentsLogger logger = componentTree.getContext().getLogger();

    if (logger != null) {
      logger.eventStart(EVENT_MOUNT, componentTree);
    }

    // Set up transitions and snapshot the pre-mount positions of items with transition keys.
    prepareTransitionManager(layoutState);
    if (mTransitionManager != null) {
      if (mIsDirty) {
        mTransitionManager.onNewTransitionContext(layoutState.getTransitionContext());
      }

      mTransitionManager.onMountStart();
      recordMountedItemsWithTransitionKeys(
          mTransitionManager,
          mIndexToItemMap,
          true /* isPreMount */);
    }

    if (mIsDirty) {
      suppressInvalidationsOnHosts(true);

      // Prepare the data structure for the new LayoutState and removes mountItems
      // that are not present anymore if isUpdateMountInPlace is enabled.
      prepareMount(layoutState);
    }

    mMountStats.reset();

    final int componentTreeId = layoutState.getComponentTreeId();
    final boolean isIncrementalMountEnabled = localVisibleRect != null;

    // Fast path first: performIncrementalMount only touches items that crossed the viewport edge
    // since the last pass; if it declines, do a full traversal of all mountable outputs.
    if (!isIncrementalMountEnabled ||
        !performIncrementalMount(layoutState, localVisibleRect)) {
      for (int i = 0, size = layoutState.getMountableOutputCount(); i < size; i++) {
        final LayoutOutput layoutOutput = layoutState.getMountableOutputAt(i);
        final Component component = layoutOutput.getComponent();
        ComponentsSystrace.beginSection(component.getSimpleName());
        final MountItem currentMountItem = getItemAt(i);

        final boolean isMounted = currentMountItem != null;
        // An output is mountable when incremental mount is off, when it is a host that still
        // holds mounted children, or when it intersects the visible rect.
        final boolean isMountable =
            !isIncrementalMountEnabled ||
            isMountedHostWithChildContent(currentMountItem) ||
            Rect.intersects(localVisibleRect, layoutOutput.getBounds());

        if (isMountable && !isMounted) {
          mountLayoutOutput(i, layoutOutput, layoutState);
        } else if (!isMountable && isMounted) {
          unmountItem(mContext, i, mHostsByMarker);
        } else if (isMounted) {
          if (isIncrementalMountEnabled && canMountIncrementally(component)) {
            mountItemIncrementally(currentMountItem, layoutOutput.getBounds(), localVisibleRect);
          }

          if (mIsDirty) {
            // The LayoutOutput's update state is only trustworthy when it was computed against
            // the same component tree that is currently mounted.
            final boolean useUpdateValueFromLayoutOutput =
                (componentTreeId >= 0) && (componentTreeId == mLastMountedComponentTreeId);

            final boolean itemUpdated = updateMountItemIfNeeded(
                layoutOutput,
                currentMountItem,
                useUpdateValueFromLayoutOutput,
                logger);

            if (itemUpdated) {
              mMountStats.updatedCount++;
            } else {
              mMountStats.noOpCount++;
            }
          }
        }

        ComponentsSystrace.endSection();
      }

      if (isIncrementalMountEnabled) {
        setupPreviousMountableOutputData(layoutState, localVisibleRect);
      }
    }

    mIsDirty = false;
    if (localVisibleRect != null) {
      mPreviousLocalVisibleRect.set(localVisibleRect);
    }

    processVisibilityOutputs(layoutState, localVisibleRect);

    // Snapshot post-mount positions and run any enter/exit/change transitions.
    if (mTransitionManager != null) {
      recordMountedItemsWithTransitionKeys(
          mTransitionManager,
          mIndexToItemMap,
          false /* isPreMount */);
      mTransitionManager.processTransitions();
    }

    processTestOutputs(layoutState);

    suppressInvalidationsOnHosts(false);

    mLastMountedComponentTreeId = componentTreeId;

    if (logger != null) {
      final String logTag = componentTree.getContext().getLogTag();
      logMountEnd(logger, logTag, componentTree, mMountStats);
    }

    ComponentsSystrace.endSection();
  }
  /**
   * Walks all VisibilityOutputs of the layout, dispatching visible/focused/full-impression events
   * for components entering the viewport and invisible events for components leaving it.
   * No-op when incremental mount is disabled (localVisibleRect == null).
   */
  private void processVisibilityOutputs(LayoutState layoutState, Rect localVisibleRect) {
    if (localVisibleRect == null) {
      return;
    }

    for (int j = 0, size = layoutState.getVisibilityOutputCount(); j < size; j++) {
      final VisibilityOutput visibilityOutput = layoutState.getVisibilityOutputAt(j);

      final EventHandler visibleHandler = visibilityOutput.getVisibleEventHandler();
      final EventHandler focusedHandler = visibilityOutput.getFocusedEventHandler();
      final EventHandler fullImpressionHandler = visibilityOutput.getFullImpressionEventHandler();
      final EventHandler invisibleHandler = visibilityOutput.getInvisibleEventHandler();
      final long visibilityOutputId = visibilityOutput.getId();
      final Rect visibilityOutputBounds = visibilityOutput.getBounds();

      // intersect() both tests visibility and clips sTempRect to the visible portion.
      sTempRect.set(visibilityOutputBounds);
      final boolean isCurrentlyVisible = sTempRect.intersect(localVisibleRect);

      VisibilityItem visibilityItem = mVisibilityIdToItemMap.get(visibilityOutputId);
      if (isCurrentlyVisible) {
        // The component is visible now, but used to be outside the viewport.
        if (visibilityItem == null) {
          visibilityItem = ComponentsPools.acquireVisibilityItem(invisibleHandler);
          mVisibilityIdToItemMap.put(visibilityOutputId, visibilityItem);

          if (visibleHandler != null) {
            EventDispatcherUtils.dispatchOnVisible(visibleHandler);
          }
        }

        // Check if the component has entered the focused range.
        if (focusedHandler != null && !visibilityItem.isInFocusedRange()) {
          final View parent = (View) mComponentView.getParent();

          if (hasEnteredFocusedRange(
              parent.getWidth(),
              parent.getHeight(),
              visibilityOutputBounds,
              sTempRect)) {
            visibilityItem.setIsInFocusedRange();
            EventDispatcherUtils.dispatchOnFocused(focusedHandler);
          }
        }

        // If the component has not entered the full impression range yet, make sure to update the
        // information about the visible edges.
        if (fullImpressionHandler != null && !visibilityItem.isInFullImpressionRange()) {
          visibilityItem.setVisibleEdges(visibilityOutputBounds, sTempRect);

          if (visibilityItem.isInFullImpressionRange()) {
            EventDispatcherUtils.dispatchOnFullImpression(fullImpressionHandler);
          }
        }
      } else if (visibilityItem != null) {
        // The component is invisible now, but used to be visible.
        if (invisibleHandler != null) {
          EventDispatcherUtils.dispatchOnInvisible(invisibleHandler);
        }

        mVisibilityIdToItemMap.remove(visibilityOutputId);
        ComponentsPools.release(visibilityItem);
      }
    }
  }
/**
* Clears and re-populates the test item map if we are in e2e test mode.
*/
private void processTestOutputs(LayoutState layoutState) {
if (mTestItemMap == null) {
return;
}
for (Collection<TestItem> items : mTestItemMap.values()) {
for (TestItem item : items) {
ComponentsPools.release(item);
}
}
mTestItemMap.clear();
for (int i = 0, size = layoutState.getTestOutputCount(); i < size; i++) {
final TestOutput testOutput = layoutState.getTestOutputAt(i);
final long hostMarker = testOutput.getHostMarker();
final long layoutOutputId = testOutput.getLayoutOutputId();
final MountItem mountItem =
layoutOutputId == -1 ? null : mIndexToItemMap.get(layoutOutputId);
final TestItem testItem = ComponentsPools.acquireTestItem();
testItem.setHost(hostMarker == -1 ? null : mHostsByMarker.get(hostMarker));
testItem.setBounds(testOutput.getBounds());
testItem.setTestKey(testOutput.getTestKey());
testItem.setContent(mountItem == null ? null : mountItem.getContent());
final Deque<TestItem> items = mTestItemMap.get(testOutput.getTestKey());
final Deque<TestItem> updatedItems =
items == null ? new LinkedList<TestItem>() : items;
updatedItems.add(testItem);
mTestItemMap.put(testOutput.getTestKey(), updatedItems);
}
}
private boolean isMountedHostWithChildContent(MountItem mountItem) {
if (mountItem == null) {
return false;
}
final Object content = mountItem.getContent();
if (!(content instanceof ComponentHost)) {
return false;
}
final ComponentHost host = (ComponentHost) content;
return host.getMountItemCount() > 0;
}
private void setupPreviousMountableOutputData(LayoutState layoutState, Rect localVisibleRect) {
if (localVisibleRect.isEmpty()) {
return;
}
final ArrayList<LayoutOutput> layoutOutputTops = layoutState.getMountableOutputTops();
final ArrayList<LayoutOutput> layoutOutputBottoms = layoutState.getMountableOutputBottoms();
final int mountableOutputCount = layoutState.getMountableOutputCount();
mPreviousTopsIndex = layoutState.getMountableOutputCount();
for (int i = 0; i < mountableOutputCount; i++) {
if (localVisibleRect.bottom <= layoutOutputTops.get(i).getBounds().top) {
mPreviousTopsIndex = i;
break;
}
}
mPreviousBottomsIndex = layoutState.getMountableOutputCount();
for (int i = 0; i < mountableOutputCount; i++) {
if (localVisibleRect.top < layoutOutputBottoms.get(i).getBounds().bottom) {
mPreviousBottomsIndex = i;
break;
}
}
}
private void clearVisibilityItems() {
for (int i = mVisibilityIdToItemMap.size() - 1; i >= 0; i--) {
final VisibilityItem visibilityItem = mVisibilityIdToItemMap.valueAt(i);
final EventHandler invisibleHandler = visibilityItem.getInvisibleHandler();
if (invisibleHandler != null) {
EventDispatcherUtils.dispatchOnInvisible(invisibleHandler);
}
mVisibilityIdToItemMap.removeAt(i);
ComponentsPools.release(visibilityItem);
}
}
  /**
   * Tracks a newly mounted host under its layout-output id. Invalidations are suppressed
   * immediately and lifted when the mount pass finishes
   * (see {@link #suppressInvalidationsOnHosts(boolean)}).
   */
  private void registerHost(long id, ComponentHost host) {
    host.suppressInvalidations(true);

    mHostsByMarker.put(id, host);
  }
/**
* Returns true if the component has entered the focused visible range.
*/
static boolean hasEnteredFocusedRange(
int viewportWidth,
int viewportHeight,
Rect componentBounds,
Rect componentVisibleBounds) {
final int halfViewportArea = viewportWidth * viewportHeight / 2;
final int totalComponentArea = computeRectArea(componentBounds);
final int visibleComponentArea = computeRectArea(componentVisibleBounds);
// The component has entered the focused range either if it is larger than half of the viewport
// and it occupies at least half of the viewport or if it is smaller than half of the viewport
// and it is fully visible.
return (totalComponentArea >= halfViewportArea)
? (visibleComponentArea >= halfViewportArea)
: componentBounds.equals(componentVisibleBounds);
}
private static int computeRectArea(Rect rect) {
return rect.isEmpty() ? 0 : (rect.width() * rect.height());
}
private void suppressInvalidationsOnHosts(boolean suppressInvalidations) {
for (int i = mHostsByMarker.size() - 1; i >= 0; i--) {
mHostsByMarker.valueAt(i).suppressInvalidations(suppressInvalidations);
}
}
  /**
   * Refreshes an already-mounted item against a new LayoutOutput: unbinds, optionally remounts
   * content and view attributes, rebinds, and re-applies bounds.
   *
   * @return true when the mounted content itself was remounted; false when only rebind/bounds
   *         were refreshed
   */
  private boolean updateMountItemIfNeeded(
      LayoutOutput layoutOutput,
      MountItem currentMountItem,
      boolean useUpdateValueFromLayoutOutput,
      ComponentsLogger logger) {
    final Component layoutOutputComponent = layoutOutput.getComponent();
    final Component itemComponent = currentMountItem.getComponent();

    // 1. Check if the mount item generated from the old component should be updated.
    final boolean shouldUpdate = shouldUpdateMountItem(
        layoutOutput,
        currentMountItem,
        useUpdateValueFromLayoutOutput,
        mIndexToItemMap,
        mLayoutOutputsIds,
        logger);

    // 2. Reset all the properties like click handler, content description and tags related to
    // this item if it needs to be updated. Step 5 will re-set the new ones.
    if (shouldUpdate) {
      unsetViewAttributes(currentMountItem);
    }

    // 3. We will re-bind this later in step 6 regardless, so make sure it's currently unbound.
    if (currentMountItem.isBound()) {
      itemComponent.getLifecycle().onUnbind(
          itemComponent.getScopedContext(),
          currentMountItem.getContent(),
          itemComponent);
      currentMountItem.setIsBound(false);
    }

    // 4. Re-initialize the MountItem internal state with the new attributes from LayoutOutput.
    currentMountItem.init(layoutOutput.getComponent(), currentMountItem, layoutOutput);

    // 5. If the mount item is not valid for this component update its content and view attributes.
    if (shouldUpdate) {
      updateMountedContent(currentMountItem, layoutOutput, itemComponent);
      setViewAttributes(currentMountItem);
    }

    final Object currentContent = currentMountItem.getContent();

    // 6. Set the mounted content on the Component and call the bind callback.
    layoutOutputComponent.getLifecycle().bind(
        layoutOutputComponent.getScopedContext(),
        currentContent,
        layoutOutputComponent);
    currentMountItem.setIsBound(true);

    // 7. Update the bounds of the mounted content. This needs to be done regardless of whether
    // the component has been updated or not since the mounted item might have the same
    // size and content but a different position.
    updateBoundsForMountedLayoutOutput(layoutOutput, currentMountItem);

    maybeInvalidateAccessibilityState(currentMountItem);
    if (currentMountItem.getContent() instanceof Drawable) {
      maybeSetDrawableState(
          currentMountItem.getHost(),
          (Drawable) currentMountItem.getContent(),
          currentMountItem.getFlags(),
          currentMountItem.getNodeInfo());
    }

    if (currentMountItem.getDisplayListDrawable() != null) {
      currentMountItem.getDisplayListDrawable().suppressInvalidations(false);
    }

    return shouldUpdate;
  }
  /**
   * Decides whether a mounted item must have its content remounted for the new LayoutOutput.
   * Consults, in order: mount-size dependence, the LayoutOutput's precomputed update state (when
   * trustworthy), and finally the component's own shouldComponentUpdate.
   */
  private static boolean shouldUpdateMountItem(
      LayoutOutput layoutOutput,
      MountItem currentMountItem,
      boolean useUpdateValueFromLayoutOutput,
      LongSparseArray<MountItem> indexToItemMap,
      long[] layoutOutputsIds,
      ComponentsLogger logger) {
    final @LayoutOutput.UpdateState int updateState = layoutOutput.getUpdateState();
    final Component currentComponent = currentMountItem.getComponent();
    final ComponentLifecycle currentLifecycle = currentComponent.getLifecycle();
    final Component nextComponent = layoutOutput.getComponent();
    final ComponentLifecycle nextLifecycle = nextComponent.getLifecycle();

    // If the two components have different sizes and the mounted content depends on the size we
    // just return true immediately.
    if (!sameSize(layoutOutput, currentMountItem) && nextLifecycle.isMountSizeDependent()) {
      return true;
    }

    if (useUpdateValueFromLayoutOutput) {
      if (updateState == LayoutOutput.STATE_UPDATED) {

        // Check for incompatible ReferenceLifecycle.
        if (currentLifecycle instanceof DrawableComponent
            && nextLifecycle instanceof DrawableComponent
            && currentLifecycle.shouldComponentUpdate(currentComponent, nextComponent)) {

          // Layout said "updated in place" but the drawables disagree: log the mismatch with the
          // positions of both ids in the outputs array for debugging.
          if (logger != null) {
            ComponentsLogger.LayoutOutputLog logObj = new ComponentsLogger.LayoutOutputLog();

            logObj.currentId = indexToItemMap.keyAt(
                indexToItemMap.indexOfValue(currentMountItem));
            logObj.currentLifecycle = currentLifecycle.toString();

            logObj.nextId = layoutOutput.getId();
            logObj.nextLifecycle = nextLifecycle.toString();

            // Record the first and last index at which each id occurs (ids may be duplicated).
            for (int i = 0; i < layoutOutputsIds.length; i++) {
              if (layoutOutputsIds[i] == logObj.currentId) {
                if (logObj.currentIndex == -1) {
                  logObj.currentIndex = i;
                }

                logObj.currentLastDuplicatedIdIndex = i;
              }
            }

            if (logObj.nextId == logObj.currentId) {
              logObj.nextIndex = logObj.currentIndex;
              logObj.nextLastDuplicatedIdIndex = logObj.currentLastDuplicatedIdIndex;
            } else {
              for (int i = 0; i < layoutOutputsIds.length; i++) {
                if (layoutOutputsIds[i] == logObj.nextId) {
                  if (logObj.nextIndex == -1) {
                    logObj.nextIndex = i;
                  }

                  logObj.nextLastDuplicatedIdIndex = i;
                }
              }
            }

            logger.eventStart(EVENT_SHOULD_UPDATE_REFERENCE_LAYOUT_MISMATCH, logObj);
            logger
                .eventEnd(EVENT_SHOULD_UPDATE_REFERENCE_LAYOUT_MISMATCH, logObj, ACTION_SUCCESS);
          }

          return true;
        }

        return false;
      } else if (updateState == LayoutOutput.STATE_DIRTY) {
        return true;
      }
    }

    // Components that don't implement shouldUpdate on mount are always remounted.
    if (!currentLifecycle.callsShouldUpdateOnMount()) {
      return true;
    }

    return currentLifecycle.shouldComponentUpdate(
        currentComponent,
        nextComponent);
  }
private static boolean sameSize(LayoutOutput layoutOutput, MountItem item) {
final Rect layoutOutputBounds = layoutOutput.getBounds();
final Object mountedContent = item.getContent();
return layoutOutputBounds.width() == getWidthForMountedContent(mountedContent) &&
layoutOutputBounds.height() == getHeightForMountedContent(mountedContent);
}
private static int getWidthForMountedContent(Object content) {
return content instanceof Drawable ?
((Drawable) content).getBounds().width() :
((View) content).getWidth();
}
private static int getHeightForMountedContent(Object content) {
return content instanceof Drawable ?
((Drawable) content).getBounds().height() :
((View) content).getHeight();
}
private void updateBoundsForMountedLayoutOutput(LayoutOutput layoutOutput, MountItem item) {
// MountState should never update the bounds of the top-level host as this
// should be done by the ViewGroup containing the ComponentView.
if (layoutOutput.getId() == ROOT_HOST_ID) {
return;
}
layoutOutput.getMountBounds(sTempRect);
final boolean forceTraversal = Component.isMountViewSpec(layoutOutput.getComponent())
&& ((View) item.getContent()).isLayoutRequested();
applyBoundsToMountContent(
item.getContent(),
sTempRect.left,
sTempRect.top,
sTempRect.right,
sTempRect.bottom,
forceTraversal /* force */);
}
  /**
   * Prepare the {@link MountState} to mount a new {@link LayoutState}: unmounts or moves stale
   * items, (re-)registers the root host, and records the new layout-output ids.
   */
  @SuppressWarnings("unchecked")
  private void prepareMount(LayoutState layoutState) {
    final ComponentTree component = mComponentView.getComponent();
    final ComponentsLogger logger = component.getContext().getLogger();
    final String logTag = component.getContext().getLogTag();

    if (logger != null) {
      logger.eventStart(EVENT_PREPARE_MOUNT, component);
    }

    PrepareMountStats stats = unmountOrMoveOldItems(layoutState);

    if (logger != null) {
      logPrepareMountParams(logger, logTag, component, stats);
    }

    if (mHostsByMarker.get(ROOT_HOST_ID) == null) {
      // Mounting always starts with the root host.
      registerHost(ROOT_HOST_ID, mComponentView);

      // Root host is implicitly marked as mounted.
      mIndexToItemMap.put(ROOT_HOST_ID, mRootHostMountItem);
    }

    // Reallocate the ids array only when the output count changed.
    int outputCount = layoutState.getMountableOutputCount();
    if (mLayoutOutputsIds == null || outputCount != mLayoutOutputsIds.length) {
      mLayoutOutputsIds = new long[layoutState.getMountableOutputCount()];
    }

    for (int i = 0; i < outputCount; i++) {
      mLayoutOutputsIds[i] = layoutState.getMountableOutputAt(i).getId();
    }

    if (logger != null) {
      logger.eventEnd(EVENT_PREPARE_MOUNT, component, ACTION_SUCCESS);
    }
  }
/**
* Determine whether to apply disappear animation to the given {@link MountItem}
*/
private static boolean isItemDisappearing(
MountItem mountItem,
TransitionContext transitionContext) {
if (mountItem == null
|| mountItem.getViewNodeInfo() == null
|| transitionContext == null) {
return false;
}
return transitionContext.isDisappearingKey(mountItem.getViewNodeInfo().getTransitionKey());
}
  /**
   * Go over all the mounted items from the leaves to the root and unmount only the items that are
   * not present in the new LayoutOutputs.
   * If an item is still present but in a new position move the item inside its host.
   * The condition where an item changed host doesn't need any special treatment here since we
   * mark them as removed and re-added when calculating the new LayoutOutputs.
   *
   * @return per-pass statistics (unmounted / moved / unchanged counts)
   */
  private PrepareMountStats unmountOrMoveOldItems(LayoutState newLayoutState) {
    mPrepareMountStats.reset();

    if (mLayoutOutputsIds == null) {
      return mPrepareMountStats;
    }

    // Traversing from the beginning since mLayoutOutputsIds unmounting won't remove entries there
    // but only from mIndexToItemMap. If an host changes we're going to unmount it and recursively
    // all its mounted children.
    for (int i = 0; i < mLayoutOutputsIds.length; i++) {
      final int newPosition = newLayoutState.getLayoutOutputPositionForId(mLayoutOutputsIds[i]);
      final MountItem oldItem = getItemAt(i);

      if (isItemDisappearing(oldItem, newLayoutState.getTransitionContext())) {
        startUnmountDisappearingItem(i, oldItem.getViewNodeInfo().getTransitionKey());

        final int lastDescendantOfItem = findLastDescendantOfItem(i, oldItem);
        // Disassociate disappearing items from current mounted items. The layout tree will not
        // contain disappearing items anymore, however they are kept separately in their hosts.
        removeDisappearingItemMappings(i, lastDescendantOfItem);

        // Skip this disappearing item and all its descendants. Do not unmount or move them yet.
        // We will unmount them after animation is completed.
        i = lastDescendantOfItem;
        continue;
      }

      if (newPosition == -1) {
        // Output id no longer exists in the new layout: unmount.
        unmountItem(mContext, i, mHostsByMarker);
        mPrepareMountStats.unmountedCount++;
      } else {
        final long newHostMarker = newLayoutState.getMountableOutputAt(newPosition).getHostMarker();

        if (oldItem == null) {
          // This was previously unmounted.
          mPrepareMountStats.unmountedCount++;
        } else if (oldItem.getHost() != mHostsByMarker.get(newHostMarker)) {
          // If the id is the same but the parent host is different we simply unmount the item and
          // re-mount it later. If the item to unmount is a ComponentHost, all the children will be
          // recursively unmounted.
          unmountItem(mContext, i, mHostsByMarker);
          mPrepareMountStats.unmountedCount++;
        } else if (newPosition != i) {
          // If a MountItem for this id exists and the hostMarker has not changed but its position
          // in the outputs array has changed we need to update the position in the Host to ensure
          // the z-ordering.
          oldItem.getHost().moveItem(oldItem, i, newPosition);
          mPrepareMountStats.movedCount++;
        } else {
          mPrepareMountStats.unchangedCount++;
        }
      }
    }

    return mPrepareMountStats;
  }
private void removeDisappearingItemMappings(int fromIndex, int toIndex) {
for (int i = fromIndex; i <= toIndex; i++) {
final MountItem item = getItemAt(i);
// We do not need this mapping for disappearing items.
mIndexToItemMap.remove(mLayoutOutputsIds[i]);
// Likewise we no longer need host mapping for disappearing items.
if (isHostSpec(item.getComponent())) {
mHostsByMarker
.removeAt(mHostsByMarker.indexOfValue((ComponentHost) item.getContent()));
}
}
}
/**
* Find the index of last descendant of given {@link MountItem}
*/
private int findLastDescendantOfItem(int disappearingItemIndex, MountItem item) {
for (int i = disappearingItemIndex + 1; i < mLayoutOutputsIds.length; i++) {
if (!ComponentHostUtils.hasAncestorHost(
getItemAt(i).getHost(),
(ComponentHost) item.getContent())) {
// No need to go further as the items that have common ancestor hosts are co-located.
// This is the first non-descendant of given MountItem, therefore last descendant is the
// item before.
return i - 1;
}
}
return mLayoutOutputsIds.length - 1;
}
private void updateMountedContent(
MountItem item,
LayoutOutput layoutOutput,
Component previousComponent) {
final Component<?> component = layoutOutput.getComponent();
if (isHostSpec(component)) {
return;
}
final Object previousContent = item.getContent();
final ComponentLifecycle lifecycle = component.getLifecycle();
// Call unmount and mount in sequence to make sure all the the resources are correctly
// de-allocated. It's possible for previousContent to equal null - when the root is
// interactive we create a LayoutOutput without content in order to set up click handling.
lifecycle.unmount(previousComponent.getScopedContext(), previousContent, previousComponent);
lifecycle.mount(component.getScopedContext(), previousContent, component);
}
  /**
   * Mounts a single LayoutOutput at the given position: resolves (mounting recursively if
   * necessary) its host, creates/acquires the content, mounts, binds, and applies bounds.
   */
  private void mountLayoutOutput(int index, LayoutOutput layoutOutput, LayoutState layoutState) {
    // 1. Resolve the correct host to mount our content to.
    ComponentHost host = resolveComponentHost(layoutOutput, mHostsByMarker);

    if (host == null) {
      // Host has not yet been mounted - mount it now. Find the host's own output by marker and
      // mount it recursively (which may in turn mount its host, and so on up to the root).
      for (int hostMountIndex = 0, size = mLayoutOutputsIds.length;
           hostMountIndex < size;
           hostMountIndex++) {
        if (mLayoutOutputsIds[hostMountIndex] == layoutOutput.getHostMarker()) {
          final LayoutOutput hostLayoutOutput = layoutState.getMountableOutputAt(hostMountIndex);
          mountLayoutOutput(hostMountIndex, hostLayoutOutput, layoutState);
          break;
        }
      }

      host = resolveComponentHost(layoutOutput, mHostsByMarker);
    }

    final Component<?> component = layoutOutput.getComponent();
    final ComponentLifecycle lifecycle = component.getLifecycle();

    // 2. Generate the component's mount state (this might also be a ComponentHost View).
    Object content = acquireMountContent(component, host);
    if (content == null) {
      content = lifecycle.createMountContent(mContext);
    }

    lifecycle.mount(
        component.getScopedContext(),
        content,
        component);

    // 3. If it's a ComponentHost, add the mounted View to the list of Hosts.
    if (isHostSpec(component)) {
      ComponentHost componentHost = (ComponentHost) content;
      componentHost.setParentHostMarker(layoutOutput.getHostMarker());
      registerHost(layoutOutput.getId(), componentHost);
    }

    // 4. Mount the content into the selected host.
    final MountItem item = mountContent(index, component, content, host, layoutOutput);

    // 5. Notify the component that mounting has completed
    lifecycle.bind(component.getScopedContext(), content, component);
    item.setIsBound(true);

    // 6. Apply the bounds to the Mount content now. It's important to do so after bind as calling
    // bind might have triggered a layout request within a View.
    layoutOutput.getMountBounds(sTempRect);
    applyBoundsToMountContent(
        content,
        sTempRect.left,
        sTempRect.top,
        sTempRect.right,
        sTempRect.bottom,
        true /* force */);

    if (item.getDisplayListDrawable() != null) {
      item.getDisplayListDrawable().suppressInvalidations(false);
    }

    // 7. Update the mount stats
    mMountStats.mountedCount++;
  }
// The content might be null because it's the LayoutSpec for the root host
// (the very first LayoutOutput).
private MountItem mountContent(
int index,
Component<?> component,
Object content,
ComponentHost host,
LayoutOutput layoutOutput) {
final MountItem item = ComponentsPools.acquireMountItem(
component,
host,
content,
layoutOutput);
// Create and keep a MountItem even for the layoutSpec with null content
// that sets the root host interactions.
mIndexToItemMap.put(mLayoutOutputsIds[index], item);
if (component.getLifecycle().canMountIncrementally()) {
mCanMountIncrementallyMountItems.put(index, item);
}
layoutOutput.getMountBounds(sTempRect);
host.mount(index, item, sTempRect);
setViewAttributes(item);
return item;
}
private Object acquireMountContent(Component<?> component, ComponentHost host) {
final ComponentLifecycle lifecycle = component.getLifecycle();
if (isHostSpec(component)) {
return host.recycleHost();
}
return ComponentsPools.acquireMountContent(mContext, lifecycle.getId());
}
/**
 * Applies the given bounds to the mounted content (measure + layout for a View,
 * setBounds for a Drawable). Must run on the main thread.
 *
 * @param content the mounted View or Drawable.
 * @param left/top/right/bottom target bounds relative to the host.
 * @param force re-measure and re-layout even if the current size/position already match.
 * @throws IllegalStateException if the content is neither a View nor a Drawable.
 */
private static void applyBoundsToMountContent(
    Object content,
    int left,
    int top,
    int right,
    int bottom,
    boolean force) {
  assertMainThread();
  if (content instanceof View) {
    final View view = (View) content;
    final int width = right - left;
    final int height = bottom - top;
    if (force || view.getMeasuredHeight() != height || view.getMeasuredWidth() != width) {
      // Reuse the already-computed width/height instead of re-deriving them.
      view.measure(
          makeMeasureSpec(width, MeasureSpec.EXACTLY),
          makeMeasureSpec(height, MeasureSpec.EXACTLY));
    }
    if (force ||
        view.getLeft() != left ||
        view.getTop() != top ||
        view.getRight() != right ||
        view.getBottom() != bottom) {
      view.layout(left, top, right, bottom);
    }
  } else if (content instanceof Drawable) {
    ((Drawable) content).setBounds(left, top, right, bottom);
  } else {
    throw new IllegalStateException("Unsupported mounted content " + content);
  }
}
/** Whether the given component's lifecycle supports incremental mount. */
private static boolean canMountIncrementally(Component<?> component) {
return component.getLifecycle().canMountIncrementally();
}
/**
 * Resolves the component host that will be used for the given layout output
 * being mounted, by looking up the output's host marker.
 */
private static ComponentHost resolveComponentHost(
    LayoutOutput layoutOutput,
    LongSparseArray<ComponentHost> hostsByMarker) {
  return hostsByMarker.get(layoutOutput.getHostMarker());
}
/**
 * Applies click/long-click/touch handlers, accessibility delegate, tags and
 * view-level properties from the item's NodeInfo/ViewNodeInfo onto the mounted
 * View. No-op for non-view mount specs.
 */
private static void setViewAttributes(MountItem item) {
final Component<?> component = item.getComponent();
if (!isMountViewSpec(component)) {
return;
}
final View view = (View) item.getContent();
final NodeInfo nodeInfo = item.getNodeInfo();
if (nodeInfo != null) {
// 1. Setup click handler for the component, if applicable.
setClickHandler(nodeInfo.getClickHandler(), view);
// 2. Setup long click handler for the component, if applicable.
setLongClickHandler(nodeInfo.getLongClickHandler(), view);
// 3. Setup touch handler for the component, if applicable.
setTouchHandler(nodeInfo.getTouchHandler(), view);
// 4. Set listeners for AccessibilityDelegate methods
setAccessibilityDelegate(view, nodeInfo);
// 5. Setup view tags for the component, if applicable.
setViewTag(view, nodeInfo.getViewTag());
setViewTags(view, nodeInfo.getViewTags());
// 6. Set content description.
setContentDescription(view, nodeInfo.getContentDescription());
// 7. Set setFocusable flag.
setFocusable(view, nodeInfo.getFocusState());
}
// 8. Set important for accessibility flag
setImportantForAccessibility(view, item.getImportantForAccessibility());
final ViewNodeInfo viewNodeInfo = item.getViewNodeInfo();
if (viewNodeInfo != null && !isHostSpec(component)) {
// 9. Set view background, if applicable. Do this before padding
// as it otherwise overrides the padding.
setViewBackground(view, viewNodeInfo);
// 10. Set view padding, if applicable.
setViewPadding(view, viewNodeInfo);
// 11. Set view foreground, if applicable.
setViewForeground(view, viewNodeInfo);
// 12. Set view layout direction, if applicable.
setViewLayoutDirection(view, viewNodeInfo);
}
}
/**
 * Undoes the view-level attributes applied by {@link #setViewAttributes(MountItem)}
 * so the view can be safely recycled. No-op for non-view mount specs.
 */
private static void unsetViewAttributes(MountItem item) {
final Component<?> component = item.getComponent();
if (!isMountViewSpec(component)) {
return;
}
final View view = (View) item.getContent();
final NodeInfo nodeInfo = item.getNodeInfo();
if (nodeInfo != null) {
// Reset the click handler.
if (nodeInfo.getClickHandler() != null) {
unsetClickHandler(view);
}
// Reset the long click handler.
if (nodeInfo.getLongClickHandler() != null) {
unsetLongClickHandler(view);
}
// Reset the touch handler.
if (nodeInfo.getTouchHandler() != null) {
unsetTouchHandler(view);
}
// Reset the view tags.
unsetViewTag(view);
unsetViewTags(view, nodeInfo.getViewTags());
// Reset content description.
if (!TextUtils.isEmpty(nodeInfo.getContentDescription())) {
unsetContentDescription(view);
}
}
// Reset isClickable flag.
view.setClickable(MountItem.isViewClickable(item.getFlags()));
// Reset isLongClickable flag.
view.setLongClickable(MountItem.isViewLongClickable(item.getFlags()));
// Reset setFocusable flag.
unsetFocusable(view, item);
if (item.getImportantForAccessibility() != IMPORTANT_FOR_ACCESSIBILITY_AUTO) {
unsetImportantForAccessibility(view);
}
unsetAccessibilityDelegate(view);
final ViewNodeInfo viewNodeInfo = item.getViewNodeInfo();
if (viewNodeInfo != null && !isHostSpec(component)) {
unsetViewPadding(view, viewNodeInfo);
unsetViewBackground(view, viewNodeInfo);
unsetViewForeground(view, viewNodeInfo);
unsetViewLayoutDirection(view, viewNodeInfo);
}
}
/**
 * Store a {@link ComponentAccessibilityDelegate} as a tag in {@code view}. {@link ComponentView}
 * contains the logic for setting/unsetting it whenever accessibility is enabled/disabled
 *
 * For non {@link ComponentHost}s
 * this is only done if any {@link EventHandler}s for accessibility events have been implemented,
 * we want to preserve the original behaviour since {@code view} might have had
 * a default delegate.
 */
private static void setAccessibilityDelegate(View view, NodeInfo nodeInfo) {
  final boolean isHost = view instanceof ComponentHost;
  if (isHost || nodeInfo.hasAccessibilityHandlers()) {
    view.setTag(
        R.id.component_node_info,
        nodeInfo);
  }
}
/**
 * Removes the NodeInfo tag set by {@link #setAccessibilityDelegate(View, NodeInfo)}.
 * Non-host views additionally get their accessibility delegate cleared; ComponentHost
 * manages its own delegate lifecycle.
 */
private static void unsetAccessibilityDelegate(View view) {
if (!(view instanceof ComponentHost)
&& view.getTag(R.id.component_node_info) == null) {
return;
}
view.setTag(R.id.component_node_info, null);
if (!(view instanceof ComponentHost)) {
ViewCompat.setAccessibilityDelegate(view, null);
}
}
/**
 * Installs a click listener that dispatches the click handler defined in the
 * component's props and unconditionally marks the view clickable. No-op when
 * no handler is given.
 */
private static void setClickHandler(EventHandler clickHandler, View view) {
  if (clickHandler != null) {
    ComponentClickListener clickListener = getComponentClickListener(view);
    if (clickListener == null) {
      clickListener = new ComponentClickListener();
      setComponentClickListener(view, clickListener);
    }
    clickListener.setEventHandler(clickHandler);
    view.setClickable(true);
  }
}
/** Clears the event handler from the view's click listener, if one is installed. */
private static void unsetClickHandler(View view) {
  final ComponentClickListener clickListener = getComponentClickListener(view);
  if (clickListener == null) {
    return;
  }
  clickListener.setEventHandler(null);
}
/** Returns the click listener kept on the host, or stored in the view's tag otherwise. */
static ComponentClickListener getComponentClickListener(View v) {
  return v instanceof ComponentHost
      ? ((ComponentHost) v).getComponentClickListener()
      : (ComponentClickListener) v.getTag(R.id.component_click_listener);
}
/** Stores the click listener on the host, or installs it on the view plus a tag otherwise. */
static void setComponentClickListener(View v, ComponentClickListener listener) {
  if (v instanceof ComponentHost) {
    ((ComponentHost) v).setComponentClickListener(listener);
    return;
  }
  v.setOnClickListener(listener);
  v.setTag(R.id.component_click_listener, listener);
}
/**
 * Installs the long click listener that will dispatch the long click handler
 * defined in the component's props. The long-clickable flag is set only when a
 * handler is present.
 */
private static void setLongClickHandler(EventHandler longClickHandler, View view) {
if (longClickHandler != null) {
ComponentLongClickListener listener = getComponentLongClickListener(view);
if (listener == null) {
listener = new ComponentLongClickListener();
setComponentLongClickListener(view, listener);
}
listener.setEventHandler(longClickHandler);
view.setLongClickable(true);
}
}
/** Clears the event handler from the view's long click listener, if one is installed. */
private static void unsetLongClickHandler(View view) {
  final ComponentLongClickListener longClickListener = getComponentLongClickListener(view);
  if (longClickListener == null) {
    return;
  }
  longClickListener.setEventHandler(null);
}
/** Returns the long click listener kept on the host, or stored in the view's tag otherwise. */
static ComponentLongClickListener getComponentLongClickListener(View v) {
  return v instanceof ComponentHost
      ? ((ComponentHost) v).getComponentLongClickListener()
      : (ComponentLongClickListener) v.getTag(R.id.component_long_click_listener);
}
/** Stores the long click listener on the host, or installs it on the view plus a tag. */
static void setComponentLongClickListener(View v, ComponentLongClickListener listener) {
  if (v instanceof ComponentHost) {
    ((ComponentHost) v).setComponentLongClickListener(listener);
    return;
  }
  v.setOnLongClickListener(listener);
  v.setTag(R.id.component_long_click_listener, listener);
}
/**
 * Installs the touch listeners that will dispatch the touch handler
 * defined in the component's props.
 */
private static void setTouchHandler(EventHandler touchHandler, View view) {
  if (touchHandler == null) {
    return;
  }
  ComponentTouchListener touchListener = getComponentTouchListener(view);
  if (touchListener == null) {
    touchListener = new ComponentTouchListener();
    setComponentTouchListener(view, touchListener);
  }
  touchListener.setEventHandler(touchHandler);
}
/** Clears the event handler from the view's touch listener, if one is installed. */
private static void unsetTouchHandler(View view) {
  final ComponentTouchListener touchListener = getComponentTouchListener(view);
  if (touchListener == null) {
    return;
  }
  touchListener.setEventHandler(null);
}
/** Returns the touch listener kept on the host, or stored in the view's tag otherwise. */
static ComponentTouchListener getComponentTouchListener(View v) {
  return v instanceof ComponentHost
      ? ((ComponentHost) v).getComponentTouchListener()
      : (ComponentTouchListener) v.getTag(R.id.component_touch_listener);
}
/** Stores the touch listener on the host, or installs it on the view plus a tag. */
static void setComponentTouchListener(View v, ComponentTouchListener listener) {
  if (v instanceof ComponentHost) {
    ((ComponentHost) v).setComponentTouchListener(listener);
    return;
  }
  v.setOnTouchListener(listener);
  v.setTag(R.id.component_touch_listener, listener);
}
/** Sets the single view tag, delegating to ComponentHost's own storage when applicable. */
private static void setViewTag(View view, Object viewTag) {
  if (view instanceof ComponentHost) {
    ((ComponentHost) view).setViewTag(viewTag);
  } else {
    view.setTag(viewTag);
  }
}
/** Applies the keyed view tags, delegating to ComponentHost's own storage when applicable. */
private static void setViewTags(View view, SparseArray<Object> viewTags) {
  if (viewTags == null) {
    return;
  }
  if (view instanceof ComponentHost) {
    ((ComponentHost) view).setViewTags(viewTags);
    return;
  }
  final int count = viewTags.size();
  for (int idx = 0; idx < count; idx++) {
    view.setTag(viewTags.keyAt(idx), viewTags.valueAt(idx));
  }
}
/** Clears the single view tag set by {@link #setViewTag(View, Object)}. */
private static void unsetViewTag(View view) {
  if (view instanceof ComponentHost) {
    ((ComponentHost) view).setViewTag(null);
  } else {
    view.setTag(null);
  }
}
/** Clears the keyed view tags set by {@link #setViewTags(View, SparseArray)}. */
private static void unsetViewTags(View view, SparseArray<Object> viewTags) {
  if (view instanceof ComponentHost) {
    ((ComponentHost) view).setViewTags(null);
    return;
  }
  if (viewTags == null) {
    return;
  }
  final int count = viewTags.size();
  for (int idx = 0; idx < count; idx++) {
    view.setTag(viewTags.keyAt(idx), null);
  }
}
/** Sets the content description, skipping null/empty values. */
private static void setContentDescription(View view, CharSequence contentDescription) {
  if (!TextUtils.isEmpty(contentDescription)) {
    view.setContentDescription(contentDescription);
  }
}
/** Clears any content description previously set on the view. */
private static void unsetContentDescription(View view) {
view.setContentDescription(null);
}
/** Applies a non-AUTO important-for-accessibility mode; AUTO is the default and skipped. */
private static void setImportantForAccessibility(View view, int importantForAccessibility) {
  if (importantForAccessibility != IMPORTANT_FOR_ACCESSIBILITY_AUTO) {
    ViewCompat.setImportantForAccessibility(view, importantForAccessibility);
  }
}
/** Restores the important-for-accessibility mode back to AUTO. */
private static void unsetImportantForAccessibility(View view) {
ViewCompat.setImportantForAccessibility(view, IMPORTANT_FOR_ACCESSIBILITY_AUTO);
}
/** Applies an explicit focusable state; any other state leaves the view untouched. */
private static void setFocusable(View view, @NodeInfo.FocusState short focusState) {
  if (focusState == NodeInfo.FOCUS_SET_TRUE) {
    view.setFocusable(true);
    return;
  }
  if (focusState == NodeInfo.FOCUS_SET_FALSE) {
    view.setFocusable(false);
  }
}
/** Restores the view's focusable flag to the value recorded in the mount item's flags. */
private static void unsetFocusable(View view, MountItem mountItem) {
view.setFocusable(MountItem.isViewFocusable(mountItem.getFlags()));
}
private static void setViewPadding(View view, ViewNodeInfo viewNodeInfo) {
if (!viewNodeInfo.hasPadding()) {
/**
* Copyright (c) 2014-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.litho;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Deque;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.annotation.VisibleForTesting;
import android.support.v4.util.LongSparseArray;
import android.support.v4.view.ViewCompat;
import android.text.TextUtils;
import android.util.SparseArray;
import android.view.View;
import android.view.View.MeasureSpec;
import android.view.ViewGroup;
import com.facebook.R;
import com.facebook.litho.config.ComponentsConfiguration;
import com.facebook.litho.reference.Reference;
import static android.support.v4.view.ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO;
import static android.view.View.MeasureSpec.makeMeasureSpec;
import static com.facebook.litho.Component.isHostSpec;
import static com.facebook.litho.Component.isMountViewSpec;
import static com.facebook.litho.ComponentHostUtils.maybeInvalidateAccessibilityState;
import static com.facebook.litho.ComponentHostUtils.maybeSetDrawableState;
import static com.facebook.litho.ComponentsLogger.ACTION_SUCCESS;
import static com.facebook.litho.ComponentsLogger.EVENT_MOUNT;
import static com.facebook.litho.ComponentsLogger.EVENT_PREPARE_MOUNT;
import static com.facebook.litho.ComponentsLogger.EVENT_SHOULD_UPDATE_REFERENCE_LAYOUT_MISMATCH;
import static com.facebook.litho.ComponentsLogger.PARAM_IS_DIRTY;
import static com.facebook.litho.ComponentsLogger.PARAM_LOG_TAG;
import static com.facebook.litho.ComponentsLogger.PARAM_MOUNTED_COUNT;
import static com.facebook.litho.ComponentsLogger.PARAM_MOVED_COUNT;
import static com.facebook.litho.ComponentsLogger.PARAM_NO_OP_COUNT;
import static com.facebook.litho.ComponentsLogger.PARAM_UNCHANGED_COUNT;
import static com.facebook.litho.ComponentsLogger.PARAM_UNMOUNTED_COUNT;
import static com.facebook.litho.ComponentsLogger.PARAM_UPDATED_COUNT;
import static com.facebook.litho.ThreadUtils.assertMainThread;
/**
* Encapsulates the mounted state of a {@link Component}. Provides APIs to update state
* by recycling existing UI elements e.g. {@link Drawable}s.
*
* @see #mount(LayoutState, Rect)
* @see ComponentView
* @see LayoutState
*/
class MountState {
static final int ROOT_HOST_ID = 0;
// Holds the current list of mounted items.
// Should always be used within a draw lock.
private final LongSparseArray<MountItem> mIndexToItemMap;
// Holds a list with information about the components linked to the VisibilityOutputs that are
// stored in LayoutState. An item is inserted in this map if its corresponding component is
// visible. When the component exits the viewport, the item associated with it is removed from the
// map.
private final LongSparseArray<VisibilityItem> mVisibilityIdToItemMap;
// Holds a list of MountItems that are currently mounted which can mount incrementally.
private final LongSparseArray<MountItem> mCanMountIncrementallyMountItems;
// A map from test key to a list of one or more `TestItem`s which is only allocated
// and populated during test runs.
private final Map<String, Deque<TestItem>> mTestItemMap;
// Ids of the layout outputs of the currently mounted LayoutState, in mount order.
private long[] mLayoutOutputsIds;
// True if we are receiving a new LayoutState and we need to completely
// refresh the content of the HostComponent. Always set from the main thread.
private boolean mIsDirty;
// Holds the list of known component hosts during a mount pass.
private final LongSparseArray<ComponentHost> mHostsByMarker = new LongSparseArray<>();
// Shared scratch rect; used only on the main thread to avoid allocations.
private static final Rect sTempRect = new Rect();
private final ComponentContext mContext;
private final ComponentView mComponentView;
// Visible rect used during the previous (incremental) mount pass.
private final Rect mPreviousLocalVisibleRect = new Rect();
private final PrepareMountStats mPrepareMountStats = new PrepareMountStats();
private final MountStats mMountStats = new MountStats();
private TransitionManager mTransitionManager;
// Indices into the sorted tops/bottoms arrays reached by the previous incremental mount.
private int mPreviousTopsIndex;
private int mPreviousBottomsIndex;
// Id of the ComponentTree that produced the last mounted LayoutState.
private int mLastMountedComponentTreeId;
// Mount item representing the top-level ComponentView itself.
private final MountItem mRootHostMountItem;
/**
 * Creates a MountState bound to the given ComponentView. The root host mount item
 * is created eagerly because the top-level ComponentView is always implicitly mounted.
 */
public MountState(ComponentView view) {
mIndexToItemMap = new LongSparseArray<>();
mVisibilityIdToItemMap = new LongSparseArray<>();
mCanMountIncrementallyMountItems = new LongSparseArray<>();
mContext = (ComponentContext) view.getContext();
mComponentView = view;
mIsDirty = true;
// Test items are only tracked for end-to-end test runs.
mTestItemMap = ComponentsConfiguration.isEndToEndTestRun
? new HashMap<String, Deque<TestItem>>()
: null;
// The mount item representing the top-level ComponentView which
// is always automatically mounted.
mRootHostMountItem = ComponentsPools.acquireRootHostMountItem(
HostComponent.create(),
mComponentView,
mComponentView);
}
/**
 * To be called whenever the components needs to start the mount process from scratch
 * e.g. when the component's props or layout change or when the components
 * gets attached to a host.
 */
void setDirty() {
assertMainThread();
mIsDirty = true;
// Forget the previous visible rect so the next incremental mount starts fresh.
mPreviousLocalVisibleRect.setEmpty();
}
/** Whether a full mount pass is required on the next {@link #mount(LayoutState, Rect)}. */
boolean isDirty() {
assertMainThread();
return mIsDirty;
}
/**
 * Mount the layoutState on the pre-set HostView.
 * @param layoutState
 * @param localVisibleRect If this variable is null, then mount everything, since incremental
 * mount is not enabled.
 * Otherwise mount only what the rect (in local coordinates) contains
 */
void mount(LayoutState layoutState, Rect localVisibleRect) {
assertMainThread();
ComponentsSystrace.beginSection("mount");
final ComponentTree componentTree = mComponentView.getComponent();
final ComponentsLogger logger = componentTree.getContext().getLogger();
if (logger != null) {
logger.eventStart(EVENT_MOUNT, componentTree);
}
// Set up transitions before mounting so the pre-mount item state can be recorded.
prepareTransitionManager(layoutState);
if (mTransitionManager != null) {
if (mIsDirty) {
mTransitionManager.onNewTransitionContext(layoutState.getTransitionContext());
}
mTransitionManager.onMountStart();
recordMountedItemsWithTransitionKeys(
mTransitionManager,
mIndexToItemMap,
true /* isPreMount */);
}
if (mIsDirty) {
suppressInvalidationsOnHosts(true);
// Prepare the data structure for the new LayoutState and removes mountItems
// that are not present anymore if isUpdateMountInPlace is enabled.
prepareMount(layoutState);
}
mMountStats.reset();
final int componentTreeId = layoutState.getComponentTreeId();
final boolean isIncrementalMountEnabled = localVisibleRect != null;
// Fast path: performIncrementalMount handles a pure visible-rect delta. Otherwise walk
// every mountable output and mount/unmount/update as needed.
if (!isIncrementalMountEnabled ||
!performIncrementalMount(layoutState, localVisibleRect)) {
for (int i = 0, size = layoutState.getMountableOutputCount(); i < size; i++) {
final LayoutOutput layoutOutput = layoutState.getMountableOutputAt(i);
final Component component = layoutOutput.getComponent();
ComponentsSystrace.beginSection(component.getSimpleName());
final MountItem currentMountItem = getItemAt(i);
final boolean isMounted = currentMountItem != null;
// Hosts with mounted children stay mounted even when off-screen.
final boolean isMountable =
!isIncrementalMountEnabled ||
isMountedHostWithChildContent(currentMountItem) ||
Rect.intersects(localVisibleRect, layoutOutput.getBounds());
if (isMountable && !isMounted) {
mountLayoutOutput(i, layoutOutput, layoutState);
} else if (!isMountable && isMounted) {
unmountItem(mContext, i, mHostsByMarker);
} else if (isMounted) {
if (isIncrementalMountEnabled && canMountIncrementally(component)) {
mountItemIncrementally(currentMountItem, layoutOutput.getBounds(), localVisibleRect);
}
if (mIsDirty) {
// Only trust the LayoutOutput's update state when the new layout comes from the
// same component tree that produced the last mounted layout.
final boolean useUpdateValueFromLayoutOutput =
(componentTreeId >= 0) && (componentTreeId == mLastMountedComponentTreeId);
final boolean itemUpdated = updateMountItemIfNeeded(
layoutOutput,
currentMountItem,
useUpdateValueFromLayoutOutput,
logger);
if (itemUpdated) {
mMountStats.updatedCount++;
} else {
mMountStats.noOpCount++;
}
}
}
ComponentsSystrace.endSection();
}
if (isIncrementalMountEnabled) {
setupPreviousMountableOutputData(layoutState, localVisibleRect);
}
}
mIsDirty = false;
if (localVisibleRect != null) {
mPreviousLocalVisibleRect.set(localVisibleRect);
}
processVisibilityOutputs(layoutState, localVisibleRect);
if (mTransitionManager != null) {
recordMountedItemsWithTransitionKeys(
mTransitionManager,
mIndexToItemMap,
false /* isPreMount */);
mTransitionManager.processTransitions();
}
processTestOutputs(layoutState);
suppressInvalidationsOnHosts(false);
mLastMountedComponentTreeId = componentTreeId;
if (logger != null) {
final String logTag = componentTree.getContext().getLogTag();
logMountEnd(logger, logTag, componentTree, mMountStats);
}
ComponentsSystrace.endSection();
}
/**
 * Dispatches visible/focused/full-impression/invisible events by intersecting each
 * VisibilityOutput's bounds with the current visible rect. No-op when incremental
 * mount is disabled (null rect).
 */
private void processVisibilityOutputs(LayoutState layoutState, Rect localVisibleRect) {
if (localVisibleRect == null) {
return;
}
for (int j = 0, size = layoutState.getVisibilityOutputCount(); j < size; j++) {
final VisibilityOutput visibilityOutput = layoutState.getVisibilityOutputAt(j);
final EventHandler visibleHandler = visibilityOutput.getVisibleEventHandler();
final EventHandler focusedHandler = visibilityOutput.getFocusedEventHandler();
final EventHandler fullImpressionHandler = visibilityOutput.getFullImpressionEventHandler();
final EventHandler invisibleHandler = visibilityOutput.getInvisibleEventHandler();
final long visibilityOutputId = visibilityOutput.getId();
final Rect visibilityOutputBounds = visibilityOutput.getBounds();
// sTempRect becomes the visible portion of the output's bounds.
sTempRect.set(visibilityOutputBounds);
final boolean isCurrentlyVisible = sTempRect.intersect(localVisibleRect);
VisibilityItem visibilityItem = mVisibilityIdToItemMap.get(visibilityOutputId);
if (isCurrentlyVisible) {
// The component is visible now, but used to be outside the viewport.
if (visibilityItem == null) {
visibilityItem = ComponentsPools.acquireVisibilityItem(invisibleHandler);
mVisibilityIdToItemMap.put(visibilityOutputId, visibilityItem);
if (visibleHandler != null) {
EventDispatcherUtils.dispatchOnVisible(visibleHandler);
}
}
// Check if the component has entered the focused range.
if (focusedHandler != null && !visibilityItem.isInFocusedRange()) {
final View parent = (View) mComponentView.getParent();
if (hasEnteredFocusedRange(
parent.getWidth(),
parent.getHeight(),
visibilityOutputBounds,
sTempRect)) {
visibilityItem.setIsInFocusedRange();
EventDispatcherUtils.dispatchOnFocused(focusedHandler);
}
}
// If the component has not entered the full impression range yet, make sure to update the
// information about the visible edges.
if (fullImpressionHandler != null && !visibilityItem.isInFullImpressionRange()) {
visibilityItem.setVisibleEdges(visibilityOutputBounds, sTempRect);
if (visibilityItem.isInFullImpressionRange()) {
EventDispatcherUtils.dispatchOnFullImpression(fullImpressionHandler);
}
}
} else if (visibilityItem != null) {
// The component is invisible now, but used to be visible.
if (invisibleHandler != null) {
EventDispatcherUtils.dispatchOnInvisible(invisibleHandler);
}
mVisibilityIdToItemMap.remove(visibilityOutputId);
ComponentsPools.release(visibilityItem);
}
}
}
/**
 * Clears and re-populates the test item map if we are in e2e test mode.
 */
private void processTestOutputs(LayoutState layoutState) {
if (mTestItemMap == null) {
return;
}
// Release items recorded for the previous LayoutState before rebuilding.
for (Collection<TestItem> items : mTestItemMap.values()) {
for (TestItem item : items) {
ComponentsPools.release(item);
}
}
mTestItemMap.clear();
for (int i = 0, size = layoutState.getTestOutputCount(); i < size; i++) {
final TestOutput testOutput = layoutState.getTestOutputAt(i);
final long hostMarker = testOutput.getHostMarker();
final long layoutOutputId = testOutput.getLayoutOutputId();
// A marker/id of -1 denotes an output with no backing host or mount item.
final MountItem mountItem =
layoutOutputId == -1 ? null : mIndexToItemMap.get(layoutOutputId);
final TestItem testItem = ComponentsPools.acquireTestItem();
testItem.setHost(hostMarker == -1 ? null : mHostsByMarker.get(hostMarker));
testItem.setBounds(testOutput.getBounds());
testItem.setTestKey(testOutput.getTestKey());
testItem.setContent(mountItem == null ? null : mountItem.getContent());
// Multiple outputs may share the same test key; they accumulate into a deque.
final Deque<TestItem> items = mTestItemMap.get(testOutput.getTestKey());
final Deque<TestItem> updatedItems =
items == null ? new LinkedList<TestItem>() : items;
updatedItems.add(testItem);
mTestItemMap.put(testOutput.getTestKey(), updatedItems);
}
}
/** True if the item is a mounted ComponentHost that still holds mounted children. */
private boolean isMountedHostWithChildContent(MountItem mountItem) {
  if (mountItem == null) {
    return false;
  }
  final Object mounted = mountItem.getContent();
  return mounted instanceof ComponentHost
      && ((ComponentHost) mounted).getMountItemCount() > 0;
}
/**
 * Seeds {@code mPreviousTopsIndex}/{@code mPreviousBottomsIndex} from the current
 * visible rect so a later incremental mount can compute the visibility delta against
 * the sorted tops/bottoms arrays. No-op for an empty rect.
 */
private void setupPreviousMountableOutputData(LayoutState layoutState, Rect localVisibleRect) {
  if (localVisibleRect.isEmpty()) {
    return;
  }
  final ArrayList<LayoutOutput> layoutOutputTops = layoutState.getMountableOutputTops();
  final ArrayList<LayoutOutput> layoutOutputBottoms = layoutState.getMountableOutputBottoms();
  // Reuse the hoisted count everywhere instead of re-querying the LayoutState.
  final int mountableOutputCount = layoutState.getMountableOutputCount();
  // First output (sorted by top edge) entirely below the visible rect.
  mPreviousTopsIndex = mountableOutputCount;
  for (int i = 0; i < mountableOutputCount; i++) {
    if (localVisibleRect.bottom <= layoutOutputTops.get(i).getBounds().top) {
      mPreviousTopsIndex = i;
      break;
    }
  }
  // First output (sorted by bottom edge) not entirely above the visible rect.
  mPreviousBottomsIndex = mountableOutputCount;
  for (int i = 0; i < mountableOutputCount; i++) {
    if (localVisibleRect.top < layoutOutputBottoms.get(i).getBounds().bottom) {
      mPreviousBottomsIndex = i;
      break;
    }
  }
}
/** Dispatches invisible events for every tracked visibility item and releases them all. */
private void clearVisibilityItems() {
  for (int index = mVisibilityIdToItemMap.size() - 1; index >= 0; index--) {
    final VisibilityItem item = mVisibilityIdToItemMap.valueAt(index);
    final EventHandler invisibleHandler = item.getInvisibleHandler();
    if (invisibleHandler != null) {
      EventDispatcherUtils.dispatchOnInvisible(invisibleHandler);
    }
    mVisibilityIdToItemMap.removeAt(index);
    ComponentsPools.release(item);
  }
}
/** Tracks {@code host} by its marker id, suppressing invalidations for the mount pass. */
private void registerHost(long id, ComponentHost host) {
host.suppressInvalidations(true);
mHostsByMarker.put(id, host);
}
/**
 * Returns true if the component has entered the focused visible range.
 */
static boolean hasEnteredFocusedRange(
    int viewportWidth,
    int viewportHeight,
    Rect componentBounds,
    Rect componentVisibleBounds) {
  final int halfViewportArea = viewportWidth * viewportHeight / 2;
  final int totalComponentArea = computeRectArea(componentBounds);
  final int visibleComponentArea = computeRectArea(componentVisibleBounds);
  // Components at least half a viewport large are focused once they cover half the
  // viewport; smaller components are focused only when fully visible.
  if (totalComponentArea >= halfViewportArea) {
    return visibleComponentArea >= halfViewportArea;
  }
  return componentBounds.equals(componentVisibleBounds);
}
/** Area of the rect, or 0 when the rect is empty. */
private static int computeRectArea(Rect rect) {
  if (rect.isEmpty()) {
    return 0;
  }
  return rect.width() * rect.height();
}
/** Toggles invalidation suppression on every known host (iterated in reverse). */
private void suppressInvalidationsOnHosts(boolean suppressInvalidations) {
for (int i = mHostsByMarker.size() - 1; i >= 0; i--) {
mHostsByMarker.valueAt(i).suppressInvalidations(suppressInvalidations);
}
}
/**
 * Brings an already-mounted item up to date with a new LayoutOutput: unbinds the old
 * component, optionally swaps content/view attributes when an update is required,
 * rebinds the new component and re-applies the mount bounds.
 *
 * @return true if the mounted content/attributes were actually updated.
 */
private boolean updateMountItemIfNeeded(
LayoutOutput layoutOutput,
MountItem currentMountItem,
boolean useUpdateValueFromLayoutOutput,
ComponentsLogger logger) {
final Component layoutOutputComponent = layoutOutput.getComponent();
final Component itemComponent = currentMountItem.getComponent();
// 1. Check if the mount item generated from the old component should be updated.
final boolean shouldUpdate = shouldUpdateMountItem(
layoutOutput,
currentMountItem,
useUpdateValueFromLayoutOutput,
mIndexToItemMap,
mLayoutOutputsIds,
logger);
// 2. Reset all the properties like click handler, content description and tags related to
// this item if it needs to be updated. the update mount item will re-set the new ones.
if (shouldUpdate) {
unsetViewAttributes(currentMountItem);
}
// 3. We will re-bind this later in 7 regardless so let's make sure it's currently unbound.
if (currentMountItem.isBound()) {
itemComponent.getLifecycle().onUnbind(
itemComponent.getScopedContext(),
currentMountItem.getContent(),
itemComponent);
currentMountItem.setIsBound(false);
}
// 4. Re initialize the MountItem internal state with the new attributes from LayoutOutput
currentMountItem.init(layoutOutput.getComponent(), currentMountItem, layoutOutput);
// 5. If the mount item is not valid for this component update its content and view attributes.
if (shouldUpdate) {
updateMountedContent(currentMountItem, layoutOutput, itemComponent);
setViewAttributes(currentMountItem);
}
final Object currentContent = currentMountItem.getContent();
// 6. Set the mounted content on the Component and call the bind callback.
layoutOutputComponent.getLifecycle().bind(
layoutOutputComponent.getScopedContext(),
currentContent,
layoutOutputComponent);
currentMountItem.setIsBound(true);
// 7. Update the bounds of the mounted content. This needs to be done regardless of whether
// the component has been updated or not since the mounted item might have the same
// size and content but a different position.
updateBoundsForMountedLayoutOutput(layoutOutput, currentMountItem);
maybeInvalidateAccessibilityState(currentMountItem);
if (currentMountItem.getContent() instanceof Drawable) {
maybeSetDrawableState(
currentMountItem.getHost(),
(Drawable) currentMountItem.getContent(),
currentMountItem.getFlags(),
currentMountItem.getNodeInfo());
}
if (currentMountItem.getDisplayListDrawable() != null) {
currentMountItem.getDisplayListDrawable().suppressInvalidations(false);
}
return shouldUpdate;
}
/**
 * Decides whether the currently mounted item must be updated for the new LayoutOutput.
 * A size mismatch on a mount-size-dependent component forces an update; otherwise the
 * LayoutOutput's update state (when trustworthy) or the component's
 * shouldComponentUpdate decides. Also logs a diagnostic event when two
 * DrawableComponents mismatch despite an UPDATED state.
 */
private static boolean shouldUpdateMountItem(
LayoutOutput layoutOutput,
MountItem currentMountItem,
boolean useUpdateValueFromLayoutOutput,
LongSparseArray<MountItem> indexToItemMap,
long[] layoutOutputsIds,
ComponentsLogger logger) {
final @LayoutOutput.UpdateState int updateState = layoutOutput.getUpdateState();
final Component currentComponent = currentMountItem.getComponent();
final ComponentLifecycle currentLifecycle = currentComponent.getLifecycle();
final Component nextComponent = layoutOutput.getComponent();
final ComponentLifecycle nextLifecycle = nextComponent.getLifecycle();
// If the two components have different sizes and the mounted content depends on the size we
// just return true immediately.
if (!sameSize(layoutOutput, currentMountItem) && nextLifecycle.isMountSizeDependent()) {
return true;
}
if (useUpdateValueFromLayoutOutput) {
if (updateState == LayoutOutput.STATE_UPDATED) {
// Check for incompatible ReferenceLifecycle.
if (currentLifecycle instanceof DrawableComponent
&& nextLifecycle instanceof DrawableComponent
&& currentLifecycle.shouldComponentUpdate(currentComponent, nextComponent)) {
if (logger != null) {
// Collect current/next indices (including duplicated ids) for the mismatch log.
ComponentsLogger.LayoutOutputLog logObj = new ComponentsLogger.LayoutOutputLog();
logObj.currentId = indexToItemMap.keyAt(
indexToItemMap.indexOfValue(currentMountItem));
logObj.currentLifecycle = currentLifecycle.toString();
logObj.nextId = layoutOutput.getId();
logObj.nextLifecycle = nextLifecycle.toString();
for (int i = 0; i < layoutOutputsIds.length; i++) {
if (layoutOutputsIds[i] == logObj.currentId) {
if (logObj.currentIndex == -1) {
logObj.currentIndex = i;
}
logObj.currentLastDuplicatedIdIndex = i;
}
}
if (logObj.nextId == logObj.currentId) {
logObj.nextIndex = logObj.currentIndex;
logObj.nextLastDuplicatedIdIndex = logObj.currentLastDuplicatedIdIndex;
} else {
for (int i = 0; i < layoutOutputsIds.length; i++) {
if (layoutOutputsIds[i] == logObj.nextId) {
if (logObj.nextIndex == -1) {
logObj.nextIndex = i;
}
logObj.nextLastDuplicatedIdIndex = i;
}
}
}
logger.eventStart(EVENT_SHOULD_UPDATE_REFERENCE_LAYOUT_MISMATCH, logObj);
logger
.eventEnd(EVENT_SHOULD_UPDATE_REFERENCE_LAYOUT_MISMATCH, logObj, ACTION_SUCCESS);
}
return true;
}
return false;
} else if (updateState == LayoutOutput.STATE_DIRTY) {
return true;
}
}
// Lifecycles that don't implement shouldUpdate on mount always update.
if (!currentLifecycle.callsShouldUpdateOnMount()) {
return true;
}
return currentLifecycle.shouldComponentUpdate(
currentComponent,
nextComponent);
}
/** True if the layout output's bounds match the mounted content's current size. */
private static boolean sameSize(LayoutOutput layoutOutput, MountItem item) {
  final Rect outputBounds = layoutOutput.getBounds();
  final Object mountedContent = item.getContent();
  final boolean widthMatches =
      outputBounds.width() == getWidthForMountedContent(mountedContent);
  return widthMatches
      && outputBounds.height() == getHeightForMountedContent(mountedContent);
}
/** Current width of the mounted content, whether it's a Drawable or a View. */
private static int getWidthForMountedContent(Object content) {
  if (content instanceof Drawable) {
    return ((Drawable) content).getBounds().width();
  }
  return ((View) content).getWidth();
}
/** Current height of the mounted content, whether it's a Drawable or a View. */
private static int getHeightForMountedContent(Object content) {
  if (content instanceof Drawable) {
    return ((Drawable) content).getBounds().height();
  }
  return ((View) content).getHeight();
}
/**
 * Re-applies the layout output's mount bounds to an already-mounted item. The
 * top-level host is skipped: its bounds are managed by the containing ViewGroup.
 */
private void updateBoundsForMountedLayoutOutput(LayoutOutput layoutOutput, MountItem item) {
// MountState should never update the bounds of the top-level host as this
// should be done by the ViewGroup containing the ComponentView.
if (layoutOutput.getId() == ROOT_HOST_ID) {
return;
}
layoutOutput.getMountBounds(sTempRect);
// Force a re-layout for a mount-view spec with a pending layout request so that
// request is not silently dropped.
final boolean forceTraversal = Component.isMountViewSpec(layoutOutput.getComponent())
&& ((View) item.getContent()).isLayoutRequested();
applyBoundsToMountContent(
item.getContent(),
sTempRect.left,
sTempRect.top,
sTempRect.right,
sTempRect.bottom,
forceTraversal /* force */);
}
/**
 * Prepare the {@link MountState} to mount a new {@link LayoutState}: unmount or move
 * stale items, make sure the root host is registered, and record the ids of the new
 * mountable outputs.
 *
 * @param layoutState the layout state that is about to be mounted
 */
@SuppressWarnings("unchecked")
private void prepareMount(LayoutState layoutState) {
  final ComponentTree component = mComponentView.getComponent();
  final ComponentsLogger logger = component.getContext().getLogger();
  final String logTag = component.getContext().getLogTag();
  if (logger != null) {
    logger.eventStart(EVENT_PREPARE_MOUNT, component);
  }
  // Unmount items absent from the new layout and move items whose position changed
  // within the same host.
  PrepareMountStats stats = unmountOrMoveOldItems(layoutState);
  if (logger != null) {
    logPrepareMountParams(logger, logTag, component, stats);
  }
  if (mHostsByMarker.get(ROOT_HOST_ID) == null) {
    // Mounting always starts with the root host.
    registerHost(ROOT_HOST_ID, mComponentView);
    // Root host is implicitly marked as mounted.
    mIndexToItemMap.put(ROOT_HOST_ID, mRootHostMountItem);
  }
  final int outputCount = layoutState.getMountableOutputCount();
  if (mLayoutOutputsIds == null || outputCount != mLayoutOutputsIds.length) {
    // Reuse the already-computed count rather than querying the layout state again.
    mLayoutOutputsIds = new long[outputCount];
  }
  for (int i = 0; i < outputCount; i++) {
    mLayoutOutputsIds[i] = layoutState.getMountableOutputAt(i).getId();
  }
  if (logger != null) {
    logger.eventEnd(EVENT_PREPARE_MOUNT, component, ACTION_SUCCESS);
  }
}
/**
 * Determine whether to apply a disappear animation to the given {@link MountItem}.
 * Returns {@code false} whenever the item, its view node info, or the transition
 * context is missing.
 */
private static boolean isItemDisappearing(
    MountItem mountItem,
    TransitionContext transitionContext) {
  if (transitionContext == null || mountItem == null) {
    return false;
  }
  final ViewNodeInfo viewNodeInfo = mountItem.getViewNodeInfo();
  if (viewNodeInfo == null) {
    return false;
  }
  return transitionContext.isDisappearingKey(viewNodeInfo.getTransitionKey());
}
/**
 * Go over all the mounted items from the leaves to the root and unmount only the items that are
 * not present in the new LayoutOutputs.
 * If an item is still present but in a new position move the item inside its host.
 * The condition where an item changed host doesn't need any special treatment here since we
 * mark them as removed and re-added when calculating the new LayoutOutputs
 *
 * @param newLayoutState the layout state that is about to be mounted
 * @return per-pass statistics: how many items were unmounted, moved or left unchanged
 */
private PrepareMountStats unmountOrMoveOldItems(LayoutState newLayoutState) {
  mPrepareMountStats.reset();
  // Nothing was ever mounted; there is nothing to unmount or move.
  if (mLayoutOutputsIds == null) {
    return mPrepareMountStats;
  }
  // Traversing from the beginning since mLayoutOutputsIds unmounting won't remove entries there
  // but only from mIndexToItemMap. If an host changes we're going to unmount it and recursively
  // all its mounted children.
  for (int i = 0; i < mLayoutOutputsIds.length; i++) {
    final int newPosition = newLayoutState.getLayoutOutputPositionForId(mLayoutOutputsIds[i]);
    final MountItem oldItem = getItemAt(i);
    if (isItemDisappearing(oldItem, newLayoutState.getTransitionContext())) {
      startUnmountDisappearingItem(i, oldItem.getViewNodeInfo().getTransitionKey());
      final int lastDescendantOfItem = findLastDescendantOfItem(i, oldItem);
      // Disassociate disappearing items from current mounted items. The layout tree will not
      // contain disappearing items anymore, however they are kept separately in their hosts.
      removeDisappearingItemMappings(i, lastDescendantOfItem);
      // Skip this disappearing item and all its descendants. Do not unmount or move them yet.
      // We will unmount them after animation is completed.
      i = lastDescendantOfItem;
      continue;
    }
    // newPosition == -1 means the id no longer exists in the new layout.
    if (newPosition == -1) {
      unmountItem(mContext, i, mHostsByMarker);
      mPrepareMountStats.unmountedCount++;
    } else {
      final long newHostMarker = newLayoutState.getMountableOutputAt(newPosition).getHostMarker();
      if (oldItem == null) {
        // This was previously unmounted.
        mPrepareMountStats.unmountedCount++;
      } else if (oldItem.getHost() != mHostsByMarker.get(newHostMarker)) {
        // If the id is the same but the parent host is different we simply unmount the item and
        // re-mount it later. If the item to unmount is a ComponentHost, all the children will be
        // recursively unmounted.
        unmountItem(mContext, i, mHostsByMarker);
        mPrepareMountStats.unmountedCount++;
      } else if (newPosition != i) {
        // If a MountItem for this id exists and the hostMarker has not changed but its position
        // in the outputs array has changed we need to update the position in the Host to ensure
        // the z-ordering.
        oldItem.getHost().moveItem(oldItem, i, newPosition);
        mPrepareMountStats.movedCount++;
      } else {
        mPrepareMountStats.unchangedCount++;
      }
    }
  }
  return mPrepareMountStats;
}
/**
 * Drops the id-to-item (and, for hosts, marker-to-host) mappings for a contiguous run
 * of disappearing items; the items themselves stay mounted in their hosts until the
 * disappear animation finishes.
 *
 * @param fromIndex index of the first disappearing item, inclusive
 * @param toIndex index of the last disappearing item, inclusive
 */
private void removeDisappearingItemMappings(int fromIndex, int toIndex) {
  for (int index = fromIndex; index <= toIndex; index++) {
    final MountItem mountItem = getItemAt(index);

    // We do not need this mapping for disappearing items.
    mIndexToItemMap.remove(mLayoutOutputsIds[index]);

    // Likewise we no longer need host mapping for disappearing items.
    if (isHostSpec(mountItem.getComponent())) {
      final int hostIndex = mHostsByMarker.indexOfValue((ComponentHost) mountItem.getContent());
      mHostsByMarker.removeAt(hostIndex);
    }
  }
}
/**
 * Find the index of the last descendant of the given {@link MountItem}.
 */
private int findLastDescendantOfItem(int disappearingItemIndex, MountItem item) {
  final ComponentHost disappearingHost = (ComponentHost) item.getContent();
  for (int index = disappearingItemIndex + 1; index < mLayoutOutputsIds.length; index++) {
    final boolean isDescendant =
        ComponentHostUtils.hasAncestorHost(getItemAt(index).getHost(), disappearingHost);
    if (!isDescendant) {
      // No need to go further as the items that have common ancestor hosts are co-located.
      // This is the first non-descendant of the given MountItem, therefore the last
      // descendant is the item right before it.
      return index - 1;
    }
  }
  // Every remaining item is a descendant; the last descendant is the last output.
  return mLayoutOutputsIds.length - 1;
}
/**
 * Re-binds already-mounted content to a new component by unmounting the previous
 * component from it and mounting the new one onto the same (recycled) content object.
 * Host specs are skipped because host content is managed elsewhere.
 */
private void updateMountedContent(
    MountItem item,
    LayoutOutput layoutOutput,
    Component previousComponent) {
  final Component<?> component = layoutOutput.getComponent();
  if (isHostSpec(component)) {
    return;
  }
  final Object previousContent = item.getContent();
  final ComponentLifecycle lifecycle = component.getLifecycle();
  // Call unmount and mount in sequence to make sure all the resources are correctly
  // de-allocated. It's possible for previousContent to equal null - when the root is
  // interactive we create a LayoutOutput without content in order to set up click handling.
  lifecycle.unmount(previousComponent.getScopedContext(), previousContent, previousComponent);
  lifecycle.mount(component.getScopedContext(), previousContent, component);
}
/**
 * Mounts the content for the given layout output into its host, mounting the host
 * itself first if it has not been mounted yet (recursively, bottom-up).
 *
 * @param index position of {@code layoutOutput} in the mountable outputs array
 * @param layoutOutput the output to mount
 * @param layoutState the layout state the output belongs to
 */
private void mountLayoutOutput(int index, LayoutOutput layoutOutput, LayoutState layoutState) {
  // 1. Resolve the correct host to mount our content to.
  ComponentHost host = resolveComponentHost(layoutOutput, mHostsByMarker);
  if (host == null) {
    // Host has not yet been mounted - mount it now.
    for (int hostMountIndex = 0, size = mLayoutOutputsIds.length;
         hostMountIndex < size;
         hostMountIndex++) {
      if (mLayoutOutputsIds[hostMountIndex] == layoutOutput.getHostMarker()) {
        final LayoutOutput hostLayoutOutput = layoutState.getMountableOutputAt(hostMountIndex);
        mountLayoutOutput(hostMountIndex, hostLayoutOutput, layoutState);
        break;
      }
    }
    // The recursive mount above registered the host, so this lookup now succeeds.
    host = resolveComponentHost(layoutOutput, mHostsByMarker);
  }
  final Component<?> component = layoutOutput.getComponent();
  final ComponentLifecycle lifecycle = component.getLifecycle();
  // 2. Generate the component's mount state (this might also be a ComponentHost View).
  Object content = acquireMountContent(component, host);
  if (content == null) {
    content = lifecycle.createMountContent(mContext);
  }
  lifecycle.mount(
      component.getScopedContext(),
      content,
      component);
  // 3. If it's a ComponentHost, add the mounted View to the list of Hosts.
  if (isHostSpec(component)) {
    ComponentHost componentHost = (ComponentHost) content;
    componentHost.setParentHostMarker(layoutOutput.getHostMarker());
    registerHost(layoutOutput.getId(), componentHost);
  }
  // 4. Mount the content into the selected host.
  final MountItem item = mountContent(index, component, content, host, layoutOutput);
  // 5. Notify the component that mounting has completed
  lifecycle.bind(component.getScopedContext(), content, component);
  item.setIsBound(true);
  // 6. Apply the bounds to the Mount content now. It's important to do so after bind as calling
  // bind might have triggered a layout request within a View.
  layoutOutput.getMountBounds(sTempRect);
  applyBoundsToMountContent(
      content,
      sTempRect.left,
      sTempRect.top,
      sTempRect.right,
      sTempRect.bottom,
      true /* force */);
  if (item.getDisplayListDrawable() != null) {
    item.getDisplayListDrawable().suppressInvalidations(false);
  }
  // 7. Update the mount stats
  mMountStats.mountedCount++;
}
// The content might be null because it's the LayoutSpec for the root host
// (the very first LayoutOutput).
/**
 * Wraps {@code content} in a {@link MountItem}, records it in the id-to-item map
 * (and the incremental-mount map when applicable), mounts it into {@code host}
 * at {@code index}, and applies the view attributes.
 *
 * @return the newly created and registered mount item
 */
private MountItem mountContent(
    int index,
    Component<?> component,
    Object content,
    ComponentHost host,
    LayoutOutput layoutOutput) {
  final MountItem item = ComponentsPools.acquireMountItem(
      component,
      host,
      content,
      layoutOutput);
  // Create and keep a MountItem even for the layoutSpec with null content
  // that sets the root host interactions.
  mIndexToItemMap.put(mLayoutOutputsIds[index], item);
  if (component.getLifecycle().canMountIncrementally()) {
    mCanMountIncrementallyMountItems.put(index, item);
  }
  layoutOutput.getMountBounds(sTempRect);
  host.mount(index, item, sTempRect);
  setViewAttributes(item);
  return item;
}
/**
 * Obtains recycled mount content for {@code component}: host specs try to recycle
 * a host from {@code host}, everything else goes through the shared content pools.
 * May return {@code null} when nothing is available to recycle.
 */
private Object acquireMountContent(Component<?> component, ComponentHost host) {
  final ComponentLifecycle componentLifecycle = component.getLifecycle();

  if (isHostSpec(component)) {
    return host.recycleHost();
  }

  return ComponentsPools.acquireMountContent(mContext, componentLifecycle.getId());
}
/**
 * Applies the given bounds to mounted content. Views are measured (when their size
 * changed or {@code force} is set) and laid out; Drawables simply get their bounds set.
 *
 * @param content the mounted content; must be a {@link View} or a {@link Drawable}
 * @param force when true, measure and layout even if position and size are unchanged
 * @throws IllegalStateException if {@code content} is neither a View nor a Drawable
 */
private static void applyBoundsToMountContent(
    Object content,
    int left,
    int top,
    int right,
    int bottom,
    boolean force) {
  assertMainThread();
  if (content instanceof View) {
    View view = (View) content;
    int width = right - left;
    int height = bottom - top;
    if (force || view.getMeasuredHeight() != height || view.getMeasuredWidth() != width) {
      // Use the width/height computed above instead of re-deriving them inline.
      view.measure(
          makeMeasureSpec(width, MeasureSpec.EXACTLY),
          makeMeasureSpec(height, MeasureSpec.EXACTLY));
    }
    if (force ||
        view.getLeft() != left ||
        view.getTop() != top ||
        view.getRight() != right ||
        view.getBottom() != bottom) {
      view.layout(left, top, right, bottom);
    }
  } else if (content instanceof Drawable) {
    ((Drawable) content).setBounds(left, top, right, bottom);
  } else {
    throw new IllegalStateException("Unsupported mounted content " + content);
  }
}
/** Returns whether the component's lifecycle supports incremental mount. */
private static boolean canMountIncrementally(Component<?> component) {
  final ComponentLifecycle lifecycle = component.getLifecycle();
  return lifecycle.canMountIncrementally();
}
/**
 * Resolves the component host that will be used for the given layout output
 * being mounted. Returns {@code null} when that host has not been mounted yet.
 */
private static ComponentHost resolveComponentHost(
    LayoutOutput layoutOutput,
    LongSparseArray<ComponentHost> hostsByMarker) {
  return hostsByMarker.get(layoutOutput.getHostMarker());
}
/**
 * Applies all view-level attributes (event handlers, tags, accessibility,
 * background/padding/foreground, layout direction) to the mounted View of the
 * given item. No-op for non-view mount specs.
 */
private static void setViewAttributes(MountItem item) {
  final Component<?> component = item.getComponent();
  if (!isMountViewSpec(component)) {
    return;
  }
  final View view = (View) item.getContent();
  final NodeInfo nodeInfo = item.getNodeInfo();
  if (nodeInfo != null) {
    // 1. Setup click handler for the component, if applicable.
    setClickHandler(nodeInfo.getClickHandler(), view);
    // 2. Setup long click handler for the component, if applicable.
    setLongClickHandler(nodeInfo.getLongClickHandler(), view);
    // 3. Setup touch handler for the component, if applicable.
    setTouchHandler(nodeInfo.getTouchHandler(), view);
    // 4. Set listeners for AccessibilityDelegate methods
    setAccessibilityDelegate(view, nodeInfo);
    // 5. Setup view tags for the component, if applicable.
    setViewTag(view, nodeInfo.getViewTag());
    setViewTags(view, nodeInfo.getViewTags());
    // 6. Set content description.
    setContentDescription(view, nodeInfo.getContentDescription());
    // 7. Set setFocusable flag.
    setFocusable(view, nodeInfo.getFocusState());
  }
  // 8. Set important for accessibility flag
  setImportantForAccessibility(view, item.getImportantForAccessibility());
  final ViewNodeInfo viewNodeInfo = item.getViewNodeInfo();
  if (viewNodeInfo != null && !isHostSpec(component)) {
    // 9. Set view background, if applicable. Do this before padding
    // as it otherwise overrides the padding.
    setViewBackground(view, viewNodeInfo);
    // 10. Set view padding, if applicable.
    setViewPadding(view, viewNodeInfo);
    // 11. Set view foreground, if applicable.
    setViewForeground(view, viewNodeInfo);
    // 12. Set view layout direction, if applicable.
    setViewLayoutDirection(view, viewNodeInfo);
  }
}
/**
 * Reverses {@code setViewAttributes}: clears handlers, tags, accessibility state
 * and view node attributes from the mounted View before it gets recycled.
 * No-op for non-view mount specs.
 */
private static void unsetViewAttributes(MountItem item) {
  final Component<?> component = item.getComponent();
  if (!isMountViewSpec(component)) {
    return;
  }
  final View view = (View) item.getContent();
  final NodeInfo nodeInfo = item.getNodeInfo();
  if (nodeInfo != null) {
    // Reset the click handler.
    if (nodeInfo.getClickHandler() != null) {
      unsetClickHandler(view);
    }
    // Reset the long click handler.
    if (nodeInfo.getLongClickHandler() != null) {
      unsetLongClickHandler(view);
    }
    // Reset the touch handler.
    if (nodeInfo.getTouchHandler() != null) {
      unsetTouchHandler(view);
    }
    // Reset the view tags.
    unsetViewTag(view);
    unsetViewTags(view, nodeInfo.getViewTags());
    // Reset content description.
    if (!TextUtils.isEmpty(nodeInfo.getContentDescription())) {
      unsetContentDescription(view);
    }
  }
  // Reset isClickable flag.
  view.setClickable(MountItem.isViewClickable(item.getFlags()));
  // Reset isLongClickable flag.
  view.setLongClickable(MountItem.isViewLongClickable(item.getFlags()));
  // Reset setFocusable flag.
  unsetFocusable(view, item);
  if (item.getImportantForAccessibility() != IMPORTANT_FOR_ACCESSIBILITY_AUTO) {
    unsetImportantForAccessibility(view);
  }
  unsetAccessibilityDelegate(view);
  final ViewNodeInfo viewNodeInfo = item.getViewNodeInfo();
  if (viewNodeInfo != null && !isHostSpec(component)) {
    // Padding is unset before background, mirroring the reverse of the set order.
    unsetViewPadding(view, viewNodeInfo);
    unsetViewBackground(view, viewNodeInfo);
    unsetViewForeground(view, viewNodeInfo);
    unsetViewLayoutDirection(view, viewNodeInfo);
  }
}
/**
 * Store a {@link ComponentAccessibilityDelegate} as a tag in {@code view}. {@link ComponentView}
 * contains the logic for setting/unsetting it whenever accessibility is enabled/disabled
 *
 * For non {@link ComponentHost}s
 * this is only done if any {@link EventHandler}s for accessibility events have been implemented,
 * we want to preserve the original behaviour since {@code view} might have had
 * a default delegate.
 */
private static void setAccessibilityDelegate(View view, NodeInfo nodeInfo) {
  // Plain views without accessibility handlers keep whatever delegate they already had.
  if (!(view instanceof ComponentHost) && !nodeInfo.hasAccessibilityHandlers()) {
    return;
  }
  view.setTag(
      R.id.component_node_info,
      nodeInfo);
}
/**
 * Clears the accessibility node-info tag set by {@code setAccessibilityDelegate}
 * and, for plain views, also removes the accessibility delegate itself.
 */
private static void unsetAccessibilityDelegate(View view) {
  // Nothing to clear: plain view that never had a node-info tag attached.
  if (!(view instanceof ComponentHost)
      && view.getTag(R.id.component_node_info) == null) {
    return;
  }
  view.setTag(R.id.component_node_info, null);
  // NOTE(review): hosts manage their own delegate, presumably in ComponentView /
  // ComponentHost — only plain views have it removed here.
  if (!(view instanceof ComponentHost)) {
    ViewCompat.setAccessibilityDelegate(view, null);
  }
}
/**
 * Installs the click listener that dispatches the click handler defined in the
 * component's props, and unconditionally marks the view as clickable. No-op when
 * no click handler is given.
 */
private static void setClickHandler(EventHandler clickHandler, View view) {
  if (clickHandler != null) {
    ComponentClickListener clickListener = getComponentClickListener(view);
    if (clickListener == null) {
      // Lazily create and attach the shared listener on first use.
      clickListener = new ComponentClickListener();
      setComponentClickListener(view, clickListener);
    }
    clickListener.setEventHandler(clickHandler);
    view.setClickable(true);
  }
}
/** Detaches the click event handler from the view's click listener, if one is attached. */
private static void unsetClickHandler(View view) {
  final ComponentClickListener clickListener = getComponentClickListener(view);
  if (clickListener == null) {
    return;
  }
  clickListener.setEventHandler(null);
}
/** Retrieves the component click listener stored on a host or, for plain views, in a tag. */
static ComponentClickListener getComponentClickListener(View v) {
  if (v instanceof ComponentHost) {
    return ((ComponentHost) v).getComponentClickListener();
  }
  return (ComponentClickListener) v.getTag(R.id.component_click_listener);
}
/** Stores the component click listener on a host, or installs it plus a tag on a plain view. */
static void setComponentClickListener(View v, ComponentClickListener listener) {
  if (v instanceof ComponentHost) {
    ((ComponentHost) v).setComponentClickListener(listener);
    return;
  }
  v.setOnClickListener(listener);
  // The tag lets getComponentClickListener retrieve the listener later.
  v.setTag(R.id.component_click_listener, listener);
}
/**
 * Installs the long click listener that dispatches the long click handler defined
 * in the component's props and marks the view as long clickable. No-op when no
 * handler is given.
 */
private static void setLongClickHandler(EventHandler longClickHandler, View view) {
  if (longClickHandler == null) {
    return;
  }
  ComponentLongClickListener longClickListener = getComponentLongClickListener(view);
  if (longClickListener == null) {
    // Lazily create and attach the shared listener on first use.
    longClickListener = new ComponentLongClickListener();
    setComponentLongClickListener(view, longClickListener);
  }
  longClickListener.setEventHandler(longClickHandler);
  view.setLongClickable(true);
}
/** Detaches the long click event handler from the view's listener, if one is attached. */
private static void unsetLongClickHandler(View view) {
  final ComponentLongClickListener longClickListener = getComponentLongClickListener(view);
  if (longClickListener == null) {
    return;
  }
  longClickListener.setEventHandler(null);
}
/** Retrieves the long click listener stored on a host or, for plain views, in a tag. */
static ComponentLongClickListener getComponentLongClickListener(View v) {
  if (v instanceof ComponentHost) {
    return ((ComponentHost) v).getComponentLongClickListener();
  }
  return (ComponentLongClickListener) v.getTag(R.id.component_long_click_listener);
}
/** Stores the long click listener on a host, or installs it plus a tag on a plain view. */
static void setComponentLongClickListener(View v, ComponentLongClickListener listener) {
  if (v instanceof ComponentHost) {
    ((ComponentHost) v).setComponentLongClickListener(listener);
    return;
  }
  v.setOnLongClickListener(listener);
  // The tag lets getComponentLongClickListener retrieve the listener later.
  v.setTag(R.id.component_long_click_listener, listener);
}
/**
 * Installs the touch listener that dispatches the touch handler defined in the
 * component's props. No-op when no handler is given.
 */
private static void setTouchHandler(EventHandler touchHandler, View view) {
  if (touchHandler == null) {
    return;
  }
  ComponentTouchListener touchListener = getComponentTouchListener(view);
  if (touchListener == null) {
    // Lazily create and attach the shared listener on first use.
    touchListener = new ComponentTouchListener();
    setComponentTouchListener(view, touchListener);
  }
  touchListener.setEventHandler(touchHandler);
}
/** Detaches the touch event handler from the view's touch listener, if one is attached. */
private static void unsetTouchHandler(View view) {
  final ComponentTouchListener touchListener = getComponentTouchListener(view);
  if (touchListener == null) {
    return;
  }
  touchListener.setEventHandler(null);
}
/** Retrieves the touch listener stored on a host or, for plain views, in a tag. */
static ComponentTouchListener getComponentTouchListener(View v) {
  if (v instanceof ComponentHost) {
    return ((ComponentHost) v).getComponentTouchListener();
  }
  return (ComponentTouchListener) v.getTag(R.id.component_touch_listener);
}
/** Stores the touch listener on a host, or installs it plus a tag on a plain view. */
static void setComponentTouchListener(View v, ComponentTouchListener listener) {
  if (v instanceof ComponentHost) {
    ((ComponentHost) v).setComponentTouchListener(listener);
    return;
  }
  v.setOnTouchListener(listener);
  // The tag lets getComponentTouchListener retrieve the listener later.
  v.setTag(R.id.component_touch_listener, listener);
}
/** Applies the single view tag, delegating to the host's own tag storage when applicable. */
private static void setViewTag(View view, Object viewTag) {
  if (view instanceof ComponentHost) {
    ((ComponentHost) view).setViewTag(viewTag);
  } else {
    view.setTag(viewTag);
  }
}
/** Applies keyed view tags; hosts store the whole array, plain views get one tag per key. */
private static void setViewTags(View view, SparseArray<Object> viewTags) {
  if (viewTags == null) {
    return;
  }
  if (view instanceof ComponentHost) {
    ((ComponentHost) view).setViewTags(viewTags);
    return;
  }
  final int tagCount = viewTags.size();
  for (int index = 0; index < tagCount; index++) {
    view.setTag(viewTags.keyAt(index), viewTags.valueAt(index));
  }
}
/** Clears the single view tag, delegating to the host's own tag storage when applicable. */
private static void unsetViewTag(View view) {
  if (view instanceof ComponentHost) {
    ((ComponentHost) view).setViewTag(null);
  } else {
    view.setTag(null);
  }
}
/** Clears keyed view tags; hosts drop the whole array, plain views null each key out. */
private static void unsetViewTags(View view, SparseArray<Object> viewTags) {
  if (view instanceof ComponentHost) {
    ((ComponentHost) view).setViewTags(null);
    return;
  }
  if (viewTags == null) {
    return;
  }
  final int tagCount = viewTags.size();
  for (int index = 0; index < tagCount; index++) {
    view.setTag(viewTags.keyAt(index), null);
  }
}
/** Applies a non-empty content description to the view; empty/null values are ignored. */
private static void setContentDescription(View view, CharSequence contentDescription) {
  if (!TextUtils.isEmpty(contentDescription)) {
    view.setContentDescription(contentDescription);
  }
}
/** Clears the view's content description. */
private static void unsetContentDescription(View view) {
  view.setContentDescription(null);
}
/** Applies a non-default important-for-accessibility mode; AUTO (the default) is a no-op. */
private static void setImportantForAccessibility(View view, int importantForAccessibility) {
  if (importantForAccessibility != IMPORTANT_FOR_ACCESSIBILITY_AUTO) {
    ViewCompat.setImportantForAccessibility(view, importantForAccessibility);
  }
}
/** Restores the default (AUTO) important-for-accessibility mode on the view. */
private static void unsetImportantForAccessibility(View view) {
  ViewCompat.setImportantForAccessibility(view, IMPORTANT_FOR_ACCESSIBILITY_AUTO);
}
/**
 * Applies an explicitly requested focusable state; any other focus-state value
 * leaves the view untouched.
 */
private static void setFocusable(View view, @NodeInfo.FocusState short focusState) {
  final boolean focusStateWasSet =
      focusState == NodeInfo.FOCUS_SET_TRUE || focusState == NodeInfo.FOCUS_SET_FALSE;
  if (focusStateWasSet) {
    view.setFocusable(focusState == NodeInfo.FOCUS_SET_TRUE);
  }
}
/** Restores the focusable state recorded in the mount item's flags. */
private static void unsetFocusable(View view, MountItem mountItem) {
  final boolean focusable = MountItem.isViewFocusable(mountItem.getFlags());
  view.setFocusable(focusable);
}
| Lines authored by marcocova
This commit forms part of the blame-preserving initial commit suite.
| src/main/java/com/facebook/components/MountState.java | Lines authored by marcocova |
|
Java | apache-2.0 | 7aaf327496a04d15a8257b2fc554279792b46fa1 | 0 | Skarlso/gocd,GaneshSPatil/gocd,Skarlso/gocd,Skarlso/gocd,GaneshSPatil/gocd,ketan/gocd,GaneshSPatil/gocd,gocd/gocd,ketan/gocd,gocd/gocd,gocd/gocd,GaneshSPatil/gocd,ketan/gocd,ketan/gocd,gocd/gocd,GaneshSPatil/gocd,gocd/gocd,Skarlso/gocd,GaneshSPatil/gocd,ketan/gocd,ketan/gocd,Skarlso/gocd,gocd/gocd,Skarlso/gocd | /*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.rackhack;
import com.thoughtworks.go.server.util.ServletHelper;
import com.thoughtworks.go.util.ReflectionUtil;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletConfig;
import org.springframework.mock.web.MockServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import static org.hamcrest.Matchers.isA;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Verifies that {@code DelegatingServlet} forwards requests to the servlet registered
 * by {@code DelegatingListener}, rewriting the incoming URI on the way.
 */
public class DelegatingServletTest {
  // Wrapper around the raw request; the servlet rewrites the URI through this mock.
  private com.thoughtworks.go.server.util.ServletRequest servletRequestWrapper;
  private HttpServletRequest httpServletRequest;

  @BeforeEach
  public void setUp() throws Exception {
    // Install a mocked ServletHelper singleton so the servlet under test resolves
    // our request wrapper instead of touching real infrastructure.
    ServletHelper servletHelper = mock(ServletHelper.class);
    ReflectionUtil.setStaticField(ServletHelper.class, "instance", servletHelper);
    servletRequestWrapper = mock(com.thoughtworks.go.server.util.ServletRequest.class);
    httpServletRequest = mock(HttpServletRequest.class);
    when(httpServletRequest.getRequestURI()).thenReturn("/go/rails/stuff/action");
    when(servletHelper.getRequest(httpServletRequest)).thenReturn(servletRequestWrapper);
  }

  @Test
  public void shouldDelegateToTheGivenServlet() throws IOException, ServletException {
    MockServletContext ctx = new MockServletContext();
    ctx.addInitParameter(DelegatingListener.DELEGATE_SERVLET, DummyServlet.class.getCanonicalName());
    ServletContextEvent evt = new ServletContextEvent(ctx);
    DelegatingListener listener = new DelegatingListener();
    listener.contextInitialized(evt);
    // The listener should have instantiated the delegate servlet and stashed it in
    // the servlet context under the same attribute name.
    assertThat((DummyServlet) ctx.getAttribute(DelegatingListener.DELEGATE_SERVLET), isA(DummyServlet.class));
    DelegatingServlet servlet = new DelegatingServlet();
    servlet.init(new MockServletConfig(ctx));
    servlet.service(httpServletRequest, new MockHttpServletResponse());
    // The "/rails" segment must be stripped from the URI before delegation.
    verify(servletRequestWrapper).setRequestURI("/go/stuff/action");
  }
}
| rack_hack/src/test/java/com/thoughtworks/go/rackhack/DelegatingServletTest.java | /*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.rackhack;
import com.thoughtworks.go.server.util.ServletHelper;
import com.thoughtworks.go.util.ReflectionUtil;
import org.junit.Before;
import org.junit.Test;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletConfig;
import org.springframework.mock.web.MockServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import static org.hamcrest.Matchers.isA;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Verifies that {@code DelegatingServlet} forwards requests to the servlet registered
 * by {@code DelegatingListener}, rewriting the incoming URI on the way.
 * (JUnit 4 variant of this test class.)
 */
public class DelegatingServletTest {
  // Wrapper around the raw request; the servlet rewrites the URI through this mock.
  private com.thoughtworks.go.server.util.ServletRequest servletRequestWrapper;
  private HttpServletRequest httpServletRequest;

  @Before
  public void setUp() throws Exception {
    // Install a mocked ServletHelper singleton so the servlet under test resolves
    // our request wrapper instead of touching real infrastructure.
    ServletHelper servletHelper = mock(ServletHelper.class);
    ReflectionUtil.setStaticField(ServletHelper.class, "instance", servletHelper);
    servletRequestWrapper = mock(com.thoughtworks.go.server.util.ServletRequest.class);
    httpServletRequest = mock(HttpServletRequest.class);
    when(httpServletRequest.getRequestURI()).thenReturn("/go/rails/stuff/action");
    when(servletHelper.getRequest(httpServletRequest)).thenReturn(servletRequestWrapper);
  }

  @Test
  public void shouldDelegateToTheGivenServlet() throws IOException, ServletException {
    MockServletContext ctx = new MockServletContext();
    ctx.addInitParameter(DelegatingListener.DELEGATE_SERVLET, DummyServlet.class.getCanonicalName());
    ServletContextEvent evt = new ServletContextEvent(ctx);
    DelegatingListener listener = new DelegatingListener();
    listener.contextInitialized(evt);
    // The listener should have instantiated the delegate servlet and stashed it in
    // the servlet context under the same attribute name.
    assertThat((DummyServlet) ctx.getAttribute(DelegatingListener.DELEGATE_SERVLET), isA(DummyServlet.class));
    DelegatingServlet servlet = new DelegatingServlet();
    servlet.init(new MockServletConfig(ctx));
    servlet.service(httpServletRequest, new MockHttpServletResponse());
    // The "/rails" segment must be stripped from the URI before delegation.
    verify(servletRequestWrapper).setRequestURI("/go/stuff/action");
  }
}
| Migrate rack_hack to JUnit 5
| rack_hack/src/test/java/com/thoughtworks/go/rackhack/DelegatingServletTest.java | Migrate rack_hack to JUnit 5 |
|
Java | apache-2.0 | 1fbd7a506b7df482b7ae5f7650d31a3ad144835a | 0 | structurizr/java | package com.structurizr.view;
public enum Border {
Solid,
Dashed,
Dotted
}
| structurizr-core/src/com/structurizr/view/Border.java | package com.structurizr.view;
public enum Border {
Solid,
Dashed
}
| Added a "Dotted" border style.
| structurizr-core/src/com/structurizr/view/Border.java | Added a "Dotted" border style. |
|
Java | apache-2.0 | 78d33afb3084f6815b076a56b2dc75a540517d0b | 0 | Valkryst/VTerminal | package com.valkryst.VTerminal.component;
import com.valkryst.VTerminal.AsciiCharacter;
import com.valkryst.VTerminal.AsciiString;
import com.valkryst.VTerminal.font.Font;
import com.valkryst.VTerminal.font.FontLoader;
import com.valkryst.VTerminal.misc.ColoredImageCache;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Optional;
public class ScreenTest {
// Font used when rendering; loaded once per test-class instance.
private final Font font;
// Screen under test; re-created before every test.
private Screen screen;
// Reusable single character for write tests.
private final AsciiCharacter character = new AsciiCharacter('?');
// Reusable five-character string for write tests.
private final AsciiString string = new AsciiString("?????");

public ScreenTest() throws IOException, URISyntaxException {
    // Loads the bundled 20pt DejaVu Sans Mono bitmap font at 1x scale from the jar.
    font = FontLoader.loadFontFromJar("Fonts/DejaVu Sans Mono/20pt/bitmap.png", "Fonts/DejaVu Sans Mono/20pt/data.fnt", 1);
}

@Before
public void initializeScreen() {
    // Fresh 5x5 screen positioned at (0, 0) for every test.
    screen = new Screen(0, 0, 5, 5);
}
// Constructor arguments are (columnIndex, rowIndex, width, height); each is
// exposed through a matching getter.
@Test
public void testConstructor_withValidParams() {
    final Screen screen = new Screen(4, 6, 9, 10);
    Assert.assertEquals(4, screen.getColumnIndex());
    Assert.assertEquals(6, screen.getRowIndex());
    Assert.assertEquals(9, screen.getWidth());
    Assert.assertEquals(10, screen.getHeight());
}

// A negative column index is rejected.
@Test(expected=IllegalArgumentException.class)
public void testConstructor_withNegativeColumnIndex() {
    new Screen(-1, 6, 9, 10);
}

// A negative row index is rejected.
@Test(expected=IllegalArgumentException.class)
public void testConstructor_withNegativeRowIndex() {
    new Screen(4, -1, 9, 10);
}

// A negative width is rejected.
@Test(expected=IllegalArgumentException.class)
public void testConstructor_withNegativeWidth() {
    new Screen(4, 6, -1, 10);
}

// A negative height is rejected.
@Test(expected=IllegalArgumentException.class)
public void testConstructor_withNegativeHeight() {
    new Screen(4, 6, 9, -1);
}
// A screen cannot be drawn onto another screen (or itself).
@Test(expected=UnsupportedOperationException.class)
public void testDraw_withScreen() {
    screen.draw(screen);
}

// The graphics context is mandatory.
@Test(expected=NullPointerException.class)
public void testDraw_twoParams_withNullGraphicsContext() {
    screen.draw(null, new ColoredImageCache(font));
}

// The image cache is mandatory.
@Test(expected=NullPointerException.class)
public void testDraw_twoParams_withNullImageCache() {
    final BufferedImage image = new BufferedImage(10, 10, BufferedImage.TYPE_INT_ARGB);
    screen.draw((Graphics2D) image.getGraphics(), null);
}

// Smoke test: drawing to a correctly sized image completes without throwing.
@Test
public void testDraw_twoParams_withValidInputs() {
    final ColoredImageCache cache = new ColoredImageCache(font);
    // Image dimensions are screen cells scaled by the font's glyph size.
    final int width = font.getWidth() * screen.getWidth();
    final int height = font.getHeight() * screen.getHeight();
    final BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
    screen.draw((Graphics2D) image.getGraphics(), cache);
}
// Clearing with only a character fills every cell of the screen with it.
@Test
public void testClear_oneParam() {
    screen.clear('?');

    for (final AsciiString string : screen.getStrings()) {
        for (final AsciiCharacter character : string.getCharacters()) {
            Assert.assertEquals('?', character.getCharacter());
        }
    }
}

// Clearing a 2x2 region at (2, 2) affects exactly the cells with x and y in {2, 3}
// and leaves every other cell untouched.
@Test
public void testClear_multipleParams_withValidParams() {
    screen.clear('?', 2, 2, 2, 2);

    for (int y = 0 ; y < screen.getStrings().length ; y++) {
        for (int x = 0 ; x < screen.getString(y).length() ; x++) {
            final Optional<AsciiCharacter> optChar = screen.getCharacterAt(x, y);
            Assert.assertTrue(optChar.isPresent());

            if (x == 2 || x == 3) {
                if (y == 2 || y == 3) {
                    // Inside the cleared region.
                    Assert.assertEquals('?', optChar.get().getCharacter());
                    continue;
                }
            }

            // Outside the cleared region.
            Assert.assertNotEquals('?', optChar.get().getCharacter());
        }
    }
}
// A clear with a negative column index is ignored: no cell is modified.
@Test
public void testClear_multipleParams_withNegativeColumnIndex() {
    screen.clear('?', -1, 2, 2, 2);

    for (int y = 0 ; y < screen.getStrings().length ; y++) {
        for (int x = 0 ; x < screen.getString(y).length() ; x++) {
            final Optional<AsciiCharacter> optChar = screen.getCharacterAt(x, y);
            Assert.assertTrue(optChar.isPresent());
            Assert.assertNotEquals('?', optChar.get().getCharacter());
        }
    }
}

// A clear with a negative row index is ignored: no cell is modified.
@Test
public void testClear_multipleParams_withNegativeRowIndex() {
    screen.clear('?', 2, -1, 2, 2);

    for (int y = 0 ; y < screen.getStrings().length ; y++) {
        for (int x = 0 ; x < screen.getString(y).length() ; x++) {
            final Optional<AsciiCharacter> optChar = screen.getCharacterAt(x, y);
            Assert.assertTrue(optChar.isPresent());
            Assert.assertNotEquals('?', optChar.get().getCharacter());
        }
    }
}

// A clear with a negative width is ignored: no cell is modified.
@Test
public void testClear_multipleParams_withNegativeWidth() {
    screen.clear('?', 2, 2, -1, 2);

    for (int y = 0 ; y < screen.getStrings().length ; y++) {
        for (int x = 0 ; x < screen.getString(y).length() ; x++) {
            final Optional<AsciiCharacter> optChar = screen.getCharacterAt(x, y);
            Assert.assertTrue(optChar.isPresent());
            Assert.assertNotEquals('?', optChar.get().getCharacter());
        }
    }
}

// A clear with a negative height is ignored: no cell is modified.
@Test
public void testClear_multipleParams_withNegativeHeight() {
    screen.clear('?', 2, 2, 2, -1);

    for (int y = 0 ; y < screen.getStrings().length ; y++) {
        for (int x = 0 ; x < screen.getString(y).length() ; x++) {
            final Optional<AsciiCharacter> optChar = screen.getCharacterAt(x, y);
            Assert.assertTrue(optChar.isPresent());
            Assert.assertNotEquals('?', optChar.get().getCharacter());
        }
    }
}
@Test
public void testWrite_charObj_withValidParams() {
screen.write(character, 3, 3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertEquals('?', optChar.get().getCharacter());
}
@Test
public void testWrite_charObj_allCharPositions() {
for (int y = 0 ; y < screen.getHeight() ; y++) {
for (int x = 0 ; x < screen.getWidth() ; x++) {
screen.write(character, x, y);
}
}
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter chr : string.getCharacters()) {
Assert.assertEquals('?', chr.getCharacter());
}
}
}
@Test(expected=NullPointerException.class)
public void testWrite_charObj_withNullCharacter() {
screen.write((AsciiCharacter) null, 3, 3);
}
@Test
public void testWrite_charObj_withNegativeColumnIndex() {
screen.write(character, -3, 3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
@Test
public void testWrite_charObj_withNegativeRowIndex() {
screen.write(character, 3, -3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
@Test
public void testWrite_charPrim_withValidParams() {
screen.write('?', 3, 3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertEquals('?', optChar.get().getCharacter());
}
@Test
public void testWrite_charPrim_allCharPositions() {
for (int y = 0 ; y < screen.getHeight() ; y++) {
for (int x = 0 ; x < screen.getWidth() ; x++) {
screen.write('?', x, y);
}
}
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter character : string.getCharacters()) {
Assert.assertEquals('?', character.getCharacter());
}
}
}
@Test
public void testWrite_charPrim_withNegativeColumnIndex() {
screen.write('?', -3, 3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
@Test
public void testWrite_charPrim_withNegativeRowIndex() {
screen.write('?', 3, -3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
@Test(expected=NullPointerException.class)
public void testWrite_stringObj_withNullString() {
screen.write((AsciiString) null, 0, 0);
}
@Test
public void testWrite_stringObj_withNegativeColumnIndex() {
screen.write(string, -3, 3);
Assert.assertNotEquals(string, screen.getString(0));
}
@Test
public void testWrite_stringObj_withNegativeRowIndex() {
screen.write(string, 3, -3);
Assert.assertNotEquals(string, screen.getString(0));
}
@Test
public void testSetBackgroundColor_withValidColor() {
screen.setBackgroundColor(Color.PINK);
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter character : string.getCharacters()) {
Assert.assertEquals(Color.PINK, character.getBackgroundColor());
}
}
}
@Test(expected=NullPointerException.class)
public void testSetBackgroundColor_withNullColor() {
screen.setBackgroundColor(null);
}
@Test
public void testSetForegroundColor_withValidColor() {
screen.setForegroundColor(Color.PINK);
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter character : string.getCharacters()) {
Assert.assertEquals(Color.PINK, character.getForegroundColor());
}
}
}
@Test(expected=NullPointerException.class)
public void testSetForegroundColor_withNullColor() {
screen.setForegroundColor(null);
}
@Test
public void testSetBackgroundAndForegroundColor_withValidColors() {
screen.setBackgroundAndForegroundColor(Color.PINK, Color.GREEN);
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter character : string.getCharacters()) {
Assert.assertEquals(Color.PINK, character.getBackgroundColor());
Assert.assertEquals(Color.GREEN, character.getForegroundColor());
}
}
}
@Test(expected=NullPointerException.class)
public void testSetBackgroundAndForegroundColor_withNullBackgroundColor() {
screen.setBackgroundAndForegroundColor(null, Color.GREEN);
}
@Test(expected=NullPointerException.class)
public void testSetBackgroundAndForegroundColor_withNullForegroundColor() {
screen.setBackgroundAndForegroundColor(Color.PINK, null);
}
@Test(expected=NullPointerException.class)
public void testAddComponent_withNullComponent() {
screen.addComponent(null);
Assert.assertEquals(0, screen.totalComponents());
}
@Test(expected=IllegalArgumentException.class)
public void testAddComponent_withSelf() {
screen.addComponent(screen);
}
@Test
public void testAddComponent_withLayer() {
final Layer otherComponent = new Layer(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertEquals(1, screen.totalComponents());
}
@Test
public void testAddComponent_withComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
}
@Test
public void testAddComponent_addSameComponentTwice() {
final Layer otherComponent = new Layer(0, 0, 2, 2);
screen.addComponent(otherComponent);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
Assert.assertEquals(1, screen.totalComponents());
}
@Test
public void testRemoveComponent_withLayer() {
final Layer otherComponent = new Layer(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
Assert.assertEquals(1, screen.totalComponents());
screen.removeComponent(otherComponent);
Assert.assertFalse(screen.containsComponent(otherComponent));
Assert.assertEquals(0, screen.totalComponents());
}
@Test
public void testRemoveComponent_withComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
Assert.assertEquals(1, screen.totalComponents());
screen.removeComponent(otherComponent);
Assert.assertFalse(screen.containsComponent(otherComponent));
Assert.assertEquals(0, screen.totalComponents());
}
@Test(expected=NullPointerException.class)
public void testRemoveComponent_withNullComponent() {
screen.removeComponent(null);
}
@Test(expected=IllegalArgumentException.class)
public void testRemoveComponent_withSelf() {
screen.removeComponent(screen);
}
@Test
public void testContainsComponent_withLayer() {
final Layer otherComponent = new Layer(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
}
@Test
public void testContainsComponent_withComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
}
@Test
public void testContainsComponent_withComponentThatIsntContained() {
final Layer otherComponent = new Layer(0, 0, 2, 2);
Assert.assertFalse(screen.containsComponent(otherComponent));
}
@Test(expected=NullPointerException.class)
public void testContainsComponent_withNullComponent() {
screen.containsComponent(null);
}
@Test
public void testContainsComponent_withSelf() {
Assert.assertFalse(screen.containsComponent(screen));
}
@Test(expected=NullPointerException.class)
public void testRecursiveContainsComponent_withNullComponent() {
screen.recursiveContainsComponent(null);
}
@Test
public void testRecursiveContainsComponent_withSelf() {
Assert.assertFalse(screen.recursiveContainsComponent(screen));
}
@Test
public void testRecursiveContainsComponent_whereSelfContainsComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.recursiveContainsComponent(otherComponent));
}
@Test
public void testRecursiveContainsComponent_withNonContainedComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
Assert.assertFalse(screen.recursiveContainsComponent(otherComponent));
}
@Test
public void testTotalComponents() {
for (int i = 1 ; i < 10 ; i++) {
final Component component = new Component(0, 0, 2, 2);
screen.addComponent(component);
Assert.assertEquals(i, screen.totalComponents());
}
}
}
| test/com/valkryst/VTerminal/component/ScreenTest.java | package com.valkryst.VTerminal.component;
import com.valkryst.VTerminal.AsciiCharacter;
import com.valkryst.VTerminal.AsciiString;
import com.valkryst.VTerminal.font.Font;
import com.valkryst.VTerminal.font.FontLoader;
import com.valkryst.VTerminal.misc.ColoredImageCache;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Optional;
public class ScreenTest {
private final Font font;
private Screen screen;
private final AsciiCharacter character = new AsciiCharacter('?');
private final AsciiString string = new AsciiString("?????");
public ScreenTest() throws IOException, URISyntaxException {
font = FontLoader.loadFontFromJar("Fonts/DejaVu Sans Mono/20pt/bitmap.png", "Fonts/DejaVu Sans Mono/20pt/data.fnt", 1);
}
@Before
public void initializeScreen() {
screen = new Screen(0, 0, 5, 5);
}
@Test
public void testConstructor_withValidParams() {
final Screen screen = new Screen(4, 6, 9, 10);
Assert.assertEquals(4, screen.getColumnIndex());
Assert.assertEquals(6, screen.getRowIndex());
Assert.assertEquals(9, screen.getWidth());
Assert.assertEquals(10, screen.getHeight());
}
@Test(expected=IllegalArgumentException.class)
public void testConstructor_withNegativeColumnIndex() {
new Screen(-1, 6, 9, 10);
}
@Test(expected=IllegalArgumentException.class)
public void testConstructor_withNegativeRowIndex() {
new Screen(4, -1, 9, 10);
}
@Test(expected=IllegalArgumentException.class)
public void testConstructor_withNegativeWidth() {
new Screen(4, 6, -1, 10);
}
@Test(expected=IllegalArgumentException.class)
public void testConstructor_withNegativeHeight() {
new Screen(4, 6, 9, -1);
}
@Test(expected=UnsupportedOperationException.class)
public void testDraw_withScreen() {
screen.draw(screen);
}
@Test(expected=NullPointerException.class)
public void testDraw_twoParams_withNullGraphicsContext() {
screen.draw(null, new ColoredImageCache(font));
}
@Test(expected=NullPointerException.class)
public void testDraw_twoParams_withNullImageCache() {
final BufferedImage image = new BufferedImage(10, 10, BufferedImage.TYPE_INT_ARGB);
screen.draw((Graphics2D) image.getGraphics(), null);
}
@Test
public void testDraw_twoParams_withValidInputs() {
final ColoredImageCache cache = new ColoredImageCache(font);
final int width = font.getWidth() * screen.getWidth();
final int height = font.getHeight() * screen.getHeight();
final BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
screen.draw((Graphics2D) image.getGraphics(), cache);
}
@Test
public void testClear_oneParam() {
screen.clear('?');
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter character : string.getCharacters()) {
Assert.assertEquals('?', character.getCharacter());
}
}
}
@Test
public void testClear_multipleParams_withValidParams() {
screen.clear('?', 2, 2, 2, 2);
for (int y = 0 ; y < screen.getStrings().length ; y++) {
for (int x = 0 ; x < screen.getString(y).length() ; x++) {
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(x, y);
Assert.assertTrue(optChar.isPresent());
if (x == 2 || x == 3) {
if (y == 2 || y == 3) {
Assert.assertEquals('?', optChar.get().getCharacter());
continue;
}
}
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
}
}
@Test
public void testClear_multipleParams_withNegativeColumnIndex() {
screen.clear('?', -1, 2, 2, 2);
for (int y = 0 ; y < screen.getStrings().length ; y++) {
for (int x = 0 ; x < screen.getString(y).length() ; x++) {
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(x, y);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
}
}
@Test
public void testClear_multipleParams_withNegativeRowIndex() {
screen.clear('?', 2, -1, 2, 2);
for (int y = 0 ; y < screen.getStrings().length ; y++) {
for (int x = 0 ; x < screen.getString(y).length() ; x++) {
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(x, y);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
}
}
@Test
public void testClear_multipleParams_withNegativeWidth() {
screen.clear('?', 2, 2, -1, 2);
for (int y = 0 ; y < screen.getStrings().length ; y++) {
for (int x = 0 ; x < screen.getString(y).length() ; x++) {
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(x, y);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
}
}
@Test
public void testClear_multipleParams_withNegativeHeight() {
screen.clear('?', 2, 2, 2, -1);
for (int y = 0 ; y < screen.getStrings().length ; y++) {
for (int x = 0 ; x < screen.getString(y).length() ; x++) {
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(x, y);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
}
}
@Test
public void testWrite_charObj_withValidParams() {
screen.write(character, 3, 3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertEquals('?', optChar.get().getCharacter());
}
@Test
public void testWrite_charObj_allCharPositions() {
for (int y = 0 ; y < screen.getHeight() ; y++) {
for (int x = 0 ; x < screen.getWidth() ; x++) {
screen.write(character, x, y);
}
}
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter chr : string.getCharacters()) {
Assert.assertEquals('?', chr.getCharacter());
}
}
}
@Test(expected=NullPointerException.class)
public void testWrite_charObj_withNullCharacter() {
screen.write((AsciiCharacter) null, 3, 3);
}
@Test
public void testWrite_charObj_withNegativeColumnIndex() {
screen.write(character, -3, 3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
@Test
public void testWrite_charObj_withNegativeRowIndex() {
screen.write(character, 3, -3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
@Test
public void testWrite_charPrim_withValidParams() {
screen.write('?', 3, 3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertEquals('?', optChar.get().getCharacter());
}
@Test
public void testWrite_charPrim_allCharPositions() {
for (int y = 0 ; y < screen.getHeight() ; y++) {
for (int x = 0 ; x < screen.getWidth() ; x++) {
screen.write('?', x, y);
}
}
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter character : string.getCharacters()) {
Assert.assertEquals('?', character.getCharacter());
}
}
}
@Test
public void testWrite_charPrim_withNegativeColumnIndex() {
screen.write('?', -3, 3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
@Test
public void testWrite_charPrim_withNegativeRowIndex() {
screen.write('?', 3, -3);
final Optional<AsciiCharacter> optChar = screen.getCharacterAt(3, 3);
Assert.assertTrue(optChar.isPresent());
Assert.assertNotEquals('?', optChar.get().getCharacter());
}
@Test(expected=NullPointerException.class)
public void testWrite_stringObj_withNullString() {
screen.write((AsciiString) null, 0, 0);
}
@Test
public void testWrite_stringObj_withNegativeColumnIndex() {
screen.write(string, -3, 3);
Assert.assertNotEquals(string, screen.getString(0));
}
@Test
public void testWrite_stringObj_withNegativeRowIndex() {
screen.write(string, 3, -3);
Assert.assertNotEquals(string, screen.getString(0));
}
@Test
public void testSetBackgroundColor_withValidColor() {
screen.setBackgroundColor(Color.PINK);
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter character : string.getCharacters()) {
Assert.assertEquals(Color.PINK, character.getBackgroundColor());
}
}
}
@Test(expected=NullPointerException.class)
public void testSetBackgroundColor_withNullColor() {
screen.setBackgroundColor(null);
}
@Test
public void testSetForegroundColor_withValidColor() {
screen.setForegroundColor(Color.PINK);
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter character : string.getCharacters()) {
Assert.assertEquals(Color.PINK, character.getForegroundColor());
}
}
}
@Test(expected=NullPointerException.class)
public void testSetForegroundColor_withNullColor() {
screen.setForegroundColor(null);
}
@Test
public void testSetBackgroundAndForegroundColor_withValidColors() {
screen.setBackgroundAndForegroundColor(Color.PINK, Color.GREEN);
for (final AsciiString string : screen.getStrings()) {
for (final AsciiCharacter character : string.getCharacters()) {
Assert.assertEquals(Color.PINK, character.getBackgroundColor());
Assert.assertEquals(Color.GREEN, character.getForegroundColor());
}
}
}
@Test(expected=NullPointerException.class)
public void testSetBackgroundAndForegroundColor_withNullBackgroundColor() {
screen.setBackgroundAndForegroundColor(null, Color.GREEN);
}
@Test(expected=NullPointerException.class)
public void testSetBackgroundAndForegroundColor_withNullForegroundColor() {
screen.setBackgroundAndForegroundColor(Color.PINK, null);
}
@Test(expected=NullPointerException.class)
public void testAddComponent_withNullComponent() {
screen.addComponent(null);
Assert.assertEquals(0, screen.totalComponents());
}
@Test(expected=IllegalArgumentException.class)
public void testAddComponent_withSelf() {
screen.addComponent(screen);
}
@Test
public void testAddComponent_withLayer() {
final Layer otherComponent = new Layer(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertEquals(1, screen.totalComponents());
}
@Test
public void testAddComponent_withScreen() {
final Screen otherComponent = new Screen(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
}
@Test
public void testAddComponent_withComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
}
@Test
public void testAddComponent_withScreenContainingMainScreen() {
final Screen otherComponent = new Screen(0, 0, 2, 2);
otherComponent.addComponent(screen);
screen.addComponent(otherComponent);
Assert.assertFalse(screen.containsComponent(otherComponent));
}
@Test
public void testAddComponent_addSameComponentTwice() {
final Layer otherComponent = new Layer(0, 0, 2, 2);
screen.addComponent(otherComponent);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
Assert.assertEquals(1, screen.totalComponents());
}
@Test
public void testRemoveComponent_withLayer() {
final Layer otherComponent = new Layer(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
Assert.assertEquals(1, screen.totalComponents());
screen.removeComponent(otherComponent);
Assert.assertFalse(screen.containsComponent(otherComponent));
Assert.assertEquals(0, screen.totalComponents());
}
@Test
public void testRemoveComponent_withScreen() {
final Screen otherComponent = new Screen(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
Assert.assertEquals(1, screen.totalComponents());
screen.removeComponent(otherComponent);
Assert.assertFalse(screen.containsComponent(otherComponent));
Assert.assertEquals(0, screen.totalComponents());
}
@Test
public void testRemoveComponent_withComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
Assert.assertEquals(1, screen.totalComponents());
screen.removeComponent(otherComponent);
Assert.assertFalse(screen.containsComponent(otherComponent));
Assert.assertEquals(0, screen.totalComponents());
}
@Test(expected=NullPointerException.class)
public void testRemoveComponent_withNullComponent() {
screen.removeComponent(null);
}
@Test(expected=IllegalArgumentException.class)
public void testRemoveComponent_withSelf() {
screen.removeComponent(screen);
}
@Test
public void testContainsComponent_withLayer() {
final Layer otherComponent = new Layer(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
}
@Test
public void testContainsComponent_withScreen() {
final Screen otherComponent = new Screen(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
}
@Test
public void testContainsComponent_withComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.containsComponent(otherComponent));
}
@Test
public void testContainsComponent_withComponentThatIsntContained() {
final Layer otherComponent = new Layer(0, 0, 2, 2);
Assert.assertFalse(screen.containsComponent(otherComponent));
}
@Test(expected=NullPointerException.class)
public void testContainsComponent_withNullComponent() {
screen.containsComponent(null);
}
@Test
public void testContainsComponent_withSelf() {
Assert.assertFalse(screen.containsComponent(screen));
}
@Test(expected=NullPointerException.class)
public void testRecursiveContainsComponent_withNullComponent() {
screen.recursiveContainsComponent(null);
}
@Test
public void testRecursiveContainsComponent_withSelf() {
Assert.assertFalse(screen.recursiveContainsComponent(screen));
}
@Test
public void testRecursiveContainsComponent_whereSelfContainsComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.recursiveContainsComponent(otherComponent));
}
@Test
public void testRecursiveContainsComponent_whereOtherComponentIsScreenThatContainsCallingScreen() {
final Screen otherComponent = new Screen(0, 0, 2, 2);
otherComponent.addComponent(screen);
screen.addComponent(otherComponent);
Assert.assertTrue(screen.recursiveContainsComponent(otherComponent));
}
@Test
public void testRecursiveContainsComponent_whereSubscreenContainsComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
final Screen otherScreen = new Screen(0, 0, 2, 2);
otherScreen.addComponent(otherComponent);
screen.addComponent(otherScreen);
Assert.assertTrue(screen.recursiveContainsComponent(otherComponent));
}
@Test
public void testRecursiveContainsComponent_withNonContainedComponent() {
final Component otherComponent = new Component(0, 0, 2, 2);
Assert.assertFalse(screen.recursiveContainsComponent(otherComponent));
}
@Test
public void testTotalComponents() {
for (int i = 1 ; i < 10 ; i++) {
final Component component = new Component(0, 0, 2, 2);
screen.addComponent(component);
Assert.assertEquals(i, screen.totalComponents());
}
}
}
| Updates to match API changes.
| test/com/valkryst/VTerminal/component/ScreenTest.java | Updates to match API changes. |
|
Java | apache-2.0 | d674763e77065a11601f77a8353513c30581b695 | 0 | samaitra/jena,kidaa/jena,tr3vr/jena,CesarPantoja/jena,samaitra/jena,apache/jena,kidaa/jena,kamir/jena,apache/jena,kidaa/jena,kamir/jena,tr3vr/jena,samaitra/jena,kidaa/jena,kidaa/jena,tr3vr/jena,apache/jena,kamir/jena,CesarPantoja/jena,tr3vr/jena,apache/jena,kamir/jena,CesarPantoja/jena,tr3vr/jena,kidaa/jena,apache/jena,CesarPantoja/jena,samaitra/jena,samaitra/jena,samaitra/jena,tr3vr/jena,apache/jena,CesarPantoja/jena,apache/jena,kamir/jena,kidaa/jena,CesarPantoja/jena,apache/jena,tr3vr/jena,CesarPantoja/jena,samaitra/jena,kamir/jena,kamir/jena | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.engine.ref;
import static org.apache.jena.sparql.algebra.JoinType.* ;
import java.util.ArrayList ;
import java.util.Iterator ;
import java.util.List ;
import org.apache.jena.sparql.algebra.Algebra ;
import org.apache.jena.sparql.algebra.JoinType ;
import org.apache.jena.sparql.algebra.Table ;
import org.apache.jena.sparql.algebra.table.TableUnit ;
import org.apache.jena.sparql.engine.ExecutionContext ;
import org.apache.jena.sparql.engine.QueryIterator ;
import org.apache.jena.sparql.engine.binding.Binding ;
import org.apache.jena.sparql.engine.iterator.QueryIterFilterExpr ;
import org.apache.jena.sparql.engine.iterator.QueryIterNullIterator ;
import org.apache.jena.sparql.engine.iterator.QueryIterPlainWrapper ;
import org.apache.jena.sparql.expr.Expr ;
import org.apache.jena.sparql.expr.ExprList ;
/** Table join - this only happens if the patterns can not be streamed.
* This code is simple!
* Primarily for use in testing and also the reference query engine which is designed for simplicity, not performance.
*/
public class TableJoin
{
public static QueryIterator join(QueryIterator left, Table right, ExprList condition, ExecutionContext execCxt) {
return joinWorker(left, right, INNER, condition, execCxt) ;
}
public static QueryIterator leftJoin(QueryIterator left, Table right, ExprList condition, ExecutionContext execCxt) {
return joinWorker(left, right, LEFT, condition, execCxt) ;
}
public static QueryIterator joinWorker(QueryIterator left, Table right, JoinType joinType, ExprList conditions, ExecutionContext execCxt) {
if ( right.isEmpty() ) {
if ( joinType == INNER ) {
// No rows - no match
left.close() ;
return QueryIterNullIterator.create(execCxt) ;
}
else
// Left join - pass out left rows regardless of conditions.
return left ;
}
if ( TableUnit.isTableUnit(right) )
return applyConditions(left, conditions, execCxt) ;
return joinWorkerN(left, right, joinType, conditions, execCxt) ;
}
private static QueryIterator joinWorkerN(QueryIterator left, Table right, JoinType joinType, ExprList conditions, ExecutionContext execCxt) {
// We could hash the right except we don't know much about columns.
List<Binding> out = new ArrayList<>() ;
for ( ; left.hasNext() ; ) {
Binding bindingLeft = left.next() ;
int count = 0 ;
for (Iterator<Binding> iter = right.rows() ; iter.hasNext();) {
Binding bindingRight = iter.next() ;
Binding r = Algebra.merge(bindingLeft, bindingRight) ;
if ( r == null )
continue ;
// This does the conditional part. Theta-join.
if ( conditions == null || conditions.isSatisfied(r, execCxt) ) {
count ++ ;
out.add(r) ;
}
}
if ( count == 0 && ( joinType == LEFT) )
// Conditions on left?
out.add(bindingLeft) ;
}
return new QueryIterPlainWrapper(out.iterator(), execCxt) ;
}
private static QueryIterator applyConditions(QueryIterator qIter, ExprList conditions, ExecutionContext execCxt) {
if ( conditions == null )
return qIter ;
for (Expr expr : conditions)
qIter = new QueryIterFilterExpr(qIter, expr, execCxt) ;
return qIter ;
}
}
| jena-arq/src/main/java/org/apache/jena/sparql/engine/ref/TableJoin.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.engine.ref;
import static org.apache.jena.sparql.algebra.JoinType.* ;
import java.util.ArrayList ;
import java.util.Iterator ;
import java.util.List ;
import org.apache.jena.sparql.algebra.Algebra ;
import org.apache.jena.sparql.algebra.JoinType ;
import org.apache.jena.sparql.algebra.Table ;
import org.apache.jena.sparql.algebra.table.TableUnit ;
import org.apache.jena.sparql.engine.ExecutionContext ;
import org.apache.jena.sparql.engine.QueryIterator ;
import org.apache.jena.sparql.engine.binding.Binding ;
import org.apache.jena.sparql.engine.iterator.QueryIterFilterExpr ;
import org.apache.jena.sparql.engine.iterator.QueryIterNullIterator ;
import org.apache.jena.sparql.engine.iterator.QueryIterPlainWrapper ;
import org.apache.jena.sparql.expr.Expr ;
import org.apache.jena.sparql.expr.ExprList ;
/**
 * Table join - only used when the patterns can not be streamed.
 * Deliberately simple code: intended for testing and for the reference
 * query engine, which is designed for simplicity, not performance.
 */
public class TableJoin
{
    /** Inner (plain) join of an iterator of bindings with a materialized table. */
    public static QueryIterator join(QueryIterator left, Table right, ExprList condition, ExecutionContext execCxt) {
        return joinWorker(left, right, PLAIN, condition, execCxt);
    }

    /** Left outer join: left-hand rows always survive, even without a match. */
    public static QueryIterator leftJoin(QueryIterator left, Table right, ExprList condition, ExecutionContext execCxt) {
        return joinWorker(left, right, LEFT, condition, execCxt);
    }

    /** Handle the special cases (empty table, unit table) before doing real work. */
    public static QueryIterator joinWorker(QueryIterator left, Table right, JoinType joinType, ExprList conditions, ExecutionContext execCxt) {
        if ( right.isEmpty() ) {
            if ( joinType == PLAIN ) {
                // Inner join against an empty table: nothing can match.
                left.close();
                return QueryIterNullIterator.create(execCxt);
            }
            // Left join - pass out left rows regardless of conditions.
            return left;
        }
        if ( TableUnit.isTableUnit(right) )
            // Joining with the unit table is the identity; just apply any filters.
            return applyConditions(left, conditions, execCxt);
        return joinWorkerN(left, right, joinType, conditions, execCxt);
    }

    /** General nested-loop (theta) join; materializes the whole result. */
    private static QueryIterator joinWorkerN(QueryIterator left, Table right, JoinType joinType, ExprList conditions, ExecutionContext execCxt) {
        // We could hash the right-hand side, except we don't know much about columns.
        List<Binding> results = new ArrayList<>();
        while ( left.hasNext() ) {
            Binding leftRow = left.next();
            boolean anyMatch = false;
            Iterator<Binding> rightRows = right.rows();
            while ( rightRows.hasNext() ) {
                Binding merged = Algebra.merge(leftRow, rightRows.next());
                if ( merged == null )
                    continue;
                // This does the conditional part. Theta-join.
                if ( conditions == null || conditions.isSatisfied(merged, execCxt) ) {
                    anyMatch = true;
                    results.add(merged);
                }
            }
            if ( !anyMatch && joinType == LEFT )
                // Left join keeps the unmatched left row. (Conditions on left?)
                results.add(leftRow);
        }
        return new QueryIterPlainWrapper(results.iterator(), execCxt);
    }

    /** Wrap the iterator in one filter stage per condition; null means no conditions. */
    private static QueryIterator applyConditions(QueryIterator qIter, ExprList conditions, ExecutionContext execCxt) {
        if ( conditions == null )
            return qIter;
        for ( Expr expr : conditions )
            qIter = new QueryIterFilterExpr(qIter, expr, execCxt);
        return qIter;
    }
}
| Rename in JoinType. | jena-arq/src/main/java/org/apache/jena/sparql/engine/ref/TableJoin.java | Rename in JoinType. |
|
Java | apache-2.0 | a0edaf0bb3b13b6dfe2132a514c2f663112c064c | 0 | windbender/dropwizard-auth-jwt,mastermind1981/dropwizard-auth-jwt,kimble/dropwizard-auth-jwt,ToastShaman/dropwizard-auth-jwt,keyboardsamurai/dropwizard-auth-jwt | package com.github.toastshaman.dropwizard.auth.jwt.model;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.toastshaman.dropwizard.auth.jwt.exceptions.JsonWebTokenException;
import com.github.toastshaman.dropwizard.auth.jwt.exceptions.MalformedJsonWebTokenException;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.io.StringWriter;
import java.util.List;
import static com.github.toastshaman.dropwizard.auth.jwt.JsonWebTokenUtils.bytesOf;
import static com.github.toastshaman.dropwizard.auth.jwt.JsonWebTokenUtils.toBase64;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.String.format;
import static java.util.Arrays.copyOf;
import static org.apache.commons.lang.StringUtils.isNotBlank;
/**
 * Model of a JSON Web Token: a header, a claims set and an optional signature,
 * plus the raw (still encoded) token segments when the token was parsed rather
 * than built locally.
 *
 * Instances are created through {@link #decode()} (for tokens received over the
 * wire) or {@link #encode()} (for tokens this service issues).
 */
public class JsonWebToken {

    // Shared mapper: ObjectMapper is thread-safe once configured and is
    // expensive to construct, so don't build a new one per (de)serialization.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private final JsonWebTokenHeader header;
    private final JsonWebTokenClaims claim;
    private Optional<byte[]> signature;
    private Optional<List<String>> rawToken = Optional.absent();

    private JsonWebToken(JsonWebTokenHeader header, JsonWebTokenClaims claim, Optional<byte[]> signature, Optional<List<String>> rawToken) {
        this.header = header;
        this.claim = claim;
        this.signature = signature;
        this.rawToken = rawToken;
    }

    /** @return the JWT header */
    public JsonWebTokenHeader getHeader() { return header; }

    /** @return the JWT claims set */
    public JsonWebTokenClaims getClaim() { return claim; }

    /** @return a defensive copy of the signature bytes, or {@code null} if this token is unsigned */
    public byte[] getSignature() {
        // Copy on the way out: the decoder copies on the way in, so handing out
        // the internal array here would let callers silently mutate the token.
        return signature.isPresent() ? copyOf(signature.get(), signature.get().length) : null;
    }

    /**
     * @return the signing input of this token: base64url(header JSON) joined to
     *         base64url(claims JSON) with a '.' (signature not included)
     */
    public String deserialize() { return Joiner.on(".").join(toBase64(bytesOf(toJson(header))), toBase64(bytesOf(toJson(claim)))); }

    // Serializes the given object to its JSON representation; wraps any Jackson
    // failure in the library's unchecked JsonWebTokenException.
    private String toJson(Object input) {
        try {
            StringWriter output = new StringWriter();
            MAPPER.writeValue(output, input);
            return output.toString();
        } catch (IOException e) {
            throw new JsonWebTokenException(e.getMessage(), e);
        }
    }

    /** @return the three raw (still encoded) token segments, if this token was parsed */
    public Optional<List<String>> getRawToken() { return rawToken; }

    /** Builder for tokens parsed from their wire representation. */
    public static class DecoderBuilder {

        private JsonWebTokenHeader header;
        private JsonWebTokenClaims claim;
        private Optional<byte[]> signature = Optional.absent();
        private Optional<List<String>> rawToken = Optional.absent();

        public JsonWebToken build() {
            checkNotNull(header);
            checkNotNull(claim);
            checkNotNull(rawToken);
            if (signature.isPresent()) { checkArgument(signature.get().length > 0); }
            if (rawToken.isPresent()) { checkArgument(rawToken.get().size() == 3); }
            return new JsonWebToken(header, claim, signature, rawToken);
        }

        /** @param header the JSON form of the token header; must parse into {@link JsonWebTokenHeader} */
        public DecoderBuilder header(String header) {
            checkArgument(isNotBlank(header));
            try {
                this.header = MAPPER.readValue(header, JsonWebTokenHeader.class);
                return this;
            } catch (Exception e) {
                throw new MalformedJsonWebTokenException(format("The provided JWT header is malformed: [%s]", header), e);
            }
        }

        /** @param claim the JSON form of the claims set; must parse into {@link JsonWebTokenClaims} */
        public DecoderBuilder claim(String claim) {
            checkArgument(isNotBlank(claim));
            try {
                this.claim = MAPPER.readValue(claim, JsonWebTokenClaims.class);
                return this;
            } catch (Exception e) {
                throw new MalformedJsonWebTokenException(format("The provided JWT claim is malformed: [%s]", claim), e);
            }
        }

        /** @param signature the raw signature bytes; defensively copied */
        public DecoderBuilder signature(byte[] signature) {
            checkNotNull(signature);
            checkArgument(signature.length > 0);
            this.signature = Optional.of(copyOf(signature, signature.length));
            return this;
        }

        /** @param rawToken the three encoded token segments (header, claims, signature) */
        public DecoderBuilder rawToken(List<String> rawToken) {
            checkNotNull(rawToken);
            checkArgument(rawToken.size() == 3);
            this.rawToken = Optional.of((List<String>) ImmutableList.copyOf(rawToken));
            return this;
        }
    }

    /** Builder for tokens created locally (no signature or raw segments yet). */
    public static class EncoderBuilder {

        private JsonWebTokenHeader header;
        private JsonWebTokenClaims claim;

        public EncoderBuilder header(JsonWebTokenHeader header) {
            checkNotNull(header);
            this.header = header;
            return this;
        }

        public EncoderBuilder claim(JsonWebTokenClaims claim) {
            checkNotNull(claim);
            this.claim = claim;
            return this;
        }

        public JsonWebToken build() {
            checkNotNull(claim);
            checkNotNull(header);
            return new JsonWebToken(header, claim, Optional.<byte[]>absent(), Optional.<List<String>>absent());
        }
    }

    public static DecoderBuilder decode() { return new DecoderBuilder(); }

    public static EncoderBuilder encode() { return new EncoderBuilder(); }
}
| src/main/java/com/github/toastshaman/dropwizard/auth/jwt/model/JsonWebToken.java | package com.github.toastshaman.dropwizard.auth.jwt.model;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.toastshaman.dropwizard.auth.jwt.exceptions.JsonWebTokenException;
import com.github.toastshaman.dropwizard.auth.jwt.exceptions.MalformedJsonWebTokenException;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.io.BaseEncoding;
import java.io.IOException;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.util.List;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.String.format;
import static java.util.Arrays.copyOf;
import static org.apache.commons.lang.StringUtils.isNotBlank;
/**
 * Model of a JSON Web Token: a header, a claims set and an optional signature,
 * plus the raw (still encoded) token segments when the token was parsed rather
 * than built locally.
 *
 * Instances are created through {@link #decode()} (for tokens received over the
 * wire) or {@link #encode()} (for tokens this service issues).
 */
public class JsonWebToken {

    // Shared mapper: ObjectMapper is thread-safe once configured and is
    // expensive to construct, so don't build a new one per (de)serialization.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private final JsonWebTokenHeader header;
    private final JsonWebTokenClaims claim;
    private Optional<byte[]> signature;
    private Optional<List<String>> rawToken = Optional.absent();

    private JsonWebToken(JsonWebTokenHeader header, JsonWebTokenClaims claim, Optional<byte[]> signature, Optional<List<String>> rawToken) {
        this.header = header;
        this.claim = claim;
        this.signature = signature;
        this.rawToken = rawToken;
    }

    /** @return the JWT header */
    public JsonWebTokenHeader getHeader() { return header; }

    /** @return the JWT claims set */
    public JsonWebTokenClaims getClaim() { return claim; }

    /** @return a defensive copy of the signature bytes, or {@code null} if this token is unsigned */
    public byte[] getSignature() {
        // Copy on the way out: the decoder copies on the way in, so handing out
        // the internal array here would let callers silently mutate the token.
        return signature.isPresent() ? copyOf(signature.get(), signature.get().length) : null;
    }

    /**
     * @return the signing input of this token: base64url(header JSON) joined to
     *         base64url(claims JSON) with a '.' (signature not included)
     */
    public String deserialize() { return Joiner.on(".").join(encode(toJson(header)), encode(toJson(claim))); }

    // Unpadded base64url encoding of the UTF-8 bytes, as required by the JWS spec.
    private String encode(String input) {
        return BaseEncoding.base64Url().omitPadding().encode(input.getBytes(Charset.forName("UTF-8")));
    }

    // Serializes the given object to its JSON representation; wraps any Jackson
    // failure in the library's unchecked JsonWebTokenException.
    private String toJson(Object input) {
        try {
            StringWriter output = new StringWriter();
            MAPPER.writeValue(output, input);
            return output.toString();
        } catch (IOException e) {
            throw new JsonWebTokenException(e.getMessage(), e);
        }
    }

    /** @return the three raw (still encoded) token segments, if this token was parsed */
    public Optional<List<String>> getRawToken() { return rawToken; }

    /** Builder for tokens parsed from their wire representation. */
    public static class DecoderBuilder {

        private JsonWebTokenHeader header;
        private JsonWebTokenClaims claim;
        private Optional<byte[]> signature = Optional.absent();
        private Optional<List<String>> rawToken = Optional.absent();

        public JsonWebToken build() {
            checkNotNull(header);
            checkNotNull(claim);
            checkNotNull(rawToken);
            if (signature.isPresent()) { checkArgument(signature.get().length > 0); }
            if (rawToken.isPresent()) { checkArgument(rawToken.get().size() == 3); }
            return new JsonWebToken(header, claim, signature, rawToken);
        }

        /** @param header the JSON form of the token header; must parse into {@link JsonWebTokenHeader} */
        public DecoderBuilder header(String header) {
            checkArgument(isNotBlank(header));
            try {
                this.header = MAPPER.readValue(header, JsonWebTokenHeader.class);
                return this;
            } catch (Exception e) {
                throw new MalformedJsonWebTokenException(format("The provided JWT header is malformed: [%s]", header), e);
            }
        }

        /** @param claim the JSON form of the claims set; must parse into {@link JsonWebTokenClaims} */
        public DecoderBuilder claim(String claim) {
            checkArgument(isNotBlank(claim));
            try {
                this.claim = MAPPER.readValue(claim, JsonWebTokenClaims.class);
                return this;
            } catch (Exception e) {
                throw new MalformedJsonWebTokenException(format("The provided JWT claim is malformed: [%s]", claim), e);
            }
        }

        /** @param signature the raw signature bytes; defensively copied */
        public DecoderBuilder signature(byte[] signature) {
            checkNotNull(signature);
            checkArgument(signature.length > 0);
            this.signature = Optional.of(copyOf(signature, signature.length));
            return this;
        }

        /** @param rawToken the three encoded token segments (header, claims, signature) */
        public DecoderBuilder rawToken(List<String> rawToken) {
            checkNotNull(rawToken);
            checkArgument(rawToken.size() == 3);
            this.rawToken = Optional.of((List<String>) ImmutableList.copyOf(rawToken));
            return this;
        }
    }

    /** Builder for tokens created locally (no signature or raw segments yet). */
    public static class EncoderBuilder {

        private JsonWebTokenHeader header;
        private JsonWebTokenClaims claim;

        public EncoderBuilder header(JsonWebTokenHeader header) {
            checkNotNull(header);
            this.header = header;
            return this;
        }

        public EncoderBuilder claim(JsonWebTokenClaims claim) {
            checkNotNull(claim);
            this.claim = claim;
            return this;
        }

        public JsonWebToken build() {
            checkNotNull(claim);
            checkNotNull(header);
            return new JsonWebToken(header, claim, Optional.<byte[]>absent(), Optional.<List<String>>absent());
        }
    }

    public static DecoderBuilder decode() { return new DecoderBuilder(); }

    public static EncoderBuilder encode() { return new EncoderBuilder(); }
}
| Removed code duplication
| src/main/java/com/github/toastshaman/dropwizard/auth/jwt/model/JsonWebToken.java | Removed code duplication |
|
Java | apache-2.0 | e31d119f3b23c9cf978345c5c02a2c94c3f351a4 | 0 | madanadit/alluxio,wwjiang007/alluxio,madanadit/alluxio,wwjiang007/alluxio,wwjiang007/alluxio,Alluxio/alluxio,wwjiang007/alluxio,Alluxio/alluxio,bf8086/alluxio,madanadit/alluxio,madanadit/alluxio,madanadit/alluxio,maobaolong/alluxio,bf8086/alluxio,maobaolong/alluxio,wwjiang007/alluxio,calvinjia/tachyon,Alluxio/alluxio,maobaolong/alluxio,maobaolong/alluxio,madanadit/alluxio,EvilMcJerkface/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,bf8086/alluxio,wwjiang007/alluxio,EvilMcJerkface/alluxio,bf8086/alluxio,calvinjia/tachyon,madanadit/alluxio,maobaolong/alluxio,maobaolong/alluxio,calvinjia/tachyon,Alluxio/alluxio,calvinjia/tachyon,Alluxio/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,bf8086/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,maobaolong/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,Alluxio/alluxio,bf8086/alluxio,bf8086/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,maobaolong/alluxio,wwjiang007/alluxio,madanadit/alluxio,bf8086/alluxio,Alluxio/alluxio,Alluxio/alluxio,wwjiang007/alluxio,wwjiang007/alluxio | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master.file.activesync;
import alluxio.AlluxioURI;
import alluxio.ProcessUtils;
import alluxio.SyncInfo;
import alluxio.conf.PropertyKey;
import alluxio.conf.ServerConfiguration;
import alluxio.collections.Pair;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.InvalidPathException;
import alluxio.heartbeat.HeartbeatContext;
import alluxio.heartbeat.HeartbeatThread;
import alluxio.master.file.FileSystemMaster;
import alluxio.master.file.meta.MountTable;
import alluxio.master.journal.checkpoint.CheckpointName;
import alluxio.master.journal.JournalContext;
import alluxio.master.journal.Journaled;
import alluxio.proto.journal.File;
import alluxio.proto.journal.File.AddSyncPointEntry;
import alluxio.proto.journal.File.RemoveSyncPointEntry;
import alluxio.proto.journal.Journal;
import alluxio.proto.journal.Journal.JournalEntry;
import alluxio.resource.CloseableResource;
import alluxio.resource.LockResource;
import alluxio.retry.RetryUtils;
import alluxio.underfs.UnderFileSystem;
import alluxio.util.io.PathUtils;
import alluxio.wire.SyncPointInfo;
import com.google.common.collect.Iterators;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;
import javax.annotation.concurrent.NotThreadSafe;
/**
 * Manager for the Active UFS sync process.
 *
 * There are several threads cooperating to make the active sync process happen.
 * 1. An active polling thread that polls HDFS for change events and aggregates these events.
 * 2. A heartbeat thread that wakes up periodically to consume the aggregated events, and perform
 *    syncing if necessary.
 * 3. For initial syncing, we launch a future to perform initial syncing asynchronously. This is
 *    stored in mSyncPathStatus.
 */
@NotThreadSafe
public class ActiveSyncManager implements Journaled {
  private static final Logger LOG = LoggerFactory.getLogger(ActiveSyncManager.class);

  // a reference to the mount table
  private final MountTable mMountTable;
  // a list of sync points
  private final List<AlluxioURI> mSyncPathList;
  // a map which maps mount id to a thread polling that UFS
  private final Map<Long, Future<?>> mPollerMap;
  // a map which maps each mount id to a list of paths being actively synced on mountpoint
  private final Map<Long, List<AlluxioURI>> mFilterMap;
  // a map which maps mount id to the latest txid synced on that mount point
  private final Map<Long, Long> mStartingTxIdMap;
  // Future.isDone = INITIALLY_SYNCED, !Future.isDone = SYNCING
  // Future == null => NOT_INITIALLY_SYNCED
  private final Map<AlluxioURI, Future<?>> mSyncPathStatus;
  // a lock which protects the above data structures
  private final Lock mSyncManagerLock;
  // a reference to FSM
  private final FileSystemMaster mFileSystemMaster;
  // a local executor service used to launch polling threads
  private final ExecutorService mExecutorService;

  /**
   * Constructs a Active Sync Manager.
   *
   * @param mountTable mount table
   * @param fileSystemMaster file system master
   */
  public ActiveSyncManager(MountTable mountTable, FileSystemMaster fileSystemMaster) {
    mMountTable = mountTable;
    mPollerMap = new ConcurrentHashMap<>();
    mFilterMap = new ConcurrentHashMap<>();
    mStartingTxIdMap = new ConcurrentHashMap<>();
    mSyncPathList = new CopyOnWriteArrayList<>();
    mFileSystemMaster = fileSystemMaster;
    mSyncPathStatus = new ConcurrentHashMap<>();

    // A lock used to protect the state stored in the above maps and lists
    mSyncManagerLock = new ReentrantLock();
    // Executor Service for active syncing
    mExecutorService =
        Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
  }

  /**
   * Check if a URI is actively synced.
   *
   * @param path path to check
   * @return true if a URI is being actively synced
   */
  public boolean isActivelySynced(AlluxioURI path) {
    for (AlluxioURI syncedPath : mSyncPathList) {
      try {
        if (PathUtils.hasPrefix(path.getPath(), syncedPath.getPath())) {
          return true;
        }
      } catch (InvalidPathException e) {
        return false;
      }
    }
    return false;
  }

  /**
   * Gets the lock protecting the syncManager.
   *
   * @return syncmanager lock
   */
  public Lock getSyncManagerLock() {
    return mSyncManagerLock;
  }

  /**
   * start the polling threads.
   */
  public void start() throws IOException {
    // Initialize UFS states
    for (AlluxioURI syncPoint : mSyncPathList) {
      MountTable.Resolution resolution;
      try {
        resolution = mMountTable.resolve(syncPoint);
      } catch (InvalidPathException e) {
        // Pass the exception as the trailing argument (no placeholder for it) so
        // SLF4J logs the full stack trace instead of just e.toString().
        LOG.info("Invalid Path encountered during start up of ActiveSyncManager, path {}",
            syncPoint, e);
        continue;
      }

      try (CloseableResource<UnderFileSystem> ufsResource = resolution.acquireUfsResource()) {
        if (!ufsResource.get().supportsActiveSync()) {
          throw new UnsupportedOperationException("Active Sync is not supported on this UFS type: "
              + ufsResource.get().getUnderFSType());
        }
        ufsResource.get().startSync(resolution.getUri());
      }
    }
    // attempt to restart from a past txid, if this fails, it will result in MissingEventException
    // therefore forces a sync
    for (long mountId : mFilterMap.keySet()) {
      long txId = mStartingTxIdMap.getOrDefault(mountId, SyncInfo.INVALID_TXID);
      launchPollingThread(mountId, txId);

      try {
        if ((txId == SyncInfo.INVALID_TXID)
            && ServerConfiguration.getBoolean(PropertyKey.MASTER_ACTIVE_UFS_SYNC_INITIAL_SYNC)) {
          mExecutorService.submit(
              () -> mFilterMap.get(mountId).parallelStream().forEach(
                  syncPoint -> {
                    try {
                      RetryUtils.retry("active sync during start",
                          () -> mFileSystemMaster.activeSyncMetadata(syncPoint,
                              null, getExecutor()),
                          RetryUtils.defaultActiveSyncClientRetry(ServerConfiguration
                              .getMs(PropertyKey.MASTER_ACTIVE_UFS_POLL_TIMEOUT)));
                    } catch (IOException e) {
                      LOG.warn("IOException encountered during active sync while starting", e);
                    }
                  }
              )).get();
        }
      } catch (Exception e) {
        LOG.warn("exception encountered during initial sync", e);
      }
    }
  }

  /**
   * Launches polling thread on a particular mount point with starting txId.
   *
   * @param mountId launch polling thread on a mount id
   * @param txId specifies the transaction id to initialize the pollling thread
   */
  public void launchPollingThread(long mountId, long txId) {
    LOG.debug("launch polling thread for mount id {}, txId {}", mountId, txId);
    if (!mPollerMap.containsKey(mountId)) {
      try (CloseableResource<UnderFileSystem> ufsClient =
               mMountTable.getUfsClient(mountId).acquireUfsResource()) {
        ufsClient.get().startActiveSyncPolling(txId);
      } catch (IOException e) {
        LOG.warn("IO Exception trying to launch Polling thread", e);
      }
      ActiveSyncer syncer = new ActiveSyncer(mFileSystemMaster, this, mMountTable, mountId);
      Future<?> future = getExecutor().submit(
          new HeartbeatThread(HeartbeatContext.MASTER_ACTIVE_UFS_SYNC,
              syncer, (int) ServerConfiguration.getMs(PropertyKey.MASTER_ACTIVE_UFS_SYNC_INTERVAL),
              ServerConfiguration.global()));
      mPollerMap.put(mountId, future);
    }
  }

  /**
   * Apply AddSyncPoint entry and journal the entry.
   * @param context journal context
   * @param entry addSyncPoint entry
   */
  public void applyAndJournal(Supplier<JournalContext> context, AddSyncPointEntry entry) {
    try {
      apply(entry);
      context.get().append(Journal.JournalEntry.newBuilder().setAddSyncPoint(entry).build());
    } catch (Throwable t) {
      ProcessUtils.fatalError(LOG, t, "Failed to apply %s", entry);
      throw t; // fatalError will usually system.exit
    }
  }

  /**
   * Apply removeSyncPoint entry and journal the entry.
   * @param context journal context
   * @param entry removeSyncPoint entry
   */
  public void applyAndJournal(Supplier<JournalContext> context, RemoveSyncPointEntry entry) {
    try {
      apply(entry);
      context.get().append(Journal.JournalEntry.newBuilder().setRemoveSyncPoint(entry).build());
    } catch (Throwable t) {
      ProcessUtils.fatalError(LOG, t, "Failed to apply %s", entry);
      throw t; // fatalError will usually system.exit
    }
  }

  /**
   * stop active sync on a mount id.
   *
   * @param mountId mountId to stop active sync
   * @throws InvalidPathException if a sync point under the mount cannot be resolved
   */
  public void stopSyncForMount(long mountId) throws InvalidPathException, IOException {
    LOG.debug("Stop sync for mount id {}", mountId);
    if (mFilterMap.containsKey(mountId)) {
      List<Pair<AlluxioURI, MountTable.Resolution>> toBeDeleted = new ArrayList<>();
      for (AlluxioURI uri : mFilterMap.get(mountId)) {
        MountTable.Resolution resolution = resolveSyncPoint(uri);
        if (resolution != null) {
          toBeDeleted.add(new Pair<>(uri, resolution));
        }
      }
      // Calling stopSyncInternal outside of the traversal of mFilterMap.get(mountId) to avoid
      // ConcurrentModificationException
      for (Pair<AlluxioURI, MountTable.Resolution> deleteInfo : toBeDeleted) {
        stopSyncInternal(deleteInfo.getFirst(), deleteInfo.getSecond());
      }
    }
  }

  /**
   * Perform various checks of stopping a sync point.
   *
   * @param syncPoint sync point to stop
   * @return the path resolution result if successfully passed all checks, null otherwise
   */
  public MountTable.Resolution resolveSyncPoint(AlluxioURI syncPoint) throws InvalidPathException {
    if (!mSyncPathList.contains(syncPoint)) {
      LOG.debug("syncPoint not found {}", syncPoint.getPath());
      return null;
    }
    return mMountTable.resolve(syncPoint);
  }

  /**
   * stop active sync on a URI.
   *
   * @param syncPoint sync point to be stopped
   * @param resolution path resolution for the sync point
   */
  public void stopSyncInternal(AlluxioURI syncPoint, MountTable.Resolution resolution) {
    try (LockResource r = new LockResource(mSyncManagerLock)) {
      LOG.debug("stop syncPoint {}", syncPoint.getPath());
      RemoveSyncPointEntry removeSyncPoint = File.RemoveSyncPointEntry.newBuilder()
          .setSyncpointPath(syncPoint.toString())
          .setMountId(resolution.getMountId())
          .build();
      apply(removeSyncPoint);
      try {
        stopSyncPostJournal(syncPoint);
      } catch (Throwable e) {
        // revert state;
        AddSyncPointEntry addSyncPoint =
            File.AddSyncPointEntry.newBuilder()
                .setSyncpointPath(syncPoint.toString()).build();
        apply(addSyncPoint);
        recoverFromStopSync(syncPoint, resolution.getMountId());
      }
    }
  }

  /**
   * Get the filter list associated with mount Id.
   *
   * @param mountId mountId
   * @return a list of URIs (sync points) associated with that mount id
   */
  public List<AlluxioURI> getFilterList(long mountId) {
    return mFilterMap.get(mountId);
  }

  /**
   * Get the sync point list.
   *
   * @return a list of URIs (sync points)
   */
  public List<SyncPointInfo> getSyncPathList() {
    List<SyncPointInfo> returnList = new ArrayList<>();
    for (AlluxioURI uri : mSyncPathList) {
      SyncPointInfo.SyncStatus status;
      Future<?> syncStatus = mSyncPathStatus.get(uri);
      if (syncStatus == null) {
        status = SyncPointInfo.SyncStatus.NOT_INITIALLY_SYNCED;
      } else if (syncStatus.isDone()) {
        status = SyncPointInfo.SyncStatus.INITIALLY_SYNCED;
      } else {
        status = SyncPointInfo.SyncStatus.SYNCING;
      }
      returnList.add(new SyncPointInfo(uri, status));
    }
    return returnList;
  }

  // Iterator over AddSyncPointEntry journal entries, one per current sync point.
  // Sync points whose path can no longer be resolved are skipped.
  private Iterator<Journal.JournalEntry> getSyncPathIterator() {
    final Iterator<AlluxioURI> it = mSyncPathList.iterator();
    return new Iterator<Journal.JournalEntry>() {
      private AlluxioURI mEntry = null;

      @Override
      public boolean hasNext() {
        if (mEntry != null) {
          return true;
        }
        if (it.hasNext()) {
          mEntry = it.next();
          return true;
        }
        return false;
      }

      @Override
      public Journal.JournalEntry next() {
        if (!hasNext()) {
          throw new NoSuchElementException();
        }
        String syncPointPath = mEntry.getPath();
        long mountId = -1;
        while (mountId == -1) {
          try {
            syncPointPath = mEntry.getPath();
            MountTable.Resolution resolution = mMountTable.resolve(mEntry);
            mountId = resolution.getMountId();
          } catch (InvalidPathException e) {
            LOG.info("Path resolution failed for {}", syncPointPath, e);
            mEntry = null;
            if (!hasNext()) {
              throw new NoSuchElementException();
            }
          }
        }
        mEntry = null;
        File.AddSyncPointEntry addSyncPointEntry =
            File.AddSyncPointEntry.newBuilder()
                .setSyncpointPath(syncPointPath)
                .setMountId(mountId)
                .build();

        return Journal.JournalEntry.newBuilder().setAddSyncPoint(addSyncPointEntry).build();
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException(
            "ActiveSyncManager#Iterator#remove is not supported.");
      }
    };
  }

  private void apply(RemoveSyncPointEntry removeSyncPoint) {
    AlluxioURI syncPoint = new AlluxioURI(removeSyncPoint.getSyncpointPath());
    long mountId = removeSyncPoint.getMountId();

    try (LockResource r = new LockResource(mSyncManagerLock)) {
      LOG.debug("stop syncPoint {}", syncPoint.getPath());

      if (mFilterMap.containsKey(mountId)) {
        List<AlluxioURI> list = mFilterMap.get(mountId);
        if (list != null) {
          list.remove(syncPoint);
        }
        mSyncPathList.remove(syncPoint);
      } else {
        mSyncPathList.remove(syncPoint);
        // We should not be in this situation
        throw new RuntimeException(
            String.format("mountId for the syncPoint %s not found in the filterMap",
                syncPoint.toString()));
      }
    }
  }

  private void apply(AddSyncPointEntry addSyncPoint) {
    AlluxioURI syncPoint = new AlluxioURI(addSyncPoint.getSyncpointPath());
    long mountId = addSyncPoint.getMountId();

    LOG.debug("adding syncPoint {}, mount id {}", syncPoint.getPath(), mountId);
    // Add the new sync point to the filter map
    if (mFilterMap.containsKey(mountId)) {
      mFilterMap.get(mountId).add(syncPoint);
    } else {
      ArrayList<AlluxioURI> list = new ArrayList<>();
      list.add(syncPoint);
      mFilterMap.put(mountId, list);
    }
    // Add to the sync point list
    mSyncPathList.add(syncPoint);
  }

  /**
   * Clean up tasks to stop sync point after we have journaled.
   *
   * @param syncPoint the sync point to stop
   * @throws InvalidPathException if the sync point cannot be resolved against the mount table
   */
  public void stopSyncPostJournal(AlluxioURI syncPoint) throws InvalidPathException {
    MountTable.Resolution resolution = mMountTable.resolve(syncPoint);
    long mountId = resolution.getMountId();
    // Remove initial sync thread
    Future<?> syncFuture = mSyncPathStatus.remove(syncPoint);
    if (syncFuture != null) {
      syncFuture.cancel(true);
    }

    if (mFilterMap.get(mountId).isEmpty()) {
      // syncPoint removed was the last syncPoint for the mountId
      mFilterMap.remove(mountId);
      Future<?> future = mPollerMap.remove(mountId);
      if (future != null) {
        future.cancel(true);
      }
    }

    // Tell UFS to stop monitoring the path
    try (CloseableResource<UnderFileSystem> ufs = resolution.acquireUfsResource()) {
      ufs.get().stopSync(resolution.getUri());
    } catch (IOException e) {
      LOG.info("Ufs IOException for uri {}", syncPoint, e);
    }

    // Stop active sync polling on a particular UFS if it is the last sync point
    if (mFilterMap.containsKey(mountId) && mFilterMap.get(mountId).isEmpty()) {
      try (CloseableResource<UnderFileSystem> ufs = resolution.acquireUfsResource()) {
        ufs.get().stopActiveSyncPolling();
      } catch (IOException e) {
        LOG.warn("Encountered IOException when trying to stop polling thread", e);
      }
    }
  }

  // Iterator over ActiveSyncTxIdEntry journal entries, one per mount with a recorded txid.
  private Iterator<Journal.JournalEntry> getTxIdIterator() {
    final Iterator<Map.Entry<Long, Long>> it = mStartingTxIdMap.entrySet().iterator();
    return new Iterator<Journal.JournalEntry>() {
      private Map.Entry<Long, Long> mEntry = null;

      @Override
      public boolean hasNext() {
        if (mEntry != null) {
          return true;
        }
        if (it.hasNext()) {
          mEntry = it.next();
          return true;
        }
        return false;
      }

      @Override
      public Journal.JournalEntry next() {
        if (!hasNext()) {
          throw new NoSuchElementException();
        }
        long mountId = mEntry.getKey();
        long txId = mEntry.getValue();
        mEntry = null;

        File.ActiveSyncTxIdEntry txIdEntry =
            File.ActiveSyncTxIdEntry.newBuilder().setMountId(mountId)
                .setTxId(txId).build();
        return Journal.JournalEntry.newBuilder().setActiveSyncTxId(txIdEntry).build();
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException(
            "ActiveSyncManager#Iterator#remove is not supported.");
      }
    };
  }

  /**
   * set the transaction id for a particular mountId.
   *
   * @param mountId mount id
   * @param txId transaction id
   */
  public void setTxId(long mountId, long txId) {
    mStartingTxIdMap.put(mountId, txId);
  }

  /**
   * Get SyncManager Executor.
   *
   * @return an executor for active syncing
   */
  public ExecutorService getExecutor() {
    return mExecutorService;
  }

  /**
   * Stops the sync manager and any outstanding threads, does not change the sync points.
   *
   * This stops four things in the following order.
   * 1. Stop any outstanding initial sync futures for the sync points. (syncFuture.cancel)
   * 2. Stop the heartbeat thread that periodically wakes up to process events that have been
   *    recorded for the past heartbeat interval.
   * 3. Tell the polling thread to stop monitoring the path for events
   * 4. Stop the thread that is polling HDFS for events
   */
  public void stop() {
    for (AlluxioURI syncPoint : mSyncPathList) {
      MountTable.Resolution resolution;
      try {
        resolution = mMountTable.resolve(syncPoint);
      } catch (InvalidPathException e) {
        LOG.warn("stop: InvalidPathException resolving syncPoint {}", syncPoint, e);
        // Skip this sync point: dereferencing the failed (null) resolution below
        // would throw a NullPointerException and abort the whole shutdown loop.
        continue;
      }
      long mountId = resolution.getMountId();
      // Remove initial sync thread
      Future<?> syncFuture = mSyncPathStatus.remove(syncPoint);
      if (syncFuture != null) {
        syncFuture.cancel(true);
      }

      Future<?> future = mPollerMap.remove(mountId);
      if (future != null) {
        future.cancel(true);
      }

      // Tell UFS to stop monitoring the path
      try (CloseableResource<UnderFileSystem> ufs = resolution.acquireUfsResource()) {
        ufs.get().stopSync(resolution.getUri());
      } catch (IOException e) {
        LOG.warn("Ufs IOException for uri {}", syncPoint, e);
      }

      try (CloseableResource<UnderFileSystem> ufs = resolution.acquireUfsResource()) {
        ufs.get().stopActiveSyncPolling();
      } catch (IOException e) {
        LOG.warn("Encountered IOException when trying to stop polling thread", e);
      }
    }
  }

  // Kicks off the asynchronous initial metadata sync for a new sync point and
  // records its future in mSyncPathStatus (used to report SYNCING/INITIALLY_SYNCED).
  private void startInitSync(AlluxioURI uri, MountTable.Resolution resolution) {
    try (CloseableResource<UnderFileSystem> ufsResource = resolution.acquireUfsResource()) {
      Future<?> syncFuture = mExecutorService.submit(
          () -> {
            try {
              // Notify ufs polling thread to keep track of events related to specified uri
              ufsResource.get().startSync(resolution.getUri());
              // Start the initial metadata sync between the ufs and alluxio for the specified uri
              if (ServerConfiguration.getBoolean(PropertyKey.MASTER_ACTIVE_UFS_SYNC_INITIAL_SYNC)) {
                mFileSystemMaster.activeSyncMetadata(uri, null, getExecutor());
              }
            } catch (IOException e) {
              LOG.info(ExceptionMessage.FAILED_INITIAL_SYNC.getMessage(
                  resolution.getUri()), e);
            }
          });
      mSyncPathStatus.put(uri, syncFuture);
    }
  }

  /**
   * Continue to start sync after we have journaled the operation.
   *
   * @param uri the sync point that we are trying to start
   */
  public void startSyncPostJournal(AlluxioURI uri) throws InvalidPathException {
    MountTable.Resolution resolution = mMountTable.resolve(uri);
    startInitSync(uri, resolution);
    launchPollingThread(resolution.getMountId(), SyncInfo.INVALID_TXID);
  }

  /**
   * Recover from a stop sync operation.
   *
   * @param uri uri to stop sync
   * @param mountId mount id of the uri
   */
  public void recoverFromStopSync(AlluxioURI uri, long mountId) {
    if (mSyncPathStatus.containsKey(uri)) {
      // nothing to recover from, since the syncPathStatus still contains syncPoint
      return;
    }
    try {
      // the init sync thread has been removed, to reestablish sync, we need to sync again
      MountTable.Resolution resolution = mMountTable.resolve(uri);
      startInitSync(uri, resolution);
      launchPollingThread(resolution.getMountId(), SyncInfo.INVALID_TXID);
    } catch (Throwable t) {
      LOG.warn("Recovering from stop syncing failed", t);
    }
  }

  /**
   * Recover from start sync operation.
   *
   * @param uri uri to start sync
   * @param mountId mount id of the uri
   */
  public void recoverFromStartSync(AlluxioURI uri, long mountId) {
    // if the init sync has been launched, we need to stop it
    if (mSyncPathStatus.containsKey(uri)) {
      Future<?> syncFuture = mSyncPathStatus.remove(uri);
      if (syncFuture != null) {
        syncFuture.cancel(true);
      }
    }

    // if the polling thread has been launched, we need to stop it
    mFilterMap.remove(mountId);
    Future<?> future = mPollerMap.remove(mountId);
    if (future != null) {
      future.cancel(true);
    }
  }

  @Override
  public boolean processJournalEntry(JournalEntry entry) {
    if (entry.hasAddSyncPoint()) {
      apply(entry.getAddSyncPoint());
      return true;
    } else if (entry.hasRemoveSyncPoint()) {
      apply(entry.getRemoveSyncPoint());
      return true;
    } else if (entry.hasActiveSyncTxId()) {
      File.ActiveSyncTxIdEntry activeSyncTxId = entry.getActiveSyncTxId();
      setTxId(activeSyncTxId.getMountId(), activeSyncTxId.getTxId());
      return true;
    }
    return false;
  }

  /**
   * {@inheritDoc}
   *
   * It clears all sync points, and stops the polling thread.
   */
  @Override
  public void resetState() {
    for (long mountId : mFilterMap.keySet()) {
      try {
        // stops sync point under this mount point. Note this clears the sync point and
        // stops associated polling threads.
        stopSyncForMount(mountId);
      } catch (IOException | InvalidPathException e) {
        LOG.info("Exception resetting mountId {}", mountId, e);
      }
    }
  }

  @Override
  public CheckpointName getCheckpointName() {
    return CheckpointName.ACTIVE_SYNC_MANAGER;
  }

  @Override
  public Iterator<JournalEntry> getJournalEntryIterator() {
    return Iterators.concat(getSyncPathIterator(), getTxIdIterator());
  }
}
| core/server/master/src/main/java/alluxio/master/file/activesync/ActiveSyncManager.java | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master.file.activesync;
import alluxio.AlluxioURI;
import alluxio.ProcessUtils;
import alluxio.SyncInfo;
import alluxio.conf.PropertyKey;
import alluxio.conf.ServerConfiguration;
import alluxio.collections.Pair;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.InvalidPathException;
import alluxio.heartbeat.HeartbeatContext;
import alluxio.heartbeat.HeartbeatThread;
import alluxio.master.file.FileSystemMaster;
import alluxio.master.file.meta.MountTable;
import alluxio.master.journal.checkpoint.CheckpointName;
import alluxio.master.journal.JournalContext;
import alluxio.master.journal.Journaled;
import alluxio.proto.journal.File;
import alluxio.proto.journal.File.AddSyncPointEntry;
import alluxio.proto.journal.File.RemoveSyncPointEntry;
import alluxio.proto.journal.Journal;
import alluxio.proto.journal.Journal.JournalEntry;
import alluxio.resource.CloseableResource;
import alluxio.resource.LockResource;
import alluxio.retry.RetryUtils;
import alluxio.underfs.UnderFileSystem;
import alluxio.util.io.PathUtils;
import alluxio.wire.SyncPointInfo;
import com.google.common.collect.Iterators;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;
import javax.annotation.concurrent.NotThreadSafe;
/**
* Manager for the Active UFS sync process.
* There are several threads cooperating to make the active sync process happen.
* 1. An active polling thread that polls HDFS for change events and aggregates these events.
* 2. A heartbeat thread that wakes up periodically to consume the aggregated events, and perform
* syncing if necessary.
* 3. For initial syncing, we launch a future to perform initial syncing asynchronously. This is
* stored in mSyncPathStatus.
*/
@NotThreadSafe
public class ActiveSyncManager implements Journaled {
private static final Logger LOG = LoggerFactory.getLogger(ActiveSyncManager.class);
// a reference to the mount table
private final MountTable mMountTable;
// a list of sync points
private final List<AlluxioURI> mSyncPathList;
// a map which maps mount id to a thread polling that UFS
private final Map<Long, Future<?>> mPollerMap;
// a map which maps each mount id to a list of paths being actively synced on mountpoint
private final Map<Long, List<AlluxioURI>> mFilterMap;
// a map which maps mount id to the latest txid synced on that mount point
private final Map<Long, Long> mStartingTxIdMap;
// Future.isDone = INITIALLY_SYNCED, !Future.isDone = SYNCING
// Future == null => NOT_INITIALLY_SYNCED
private final Map<AlluxioURI, Future<?>> mSyncPathStatus;
// a lock which protects the above data structures
private final Lock mSyncManagerLock;
// a reference to FSM
private FileSystemMaster mFileSystemMaster;
// a local executor service used to launch polling threads
private ExecutorService mExecutorService;
/**
* Constructs a Active Sync Manager.
*
* @param mountTable mount table
* @param fileSystemMaster file system master
*/
public ActiveSyncManager(MountTable mountTable, FileSystemMaster fileSystemMaster) {
mMountTable = mountTable;
mPollerMap = new ConcurrentHashMap<>();
mFilterMap = new ConcurrentHashMap<>();
mStartingTxIdMap = new ConcurrentHashMap<>();
mSyncPathList = new CopyOnWriteArrayList<>();
mFileSystemMaster = fileSystemMaster;
mSyncPathStatus = new ConcurrentHashMap<>();
// A lock used to protect the state stored in the above maps and lists
mSyncManagerLock = new ReentrantLock();
// Executor Service for active syncing
mExecutorService =
Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
}
/**
* Check if a URI is actively synced.
*
* @param path path to check
* @return true if a URI is being actively synced
*/
public boolean isActivelySynced(AlluxioURI path) {
for (AlluxioURI syncedPath : mSyncPathList) {
try {
if (PathUtils.hasPrefix(path.getPath(), syncedPath.getPath())) {
return true;
}
} catch (InvalidPathException e) {
return false;
}
}
return false;
}
/**
* Gets the lock protecting the syncManager.
*
* @return syncmanager lock
*/
public Lock getSyncManagerLock() {
return mSyncManagerLock;
}
/**
* start the polling threads.
*
*/
public void start() throws IOException {
// Initialize UFS states
for (AlluxioURI syncPoint : mSyncPathList) {
MountTable.Resolution resolution = null;
long mountId = 0;
try {
resolution = mMountTable.resolve(syncPoint);
mountId = resolution.getMountId();
} catch (InvalidPathException e) {
LOG.info("Invalid Path encountered during start up of ActiveSyncManager, "
+ "path {}, exception {}", syncPoint, e);
continue;
}
try (CloseableResource<UnderFileSystem> ufsResource = resolution.acquireUfsResource()) {
if (!ufsResource.get().supportsActiveSync()) {
throw new UnsupportedOperationException("Active Sync is not supported on this UFS type: "
+ ufsResource.get().getUnderFSType());
}
ufsResource.get().startSync(resolution.getUri());
}
}
// attempt to restart from a past txid, if this fails, it will result in MissingEventException
// therefore forces a sync
for (long mountId: mFilterMap.keySet()) {
long txId = mStartingTxIdMap.containsKey(mountId)
? mStartingTxIdMap.get(mountId) : SyncInfo.INVALID_TXID;
launchPollingThread(mountId, txId);
try {
if ((txId == SyncInfo.INVALID_TXID)
&& ServerConfiguration.getBoolean(PropertyKey.MASTER_ACTIVE_UFS_SYNC_INITIAL_SYNC)) {
mExecutorService.submit(
() -> mFilterMap.get(mountId).parallelStream().forEach(
syncPoint -> {
try {
RetryUtils.retry("active sync during start",
() -> mFileSystemMaster.activeSyncMetadata(syncPoint,
null, getExecutor()),
RetryUtils.defaultActiveSyncClientRetry(ServerConfiguration
.getMs(PropertyKey.MASTER_ACTIVE_UFS_POLL_TIMEOUT)));
} catch (IOException e) {
LOG.warn("IOException encountered during active sync while starting {}", e);
}
}
)).get();
}
} catch (Exception e) {
LOG.warn("exception encountered during initial sync {}", e);
}
}
}
/**
* Launches polling thread on a particular mount point with starting txId.
*
* @param mountId launch polling thread on a mount id
* @param txId specifies the transaction id to initialize the pollling thread
*/
public void launchPollingThread(long mountId, long txId) {
LOG.debug("launch polling thread for mount id {}, txId {}", mountId, txId);
if (!mPollerMap.containsKey(mountId)) {
try (CloseableResource<UnderFileSystem> ufsClient =
mMountTable.getUfsClient(mountId).acquireUfsResource()) {
ufsClient.get().startActiveSyncPolling(txId);
} catch (IOException e) {
LOG.warn("IO Exception trying to launch Polling thread {}", e);
}
ActiveSyncer syncer = new ActiveSyncer(mFileSystemMaster, this, mMountTable, mountId);
Future<?> future = getExecutor().submit(
new HeartbeatThread(HeartbeatContext.MASTER_ACTIVE_UFS_SYNC,
syncer, (int) ServerConfiguration.getMs(PropertyKey.MASTER_ACTIVE_UFS_SYNC_INTERVAL),
ServerConfiguration.global()));
mPollerMap.put(mountId, future);
}
}
/**
* Apply AddSyncPoint entry and journal the entry.
* @param context journal context
* @param entry addSyncPoint entry
*/
public void applyAndJournal(Supplier<JournalContext> context, AddSyncPointEntry entry) {
try {
apply(entry);
context.get().append(Journal.JournalEntry.newBuilder().setAddSyncPoint(entry).build());
} catch (Throwable t) {
ProcessUtils.fatalError(LOG, t, "Failed to apply %s", entry);
throw t; // fatalError will usually system.exit
}
}
/**
* Apply removeSyncPoint entry and journal the entry.
* @param context journal context
* @param entry removeSyncPoint entry
*/
public void applyAndJournal(Supplier<JournalContext> context, RemoveSyncPointEntry entry) {
try {
apply(entry);
context.get().append(Journal.JournalEntry.newBuilder().setRemoveSyncPoint(entry).build());
} catch (Throwable t) {
ProcessUtils.fatalError(LOG, t, "Failed to apply %s", entry);
throw t; // fatalError will usually system.exit
}
}
/**
* stop active sync on a mount id.
*
* @param mountId mountId to stop active sync
*/
public void stopSyncForMount(long mountId) throws InvalidPathException, IOException {
LOG.debug("Stop sync for mount id {}", mountId);
if (mFilterMap.containsKey(mountId)) {
List<Pair<AlluxioURI, MountTable.Resolution>> toBeDeleted = new ArrayList<>();
for (AlluxioURI uri : mFilterMap.get(mountId)) {
MountTable.Resolution resolution = resolveSyncPoint(uri);
if (resolution != null) {
toBeDeleted.add(new Pair<>(uri, resolution));
}
}
// Calling stopSyncInternal outside of the traversal of mFilterMap.get(mountId) to avoid
// ConcurrentModificationException
for (Pair<AlluxioURI, MountTable.Resolution> deleteInfo : toBeDeleted) {
stopSyncInternal(deleteInfo.getFirst(), deleteInfo.getSecond());
}
}
}
/**
* Perform various checks of stopping a sync point.
*
* @param syncPoint sync point to stop
* @return the path resolution result if successfully passed all checks
*/
public MountTable.Resolution resolveSyncPoint(AlluxioURI syncPoint) throws InvalidPathException {
if (!mSyncPathList.contains(syncPoint)) {
LOG.debug("syncPoint not found {}", syncPoint.getPath());
return null;
}
MountTable.Resolution resolution = mMountTable.resolve(syncPoint);
return resolution;
}
/**
* stop active sync on a URI.
*
* @param syncPoint sync point to be stopped
* @param resolution path resolution for the sync point
*/
public void stopSyncInternal(AlluxioURI syncPoint, MountTable.Resolution resolution) {
try (LockResource r = new LockResource(mSyncManagerLock)) {
LOG.debug("stop syncPoint {}", syncPoint.getPath());
RemoveSyncPointEntry removeSyncPoint = File.RemoveSyncPointEntry.newBuilder()
.setSyncpointPath(syncPoint.toString())
.setMountId(resolution.getMountId())
.build();
apply(removeSyncPoint);
try {
stopSyncPostJournal(syncPoint);
} catch (Throwable e) {
// revert state;
AddSyncPointEntry addSyncPoint =
File.AddSyncPointEntry.newBuilder()
.setSyncpointPath(syncPoint.toString()).build();
apply(addSyncPoint);
recoverFromStopSync(syncPoint, resolution.getMountId());
}
}
}
/**
* Get the filter list associated with mount Id.
*
* @param mountId mountId
* @return a list of URIs (sync points) associated with that mount id
*/
public List<AlluxioURI> getFilterList(long mountId) {
return mFilterMap.get(mountId);
}
/**
* Get the sync point list.
*
* @return a list of URIs (sync points)
*/
public List<SyncPointInfo> getSyncPathList() {
List<SyncPointInfo> returnList = new ArrayList<>();
for (AlluxioURI uri: mSyncPathList) {
SyncPointInfo.SyncStatus status;
Future<?> syncStatus = mSyncPathStatus.get(uri);
if (syncStatus == null) {
status = SyncPointInfo.SyncStatus.NOT_INITIALLY_SYNCED;
} else if (syncStatus.isDone()) {
status = SyncPointInfo.SyncStatus.INITIALLY_SYNCED;
} else {
status = SyncPointInfo.SyncStatus.SYNCING;
}
returnList.add(new SyncPointInfo(uri, status));
}
return returnList;
}
private Iterator<Journal.JournalEntry> getSyncPathIterator() {
final Iterator<AlluxioURI> it = mSyncPathList.iterator();
return new Iterator<Journal.JournalEntry>() {
private AlluxioURI mEntry = null;
@Override
public boolean hasNext() {
if (mEntry != null) {
return true;
}
if (it.hasNext()) {
mEntry = it.next();
return true;
}
return false;
}
@Override
public Journal.JournalEntry next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
String syncPointPath = mEntry.getPath();
long mountId = -1;
while (mountId == -1) {
try {
syncPointPath = mEntry.getPath();
MountTable.Resolution resolution = mMountTable.resolve(mEntry);
mountId = resolution.getMountId();
} catch (InvalidPathException e) {
LOG.info("Path resolution failed for {}, exception {}", syncPointPath, e);
mEntry = null;
if (!hasNext()) {
throw new NoSuchElementException();
}
}
}
mEntry = null;
File.AddSyncPointEntry addSyncPointEntry =
File.AddSyncPointEntry.newBuilder()
.setSyncpointPath(syncPointPath)
.setMountId(mountId)
.build();
return Journal.JournalEntry.newBuilder().setAddSyncPoint(addSyncPointEntry).build();
}
@Override
public void remove() {
throw new UnsupportedOperationException(
"ActiveSyncManager#Iterator#remove is not supported.");
}
};
}
private void apply(RemoveSyncPointEntry removeSyncPoint) {
AlluxioURI syncPoint = new AlluxioURI(removeSyncPoint.getSyncpointPath());
long mountId = removeSyncPoint.getMountId();
try (LockResource r = new LockResource(mSyncManagerLock)) {
LOG.debug("stop syncPoint {}", syncPoint.getPath());
if (mFilterMap.containsKey(mountId)) {
List list = mFilterMap.get(mountId);
if (list != null) {
list.remove(syncPoint);
}
mSyncPathList.remove(syncPoint);
} else {
mSyncPathList.remove(syncPoint);
// We should not be in this situation
throw new RuntimeException(
String.format("mountId for the syncPoint %s not found in the filterMap",
syncPoint.toString()));
}
}
}
private void apply(AddSyncPointEntry addSyncPoint) {
AlluxioURI syncPoint = new AlluxioURI(addSyncPoint.getSyncpointPath());
long mountId = addSyncPoint.getMountId();
LOG.debug("adding syncPoint {}, mount id {}", syncPoint.getPath(), mountId);
// Add the new sync point to the filter map
if (mFilterMap.containsKey(mountId)) {
mFilterMap.get(mountId).add(syncPoint);
} else {
ArrayList<AlluxioURI> list = new ArrayList<>();
list.add(syncPoint);
mFilterMap.put(mountId, list);
}
// Add to the sync point list
mSyncPathList.add(syncPoint);
}
/**
* Clean up tasks to stop sync point after we have journaled.
*
* @param syncPoint the sync point to stop
* @throws InvalidPathException
*/
public void stopSyncPostJournal(AlluxioURI syncPoint) throws InvalidPathException {
MountTable.Resolution resolution = mMountTable.resolve(syncPoint);
long mountId = resolution.getMountId();
// Remove initial sync thread
Future<?> syncFuture = mSyncPathStatus.remove(syncPoint);
if (syncFuture != null) {
syncFuture.cancel(true);
}
if (mFilterMap.get(mountId).isEmpty()) {
// syncPoint removed was the last syncPoint for the mountId
mFilterMap.remove(mountId);
Future<?> future = mPollerMap.remove(mountId);
if (future != null) {
future.cancel(true);
}
}
// Tell UFS to stop monitoring the path
try (CloseableResource<UnderFileSystem> ufs = resolution.acquireUfsResource()) {
ufs.get().stopSync(resolution.getUri());
} catch (IOException e) {
LOG.info("Ufs IOException for uri {}, exception is {}", syncPoint, e);
}
// Stop active sync polling on a particular UFS if it is the last sync point
if (mFilterMap.containsKey(mountId) && mFilterMap.get(mountId).isEmpty()) {
try (CloseableResource<UnderFileSystem> ufs = resolution.acquireUfsResource()) {
ufs.get().stopActiveSyncPolling();
} catch (IOException e) {
LOG.warn("Encountered IOException when trying to stop polling thread {}", e);
}
}
}
private Iterator<Journal.JournalEntry> getTxIdIterator() {
final Iterator<Map.Entry<Long, Long>> it = mStartingTxIdMap.entrySet().iterator();
return new Iterator<Journal.JournalEntry>() {
private Map.Entry<Long, Long> mEntry = null;
@Override
public boolean hasNext() {
if (mEntry != null) {
return true;
}
if (it.hasNext()) {
mEntry = it.next();
return true;
}
return false;
}
@Override
public Journal.JournalEntry next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
long mountId = mEntry.getKey();
long txId = mEntry.getValue();
mEntry = null;
File.ActiveSyncTxIdEntry txIdEntry =
File.ActiveSyncTxIdEntry.newBuilder().setMountId(mountId)
.setTxId(txId).build();
return Journal.JournalEntry.newBuilder().setActiveSyncTxId(txIdEntry).build();
}
@Override
public void remove() {
throw new UnsupportedOperationException(
"ActiveSyncManager#Iterator#remove is not supported.");
}
};
}
/**
* set the transaction id for a particular mountId.
*
* @param mountId mount id
* @param txId transaction id
*/
public void setTxId(long mountId, long txId) {
mStartingTxIdMap.put(mountId, txId);
}
/**
* Get SyncManager Executor.
*
* @return an executor for active syncing
*/
public ExecutorService getExecutor() {
return mExecutorService;
}
/**
* Stops the sync manager and any outstanding threads, does not change the sync points.
* This stops four things in the following order.
* 1. Stop any outstanding initial sync futures for the sync points. (syncFuture.cancel)
* 2. Stop the heartbeat thread that periodically wakes up to process events that have been
* recorded for the past heartbeat interval.
* 3. Tell the polling thread to stop monitoring the path for events
* 4. Stop the thread that is polling HDFS for events
*/
public void stop() {
for (AlluxioURI syncPoint : mSyncPathList) {
MountTable.Resolution resolution = null;
try {
resolution = mMountTable.resolve(syncPoint);
} catch (InvalidPathException e) {
LOG.warn("stop: InvalidPathException resolving syncPoint {}, exception {}",
syncPoint, e);
}
long mountId = resolution.getMountId();
// Remove initial sync thread
Future<?> syncFuture = mSyncPathStatus.remove(syncPoint);
if (syncFuture != null) {
syncFuture.cancel(true);
}
Future<?> future = mPollerMap.remove(mountId);
if (future != null) {
future.cancel(true);
}
// Tell UFS to stop monitoring the path
try (CloseableResource<UnderFileSystem> ufs = resolution.acquireUfsResource()) {
ufs.get().stopSync(resolution.getUri());
} catch (IOException e) {
LOG.warn("Ufs IOException for uri {}, exception is {}", syncPoint, e);
}
try (CloseableResource<UnderFileSystem> ufs = resolution.acquireUfsResource()) {
ufs.get().stopActiveSyncPolling();
} catch (IOException e) {
LOG.warn("Encountered IOException when trying to stop polling thread {}", e);
}
}
}
private void startInitSync(AlluxioURI uri, MountTable.Resolution resolution) {
try (CloseableResource<UnderFileSystem> ufsResource = resolution.acquireUfsResource()) {
Future<?> syncFuture = mExecutorService.submit(
() -> {
try {
// Notify ufs polling thread to keep track of events related to specified uri
ufsResource.get().startSync(resolution.getUri());
// Start the initial metadata sync between the ufs and alluxio for the specified uri
if (ServerConfiguration.getBoolean(PropertyKey.MASTER_ACTIVE_UFS_SYNC_INITIAL_SYNC)) {
mFileSystemMaster.activeSyncMetadata(uri, null, getExecutor());
}
} catch (IOException e) {
LOG.info(ExceptionMessage.FAILED_INITIAL_SYNC.getMessage(
resolution.getUri()), e);
}
});
mSyncPathStatus.put(uri, syncFuture);
}
}
/**
* Continue to start sync after we have journaled the operation.
*
* @param uri the sync point that we are trying to start
*/
public void startSyncPostJournal(AlluxioURI uri) throws InvalidPathException {
MountTable.Resolution resolution = mMountTable.resolve(uri);
startInitSync(uri, resolution);
launchPollingThread(resolution.getMountId(), SyncInfo.INVALID_TXID);
}
/**
* Recover from a stop sync operation.
*
* @param uri uri to stop sync
* @param mountId mount id of the uri
*/
public void recoverFromStopSync(AlluxioURI uri, long mountId) {
if (mSyncPathStatus.containsKey(uri)) {
// nothing to recover from, since the syncPathStatus still contains syncPoint
return;
}
try {
// the init sync thread has been removed, to reestablish sync, we need to sync again
MountTable.Resolution resolution = mMountTable.resolve(uri);
startInitSync(uri, resolution);
launchPollingThread(resolution.getMountId(), SyncInfo.INVALID_TXID);
} catch (Throwable t) {
LOG.warn("Recovering from stop syncing failed {}", t);
}
}
/**
* Recover from start sync operation.
*
* @param uri uri to start sync
* @param mountId mount id of the uri
*/
public void recoverFromStartSync(AlluxioURI uri, long mountId) {
// if the init sync has been launched, we need to stop it
if (mSyncPathStatus.containsKey(uri)) {
Future<?> syncFuture = mSyncPathStatus.remove(uri);
if (syncFuture != null) {
syncFuture.cancel(true);
}
}
// if the polling thread has been launched, we need to stop it
mFilterMap.remove(mountId);
Future<?> future = mPollerMap.remove(mountId);
if (future != null) {
future.cancel(true);
}
}
@Override
public boolean processJournalEntry(JournalEntry entry) {
if (entry.hasAddSyncPoint()) {
apply(entry.getAddSyncPoint());
return true;
} else if (entry.hasRemoveSyncPoint()) {
apply(entry.getRemoveSyncPoint());
return true;
} else if (entry.hasActiveSyncTxId()) {
File.ActiveSyncTxIdEntry activeSyncTxId = entry.getActiveSyncTxId();
setTxId(activeSyncTxId.getMountId(), activeSyncTxId.getTxId());
return true;
}
return false;
}
/**
* {@inheritDoc}
*
* It clears all sync points, and stops the polling thread.
*/
@Override
public void resetState() {
for (long mountId : mFilterMap.keySet()) {
try {
// stops sync point under this mount point. Note this clears the sync point and
// stops associated polling threads.
stopSyncForMount(mountId);
} catch (IOException | InvalidPathException e) {
LOG.info("Exception resetting mountId {}, exception: {}", mountId, e);
}
}
}
@Override
public CheckpointName getCheckpointName() {
return CheckpointName.ACTIVE_SYNC_MANAGER;
}
@Override
public Iterator<JournalEntry> getJournalEntryIterator() {
return Iterators.concat(getSyncPathIterator(), getTxIdIterator());
}
}
| [SMALLFIX] Replace with Map.getOrDefault
pr-link: Alluxio/alluxio#8718
change-id: cid-ca3fdb0109fbfa705063076d9416392f7aac0874 | core/server/master/src/main/java/alluxio/master/file/activesync/ActiveSyncManager.java | [SMALLFIX] Replace with Map.getOrDefault |
|
Java | apache-2.0 | 094b7711dce069a83b663a3d528dae57a1749912 | 0 | carrchang/vaadin,Scarlethue/vaadin,udayinfy/vaadin,asashour/framework,bmitc/vaadin,jdahlstrom/vaadin.react,jdahlstrom/vaadin.react,cbmeeks/vaadin,Legioth/vaadin,magi42/vaadin,sitexa/vaadin,magi42/vaadin,kironapublic/vaadin,bmitc/vaadin,mstahv/framework,Peppe/vaadin,mstahv/framework,travisfw/vaadin,sitexa/vaadin,magi42/vaadin,udayinfy/vaadin,oalles/vaadin,shahrzadmn/vaadin,kironapublic/vaadin,mstahv/framework,Peppe/vaadin,Darsstar/framework,cbmeeks/vaadin,fireflyc/vaadin,cbmeeks/vaadin,Scarlethue/vaadin,Legioth/vaadin,peterl1084/framework,jdahlstrom/vaadin.react,Scarlethue/vaadin,mittop/vaadin,synes/vaadin,travisfw/vaadin,bmitc/vaadin,Legioth/vaadin,synes/vaadin,fireflyc/vaadin,sitexa/vaadin,Flamenco/vaadin,synes/vaadin,travisfw/vaadin,synes/vaadin,asashour/framework,udayinfy/vaadin,Scarlethue/vaadin,mstahv/framework,oalles/vaadin,magi42/vaadin,Flamenco/vaadin,Darsstar/framework,bmitc/vaadin,udayinfy/vaadin,shahrzadmn/vaadin,asashour/framework,kironapublic/vaadin,Peppe/vaadin,Flamenco/vaadin,Legioth/vaadin,shahrzadmn/vaadin,jdahlstrom/vaadin.react,asashour/framework,cbmeeks/vaadin,mstahv/framework,jdahlstrom/vaadin.react,Darsstar/framework,mittop/vaadin,oalles/vaadin,travisfw/vaadin,magi42/vaadin,Scarlethue/vaadin,travisfw/vaadin,fireflyc/vaadin,fireflyc/vaadin,oalles/vaadin,kironapublic/vaadin,Flamenco/vaadin,peterl1084/framework,mittop/vaadin,udayinfy/vaadin,carrchang/vaadin,sitexa/vaadin,peterl1084/framework,peterl1084/framework,shahrzadmn/vaadin,Darsstar/framework,carrchang/vaadin,Darsstar/framework,asashour/framework,carrchang/vaadin,Peppe/vaadin,sitexa/vaadin,peterl1084/framework,synes/vaadin,mittop/vaadin,Peppe/vaadin,fireflyc/vaadin,oalles/vaadin,shahrzadmn/vaadin,Legioth/vaadin,kironapublic/vaadin | package com.itmill.toolkit.tests;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import com.itmill.toolkit.Application;
import com.itmill.toolkit.terminal.ThemeResource;
import com.itmill.toolkit.terminal.UserError;
import com.itmill.toolkit.ui.Button;
import com.itmill.toolkit.ui.Component;
import com.itmill.toolkit.ui.Label;
import com.itmill.toolkit.ui.Layout;
import com.itmill.toolkit.ui.OrderedLayout;
import com.itmill.toolkit.ui.Panel;
import com.itmill.toolkit.ui.Select;
import com.itmill.toolkit.ui.TextField;
import com.itmill.toolkit.ui.Window;
import com.itmill.toolkit.ui.Button.ClickEvent;
import com.itmill.toolkit.ui.Button.ClickListener;
import com.itmill.toolkit.ui.Layout.AlignmentHandler;
public class TestOrderedLayout extends Application {
String valignName[] = new String[] { "top", "middle", "bottom" };
int valign[] = new int[] { OrderedLayout.ALIGNMENT_TOP,
OrderedLayout.ALIGNMENT_VERTICAL_CENTER,
OrderedLayout.ALIGNMENT_BOTTOM };
Set<OrderedLayout> layouts = new HashSet<OrderedLayout>();
private OrderedLayout layoutContainer;
private int suffix = 0;
public void init() {
Window w = new Window(getClass().getSimpleName());
setMainWindow(w);
// setTheme("tests-tickets");
// GridLayout layout = new OrderedLayout(1, 10);
// w.setLayout(layout);
w.getLayout().addComponent(new Button("Swap", new ClickListener() {
public void buttonClick(ClickEvent event) {
swapLayouts();
}
}));
layoutContainer = new OrderedLayout(OrderedLayout.ORIENTATION_VERTICAL);
createUI(layoutContainer);
w.getLayout().addComponent(layoutContainer);
// swapLayouts();
}
public void swapLayouts() {
OrderedLayout mainLayout = layoutContainer;
int mainOrient = 1 - mainLayout.getOrientation();
mainLayout.setOrientation(mainOrient);
for (OrderedLayout ol : layouts) {
ol.setOrientation(1 - mainOrient);
float h = ol.getHeight();
int hUnit = ol.getHeightUnits();
float w = ol.getWidth();
int wUnit = ol.getWidthUnits();
ol.setWidth(h, hUnit);
ol.setHeight(w, wUnit);
}
}
private void createUI(Layout layout) {
layout
.addComponent(wrapLayout(layout_field_100pct_button_field(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout.addComponent(wrapLayout(layout_overfilled(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout
.addComponent(wrapLayout(layout_overfilled_dynamic_height(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
if (true) {
return;
}
layout
.addComponent(wrapLayout(layout_symmetric_fields(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout.addComponent(wrapLayout(layout_leftAndRight(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout.addComponent(wrapLayout(layout_fixed_filled(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout.addComponent(wrapLayout(layout_dynamic(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout.addComponent(wrapLayout(layout_labels(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout.addComponent(wrapLayout(layout_captions(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout
.addComponent(wrapLayout(layout_captions_fixed_size(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout
.addComponent(wrapLayout(layout_captions_fixed_size_and_relative_size(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout
.addComponent(wrapLayout(layout_captions_fixed_size_and_fixed_size(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout
.addComponent(wrapLayout(layout_add_remove_components(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout.addComponent(wrapLayout(layout_pctFilled(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout.addComponent(wrapLayout(layout_pctFilled(new OrderedLayout(
OrderedLayout.ORIENTATION_VERTICAL))));
layout.addComponent(wrapLayout(layout_underFilled(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
layout.addComponent(wrapLayout(layout_basic_test(new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL))));
}
private Layout wrapLayout(Layout ol) {
Panel p = new Panel(ol);
p.setSizeUndefined();
p.setCaption(ol.getCaption());
ol.setCaption(null);
OrderedLayout l = new OrderedLayout();
l.setSizeUndefined();
l.addComponent(p);
// p.setWidth("600px");
if (ol instanceof OrderedLayout) {
layouts.add((OrderedLayout) ol);
}
return l;
}
/* LAYOUTS */
private Layout layout1() {
OrderedLayout ol = new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL);
ol.setHeight("200px");
ol.setWidth("");
ol.setCaption("Fixed height (200px) and dynamic width");
TextField tf = new TextField("100px high TextField, valign: bottom");
tf.setHeight("100px");
tf.setWidth("");
ol.addComponent(tf);
ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
OrderedLayout.ALIGNMENT_BOTTOM);
Select s = new Select("100% high select");
s.setMultiSelect(true);
s.setHeight("100%");
s.setWidth("");
ol.addComponent(s);
s = new Select("200 px high select");
s.setMultiSelect(true);
s.setHeight("200px");
s.setWidth("");
ol.addComponent(s);
// tf = new TextField("100% high TextField, right/bottom");
// tf.setHeight("100%");
// tf.setWidth("");
// ol.addComponent(tf);
// ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
// OrderedLayout.ALIGNMENT_BOTTOM);
// tf = new TextField("100% high, 200px wide TextField");
// tf.setHeight("100%");
// tf.setWidth("200px");
// ol.addComponent(tf);
return ol;
}
private Layout layout2() {
OrderedLayout ol = new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL);
ol.setHeight("70px");
ol.setWidth("");
ol.setCaption("Fixed height (50px) and dynamic width");
TextField tf = new TextField(
"100px high TextField, valign: bottom, should be partly outside");
tf.setHeight("100px");
tf.setWidth("");
ol.addComponent(tf);
ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
OrderedLayout.ALIGNMENT_BOTTOM);
tf = new TextField(
"100% high, 50px wide TextField, valign: bottom, should fill full height");
tf.setHeight("100%");
tf.setWidth("50px");
ol.addComponent(tf);
ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
OrderedLayout.ALIGNMENT_BOTTOM);
Label l = new Label(
"100% high, 50px wide Label, valign: bottom, does not fill full height, only needed space");
tf.setHeight("100%");
tf.setWidth("50px");
ol.addComponent(l);
ol.setComponentAlignment(l, OrderedLayout.ALIGNMENT_LEFT,
OrderedLayout.ALIGNMENT_BOTTOM);
Select s = new Select("100% high select, should fit into layout");
s.setMultiSelect(true);
s.setHeight("100%");
s.setWidth("");
for (int i = 0; i < 10; i++) {
s.addItem(new Object());
}
ol.addComponent(s);
s = new Select("200 px high select, should be partly outside");
s.setMultiSelect(true);
s.setHeight("200px");
s.setWidth("");
ol.addComponent(s);
return ol;
}
private Layout layout3() {
OrderedLayout ol = new OrderedLayout(
OrderedLayout.ORIENTATION_HORIZONTAL);
ol.setHeight("");
ol.setWidth("500px");
ol.setCaption("Fixed width (500px) and dynamic height");
TextField tf;
tf = new TextField("100px high TextField, valign: bottom");
tf.setHeight("100px");
tf.setWidth("100%");
ol.addComponent(tf);
ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
OrderedLayout.ALIGNMENT_BOTTOM);
tf = new TextField("100px high TextField, valign: top");
tf.setHeight("100px");
tf.setWidth("100%");
ol.addComponent(tf);
ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
OrderedLayout.ALIGNMENT_TOP);
tf = new TextField("100% high, 50px wide TextField, valign: bottom");
tf.setHeight("100%");
tf.setWidth("50px");
ol.addComponent(tf);
ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
OrderedLayout.ALIGNMENT_BOTTOM);
Label l = new Label(
"100% high, 50px wide Label, valign: bottom, does not fill full height, only needed space");
tf.setHeight("100%");
tf.setWidth("50px");
ol.addComponent(l);
ol.setComponentAlignment(l, OrderedLayout.ALIGNMENT_LEFT,
OrderedLayout.ALIGNMENT_BOTTOM);
Select s = new Select("100% high select, should fit into layout");
s.setMultiSelect(true);
s.setHeight("100%");
s.setWidth("100%");
for (int i = 0; i < 10; i++) {
s.addItem(new Object());
}
ol.addComponent(s);
s = new Select("200 px high select, should make the layout 200px high");
s.setMultiSelect(true);
s.setHeight("200px");
s.setWidth("100%");
ol.addComponent(s);
return ol;
}
/**
 * Builds a horizontal OrderedLayout with dynamic width and fixed 300px
 * height, mirroring {@link #layout3()} with swapped size constraints.
 *
 * @return the populated test layout
 */
private Layout layout3New() {
    OrderedLayout ol = new OrderedLayout(
            OrderedLayout.ORIENTATION_HORIZONTAL);
    ol.setHeight("300px");
    // ol.setWidth("500px");
    ol.setWidth("");
    ol.setCaption("Dynamic width and fixed height(300px)");
    TextField tf;
    tf = new TextField("100px high TextField, valign: bottom");
    tf.setHeight("100px");
    tf.setWidth("100%");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    tf = new TextField("100px high TextField, valign: top");
    tf.setHeight("100px");
    tf.setWidth("100%");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);
    tf = new TextField("100% high, 50px wide TextField, valign: bottom");
    tf.setHeight("100%");
    tf.setWidth("50px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    Label l = new Label(
            "100% high, 50px wide Label, valign: bottom, does not fill full height, only needed space");
    // Fixed copy-paste bug: the size described by the label's caption was
    // previously applied to the preceding TextField (tf) instead of the
    // Label itself.
    l.setHeight("100%");
    l.setWidth("50px");
    ol.addComponent(l);
    ol.setComponentAlignment(l, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    Select s = new Select("100% high select, should fit into layout");
    s.setMultiSelect(true);
    s.setHeight("100%");
    s.setWidth("100%");
    for (int i = 0; i < 10; i++) {
        s.addItem(new Object());
    }
    ol.addComponent(s);
    s = new Select("200 px high select, should make the layout 200px high");
    s.setMultiSelect(true);
    s.setHeight("200px");
    s.setWidth("100%");
    ol.addComponent(s);
    return ol;
}
/**
 * Dynamic-width/dynamic-height layout: two 100%-high fields (one with a
 * caption, one without), three fixed-size fields of growing side length
 * with rotating vertical alignment, and a trailing 100%-high field.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout4(OrderedLayout ol) {
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setWidth("");
    ol.setCaption("Dynamic width and dynamic height");

    TextField field = new TextField("100% high TextField");
    field.setCaption(null);
    field.setRequired(true);
    field.setValue("100% high Field");
    field.setHeight("100%");
    field.setWidth("100px");
    field.setRows(2);
    ol.addComponent(field);

    field = new TextField("100% high TextField");
    field.setCaption("100% high TextField");
    field.setRequired(true);
    field.setValue("100% high Field");
    field.setHeight("100%");
    field.setWidth("100px");
    field.setRows(2);
    ol.addComponent(field);

    // Three square fields, 100/200/300px, cycling vertical alignments.
    for (int i = 1; i < 4; i++) {
        final int side = i * 100;
        field = new TextField("Field " + i);
        field.setRows(2);
        field.setValue(side + "px high, " + side
                + "px wide TextField, valign: " + valignName[i % 3]);
        field.setWidth(side + "px");
        field.setHeight(side + "px");
        ol.addComponent(field);
        ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_LEFT,
                valign[i % 3]);
    }

    field = new TextField("100% high TextField");
    field.setValue("100% high 100px wide");
    field.setRows(2);
    field.setHeight("100%");
    field.setWidth("100px");
    ol.addComponent(field);
    return ol;
}
/**
 * 916x500px fixed layout: a 300x300 field (top-left), a full-size button
 * at 50% height expanding with ratio 1 (right/middle), and another
 * 300x300 field (bottom-right).
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_field_100pct_button_field(OrderedLayout ol) {
    ol.setHeight("500px");
    ol.setWidth("916px");
    ol.setMargin(false);
    ol.setSpacing(true);
    ol.setCaption("Fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight()
            + "px) / layout_field_100pct_button_field");

    TextField field = new TextField("300px x 300px Field");
    field.setValue("300x300 field");
    field.setRows(2);
    field.setHeight("300px");
    field.setWidth("300px");
    // NOTE(review): alignment is set before the component is added —
    // presumably tolerated by this layout implementation; confirm.
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);
    ol.addComponent(field);

    Button middleButton = new Button(
            "This is a 100%x50% valign middle button");
    middleButton.setSizeFull();
    middleButton.setHeight("50%");
    ol.addComponent(middleButton);
    ol.setExpandRatio(middleButton, 1.0f);
    ol.setComponentAlignment(middleButton, AlignmentHandler.ALIGNMENT_RIGHT,
            AlignmentHandler.ALIGNMENT_VERTICAL_CENTER);

    field = new TextField("300px x 300px Field");
    field.setValue("300x300 field");
    field.setRows(2);
    field.setHeight("300px");
    field.setWidth("300px");
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.addComponent(field);
    return ol;
}
/**
 * 900x700px fixed layout with two 300x300 fields, one aligned top-left
 * and one bottom-right.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_basic_test(OrderedLayout ol) {
    ol.setHeight("700px");
    ol.setWidth("900px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight()
            + "px) / layout_basic_test");

    TextField field = new TextField("300px x 300px Field");
    field.setValue("300x300 field");
    field.setRows(2);
    field.setHeight("300px");
    field.setWidth("300px");
    // Alignment set before adding, as in the other fixed-size tests.
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);
    ol.addComponent(field);

    field = new TextField("300px x 300px Field");
    field.setValue("300x300 field");
    field.setRows(2);
    field.setHeight("300px");
    field.setWidth("300px");
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.addComponent(field);
    return ol;
}
/**
 * 900x900px fixed layout with three identical 300x300 fields aligned
 * top-left, dead-center and bottom-right respectively.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_symmetric_fields(OrderedLayout ol) {
    ol.setHeight("900px");
    ol.setWidth("900px");
    ol.setMargin(false);
    ol.setSpacing(false);
    ol.setCaption("Fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight()
            + "px) / layout_symmetric_fields");

    TextField field = new TextField("300px x 300px Field");
    field.setValue("300x300 field");
    field.setRows(2);
    field.setHeight("300px");
    field.setWidth("300px");
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);
    ol.addComponent(field);

    field = new TextField("300px x 300px Field");
    field.setValue("300x300 field");
    field.setRows(2);
    field.setHeight("300px");
    field.setWidth("300px");
    ol.setComponentAlignment(field,
            OrderedLayout.ALIGNMENT_HORIZONTAL_CENTER,
            OrderedLayout.ALIGNMENT_VERTICAL_CENTER);
    ol.addComponent(field);

    field = new TextField("300px x 300px Field");
    field.setValue("300x300 field");
    field.setRows(2);
    field.setHeight("300px");
    field.setWidth("300px");
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.addComponent(field);
    return ol;
}
/**
 * 700x700px fixed layout with a 300x300 field aligned top-left and a
 * second 300x300 field aligned bottom-right.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_leftAndRight(OrderedLayout ol) {
    ol.setHeight("700px");
    ol.setWidth("700px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight()
            + "px) / layout_leftAndRight");

    TextField field = new TextField("300px x 300px Field");
    field.setValue("300x300 field");
    field.setRows(2);
    field.setHeight("300px");
    field.setWidth("300px");
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);
    ol.addComponent(field);

    field = new TextField("300px x 300px Field");
    field.setValue("300x300 field");
    field.setRows(2);
    field.setHeight("300px");
    field.setWidth("300px");
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.addComponent(field);
    return ol;
}
/**
 * 700x700px layout fully filled: two 100%-wide expanding fields (one
 * full height, one 60% high and vertically centered) plus two 200x200
 * corner fields.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_fixed_filled(OrderedLayout ol) {
    ol.setHeight("700px");
    ol.setWidth("700px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Filled with fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight() + "px)");

    TextField field = new TextField("60%x100% Field");
    field.setCaption("This one has a caption");
    field.setValue("60% x 100% TextField");
    field.setWidth("100%");
    field.setHeight("100%");
    field.setRequired(true);
    field.setRows(2);
    ol.addComponent(field);
    ol.setExpandRatio(field, 1f);

    field = new TextField("60%x60% Field");
    field.setCaption(null);
    field.setValue("60% x 60% TextField");
    field.setWidth("100%");
    field.setHeight("60%");
    field.setRequired(true);
    ol.addComponent(field);
    ol.setExpandRatio(field, 1f);
    ol.setComponentAlignment(field, AlignmentHandler.ALIGNMENT_LEFT,
            AlignmentHandler.ALIGNMENT_VERTICAL_CENTER);
    field.setRows(2);

    field = new TextField("200px x 200px Field");
    field.setValue("200x200 field");
    field.setRows(2);
    field.setHeight("200px");
    field.setWidth("200px");
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);
    ol.addComponent(field);

    field = new TextField("200px x 200px Field");
    field.setValue("200x200 field");
    field.setRows(2);
    field.setHeight("200px");
    field.setWidth("200px");
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.addComponent(field);
    return ol;
}
/**
 * 700x300px layout deliberately over-filled with five 200x200 fields to
 * exercise overflow behaviour.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_overfilled(OrderedLayout ol) {
    ol.setHeight("300px");
    ol.setWidth("700px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("OverFilled with fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight() + "px)");
    for (int i = 0; i < 5; i++) {
        TextField field = new TextField("200x200px Field");
        field.setCaption("This one has a caption");
        field.setValue("200x200 TextField");
        field.setWidth("200px");
        field.setHeight("200px");
        field.setRequired(true);
        field.setRows(2);
        ol.addComponent(field);
    }
    return ol;
}
/**
 * 700px-wide layout with undefined height, over-filled with ten 200px
 * wide fields whose heights grow in 50px steps.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_overfilled_dynamic_height(OrderedLayout ol) {
    ol.setHeight(null);
    ol.setWidth("700px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("OverFilled with fixed width (" + ol.getWidth()
            + "px) and dynamic height");
    for (int i = 0; i < 10; i++) {
        TextField field = new TextField("200x200px Field");
        field.setCaption("This one has a caption");
        field.setWidth("200px");
        field.setHeight(((i + 1) * 50) + "px");
        // Value is derived from the just-set dimensions, so it must be
        // assigned after setWidth/setHeight.
        field.setValue(field.getWidth() + "x" + field.getHeight()
                + " TextField");
        field.setRequired(true);
        field.setRows(2);
        ol.addComponent(field);
    }
    return ol;
}
// private Layout layout_add_components(OrderedLayout ol) {
// ol.setHeight("600px");
// ol.setWidth("600px");
// ol.setMargin(true);
// ol.setSpacing(true);
//
// // ol.setWidth("");
// ol.setCaption("Fixed width (" + ol.getWidth()
// + "px) and fixed height (" + ol.getHeight() + "px)");
//
// for (int i = 0; i < 3; i++) {
// Button b = createAddButton(ol);
// ol.addComponent(b);
// }
//
// return ol;
//
// }
/**
 * 600x600px layout seeded with two add/remove button groups, each
 * aligned bottom-right, for interactive add/remove testing.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_add_remove_components(OrderedLayout ol) {
    ol.setHeight("600px");
    ol.setWidth("600px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight()
            + "px) / layout_add_remove_components");
    for (int i = 0; i < 2; i++) {
        OrderedLayout group = createAddRemove(ol, "", "");
        ol.addComponent(group);
        ol.setComponentAlignment(group, OrderedLayout.ALIGNMENT_RIGHT,
                OrderedLayout.ALIGNMENT_BOTTOM);
    }
    return ol;
}
/**
 * Fully dynamic layout with three buttons; the last is 200px high, the
 * others 100% high.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_dynamic(OrderedLayout ol) {
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Dynamic width, dynamic height");
    for (int i = 0; i < 3; i++) {
        Button button = new Button("Button " + i);
        button.setHeight(i == 2 ? "200px" : "100%");
        ol.addComponent(button);
    }
    return ol;
}
/**
 * Dynamic-width caption test: a short caption, a very long caption, and
 * a very long caption combined with required/error indicators.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_captions(OrderedLayout ol) {
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Caption test with dynamic width");

    TextField field = new TextField("Short caption");
    ol.addComponent(field);

    field = new TextField(
            "A very long caption which is probably much longer than the field");
    ol.addComponent(field);

    field = new TextField(
            "A very long caption which is probably much longer than the field and includes indicators");
    field.setRequired(true);
    field.setComponentError(new UserError("abc123"));
    ol.addComponent(field);
    return ol;
}
/**
 * 700x250px caption test: three undefined-width fields with error
 * markers (and for the last two, required markers / an icon), all
 * aligned bottom-right.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_captions_fixed_size(OrderedLayout ol) {
    ol.setWidth("700px");
    ol.setHeight("250px");
    ol.setMargin(false);
    ol.setSpacing(false);
    ol.setCaption("Caption test with fixed size");

    TextField field = new TextField("Short caption");
    field.setValue("Undefined width");
    field.setComponentError(new UserError("123"));
    ol.addComponent(field);
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    field = new TextField(
            "A long caption which is probably much longer than the field");
    field.setValue("Undefined width");
    field.setRequired(true);
    field.setComponentError(new UserError("123"));
    ol.addComponent(field);
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    field = new TextField(
            "A very long caption which is probably much longer than the field and includes indicators");
    field.setValue("Undefined width");
    field.setIcon(new ThemeResource("icons/16/document-add.png"));
    field.setRequired(true);
    field.setComponentError(new UserError("abc123"));
    ol.addComponent(field);
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    return ol;
}
/**
 * 700x250px caption test with full-size fields sharing space via expand
 * ratios 1, 2 and 3, all aligned bottom-right.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_captions_fixed_size_and_relative_size(OrderedLayout ol) {
    ol.setWidth("700px");
    ol.setHeight("250px");
    ol.setMargin(false);
    ol.setSpacing(false);
    ol.setCaption("Caption test with fixed width (700x250)");

    TextField field = new TextField("Short caption");
    field.setSizeFull();
    field.setValue("100% wide field, ratio 1");
    field.setComponentError(new UserError("123"));
    ol.addComponent(field);
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.setExpandRatio(field, 1);

    field = new TextField(
            "A long caption which is probably much longer than the field");
    field.setValue("100% wide field, ratio 2");
    field.setSizeFull();
    field.setRequired(true);
    field.setComponentError(new UserError("123"));
    ol.addComponent(field);
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.setExpandRatio(field, 2);

    field = new TextField(
            "A very long caption which is probably much longer than the field and includes indicators");
    field.setValue("100% wide field, ratio 3");
    field.setSizeFull();
    field.setIcon(new ThemeResource("icons/16/document-add.png"));
    field.setRequired(true);
    field.setComponentError(new UserError("abc123"));
    ol.addComponent(field);
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.setExpandRatio(field, 3);
    return ol;
}
/**
 * 700x250px caption test with fixed-width fields (250px, 250px, 200px),
 * all aligned bottom-right, carrying various indicators.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_captions_fixed_size_and_fixed_size(OrderedLayout ol) {
    ol.setWidth("700px");
    ol.setHeight("250px");
    ol.setMargin(false);
    ol.setSpacing(false);
    ol.setCaption("Caption test with fixed width");

    TextField field = new TextField("Short caption");
    field.setValue("250px wide field");
    field.setWidth("250px");
    field.setComponentError(new UserError("123"));
    ol.addComponent(field);
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    field = new TextField(
            "A long caption which is probably much longer than the field");
    field.setWidth("250px");
    field.setValue("250px wide field");
    field.setRequired(true);
    field.setComponentError(new UserError("123"));
    ol.addComponent(field);
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    field = new TextField(
            "A very long caption which is probably much longer than the field and includes indicators");
    field.setValue("200px wide field");
    field.setWidth("200px");
    field.setIcon(new ThemeResource("icons/16/document-add.png"));
    field.setRequired(true);
    field.setComponentError(new UserError("abc123"));
    ol.addComponent(field);
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    return ol;
}
/**
 * Label wrapping test: one long single-line label and one short label
 * in a dynamically sized layout.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_labels(OrderedLayout ol) {
    ol.setMargin(true);
    ol.setSpacing(true);
    // NOTE(review): caption says "fixed width" but no size is set on the
    // layout — presumably a stale copy-paste; confirm intent.
    ol.setCaption("Caption test with fixed width");

    Label label = new Label(
            "This is a long text and should remain on one line as there is nothing forcing line breaks");
    ol.addComponent(label);

    label = new Label("WTF OMG LOL");
    ol.addComponent(label);
    return ol;
}
/**
 * Builds a vertical button group (add / wide add / remove) targeting the
 * given layout.
 *
 * @param ol the layout the buttons operate on
 * @param width width applied to the group and shown in its caption
 * @param buttonSuffix label suffix for the remove button
 * @return the assembled group
 */
private OrderedLayout createAddRemove(OrderedLayout ol, String width,
        String buttonSuffix) {
    final Button addButton = createAddButton(ol);
    final Button wideAddButton = createWideAddButton(ol);
    final Button removeButton = createRemoveButton(ol, buttonSuffix);
    final OrderedLayout group = new OrderedLayout(
            OrderedLayout.ORIENTATION_VERTICAL);
    group.setCaption("Width: " + width);
    group.setWidth(width);
    group.addComponent(addButton);
    group.addComponent(wideAddButton);
    group.addComponent(removeButton);
    return group;
}
/**
 * Creates a button that inserts a new (undefined-width) group before its
 * own parent; the target layout is carried in the button's data.
 */
private Button createAddButton(OrderedLayout ol) {
    final Button addButton = new Button("Add before", new ClickListener() {
        public void buttonClick(ClickEvent event) {
            final Button source = event.getButton();
            addBefore((OrderedLayout) source.getData(),
                    source.getParent(), "");
        }
    });
    addButton.setData(ol);
    return addButton;
}
/**
 * Creates a button that inserts a new 100%-wide group before its own
 * parent; the target layout is carried in the button's data.
 */
private Button createWideAddButton(OrderedLayout ol) {
    final Button wideAddButton = new Button("Add 100% before",
            new ClickListener() {
                public void buttonClick(ClickEvent event) {
                    final Button source = event.getButton();
                    addBefore((OrderedLayout) source.getData(),
                            source.getParent(), "100%");
                }
            });
    wideAddButton.setData(ol);
    return wideAddButton;
}
/**
 * Creates a 100%-wide button that removes its own parent group from the
 * target layout carried in the button's data.
 */
private Button createRemoveButton(OrderedLayout ol, String suffix) {
    final Button removeButton = new Button("Remove this " + suffix,
            new ClickListener() {
                public void buttonClick(ClickEvent event) {
                    final Button source = event.getButton();
                    remove((OrderedLayout) source.getData(),
                            source.getParent());
                }
            });
    removeButton.setWidth("100%");
    removeButton.setData(ol);
    return removeButton;
}
/** Removes component {@code c} from layout {@code ol}. */
protected void remove(OrderedLayout ol, Component c) {
    ol.removeComponent(c);
}
/**
 * Inserts a freshly created add/remove group into {@code ol} at the
 * position of component {@code c}, expanding it (ratio 1) when a
 * percentage width was requested, and aligning it bottom-right.
 */
protected void addBefore(OrderedLayout ol, Component c, String width) {
    // Find c's position among the layout's children.
    int position = 0;
    for (Iterator it = ol.getComponentIterator(); it.hasNext(); position++) {
        if (it.next() == c) {
            break; // break skips the final increment, so position == index of c
        }
    }
    final OrderedLayout group = createAddRemove(ol, width,
            String.valueOf(suffix++));
    ol.addComponent(group, position);
    if (width.contains("%")) {
        ol.setExpandRatio(group, 1.0f);
    }
    ol.setComponentAlignment(group, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
}
/**
 * 600x600px layout filled 100% via expand ratios 60/40, with one
 * centered fixed-size 100x100 field between the two expanding ones.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_pctFilled(OrderedLayout ol) {
    ol.setHeight("600px");
    ol.setWidth("600px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("100 % filled with fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight() + "px)");

    TextField field = new TextField();
    field.setCaption("This one has a caption");
    field.setValue("60% expand TextField");
    field.setWidth("100%");
    field.setHeight("100%");
    field.setRows(2);
    ol.addComponent(field);
    ol.setExpandRatio(field, 60);

    field = new TextField();
    field.setValue("100px 100px TextField");
    field.setWidth("100px");
    field.setHeight("100px");
    field.setRows(2);
    ol.addComponent(field);
    ol.setComponentAlignment(field,
            AlignmentHandler.ALIGNMENT_HORIZONTAL_CENTER,
            AlignmentHandler.ALIGNMENT_VERTICAL_CENTER);

    field = new TextField("40%x40% Field");
    field.setValue("40% expand (40% height) TextField");
    field.setWidth("100%");
    field.setHeight("40%");
    ol.addComponent(field);
    ol.setExpandRatio(field, 40);
    ol.setComponentAlignment(field, AlignmentHandler.ALIGNMENT_RIGHT,
            AlignmentHandler.ALIGNMENT_BOTTOM);
    field.setRows(2);
    return ol;
}
/**
 * 600x600px layout with two purely percentage-sized fields (80%x20% and
 * 20%x60%, the latter aligned bottom-right).
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_pctFilled2(OrderedLayout ol) {
    ol.setHeight("600px");
    ol.setWidth("600px");
    ol.setMargin(true);
    ol.setSpacing(false);
    ol.setCaption("100 % filled with fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight() + "px)");

    TextField field = new TextField();
    field.setValue("80% x 20% TextField");
    field.setWidth("80%");
    field.setHeight("20%");
    field.setRows(2);
    ol.addComponent(field);

    field = new TextField("20%x60% Field");
    field.setCaption(null);
    field.setValue("20% x 60% TextField");
    field.setWidth("20%");
    field.setHeight("60%");
    // Alignment is set before the component is added, as in the original.
    ol.setComponentAlignment(field, AlignmentHandler.ALIGNMENT_RIGHT,
            AlignmentHandler.ALIGNMENT_BOTTOM);
    field.setRows(2);
    ol.addComponent(field);
    return ol;
}
/**
 * 700x700px layout that is under-filled: a centered 60%x100% field and
 * two 200x200 fields aligned top-left and bottom-right.
 *
 * @param ol the layout to populate
 * @return the populated layout
 */
private Layout layout_underFilled(OrderedLayout ol) {
    ol.setHeight("700px");
    ol.setWidth("700px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Underfilled with fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight() + "px)");

    TextField field = new TextField("60%x100% Field");
    field.setCaption("Short capt");
    field.setValue("60% x 100% TextField");
    field.setWidth("60%");
    field.setHeight("100%");
    field.setRequired(true);
    field.setRows(2);
    ol.setComponentAlignment(field,
            AlignmentHandler.ALIGNMENT_HORIZONTAL_CENTER,
            AlignmentHandler.ALIGNMENT_VERTICAL_CENTER);
    ol.addComponent(field);

    field = new TextField("200px x 200px Field");
    field.setValue("200x200 field");
    field.setRows(2);
    field.setHeight("200px");
    field.setWidth("200px");
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);
    ol.addComponent(field);

    field = new TextField("200px x 200px Field");
    field.setValue("200x200 field");
    field.setRows(2);
    field.setHeight("200px");
    field.setWidth("200px");
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.addComponent(field);
    return ol;
}
} | src/com/itmill/toolkit/tests/TestOrderedLayout.java | package com.itmill.toolkit.tests;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import com.itmill.toolkit.Application;
import com.itmill.toolkit.terminal.ThemeResource;
import com.itmill.toolkit.terminal.UserError;
import com.itmill.toolkit.ui.Button;
import com.itmill.toolkit.ui.Component;
import com.itmill.toolkit.ui.Label;
import com.itmill.toolkit.ui.Layout;
import com.itmill.toolkit.ui.OrderedLayout;
import com.itmill.toolkit.ui.Panel;
import com.itmill.toolkit.ui.Select;
import com.itmill.toolkit.ui.TextField;
import com.itmill.toolkit.ui.Window;
import com.itmill.toolkit.ui.Button.ClickEvent;
import com.itmill.toolkit.ui.Button.ClickListener;
import com.itmill.toolkit.ui.Layout.AlignmentHandler;
public class TestOrderedLayout extends Application {
// Human-readable vertical-alignment names shown in captions; parallel to valign[].
String valignName[] = new String[] { "top", "middle", "bottom" };
// Alignment constants cycled through when placing test components.
int valign[] = new int[] { OrderedLayout.ALIGNMENT_TOP,
        OrderedLayout.ALIGNMENT_VERTICAL_CENTER,
        OrderedLayout.ALIGNMENT_BOTTOM };
// Every OrderedLayout built by the test factories, so swapLayouts() can flip them.
Set<OrderedLayout> layouts = new HashSet<OrderedLayout>();
// Top-level container holding all wrapped test layouts.
private OrderedLayout layoutContainer;
// Counter giving each dynamically added remove button a unique label.
private int suffix = 0;
/**
 * Application entry point: creates the main window, a "Swap" button that
 * flips all layout orientations, and the container of test layouts.
 */
public void init() {
    final Window mainWindow = new Window(getClass().getSimpleName());
    setMainWindow(mainWindow);
    // setTheme("tests-tickets");
    mainWindow.getLayout().addComponent(
            new Button("Swap", new ClickListener() {
                public void buttonClick(ClickEvent event) {
                    swapLayouts();
                }
            }));
    layoutContainer = new OrderedLayout(OrderedLayout.ORIENTATION_VERTICAL);
    createUI(layoutContainer);
    mainWindow.getLayout().addComponent(layoutContainer);
}
/**
 * Flips the main container's orientation and, for every registered test
 * layout, flips its orientation the opposite way while swapping its
 * width and height (units included).
 */
public void swapLayouts() {
    final int newMainOrientation = 1 - layoutContainer.getOrientation();
    layoutContainer.setOrientation(newMainOrientation);
    for (OrderedLayout layout : layouts) {
        layout.setOrientation(1 - newMainOrientation);
        final float oldHeight = layout.getHeight();
        final int oldHeightUnit = layout.getHeightUnits();
        final float oldWidth = layout.getWidth();
        final int oldWidthUnit = layout.getWidthUnits();
        layout.setWidth(oldHeight, oldHeightUnit);
        layout.setHeight(oldWidth, oldWidthUnit);
    }
}
/**
 * Adds one panel-wrapped instance of each test layout to the given
 * container; the order here is the on-screen order.
 */
private void createUI(Layout layout) {
    layout.addComponent(wrapLayout(layout_field_100pct_button_field(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_symmetric_fields(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_leftAndRight(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_overFilled(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_dynamic(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_labels(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_captions(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_captions_fixed_size(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_captions_fixed_size_and_relative_size(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_captions_fixed_size_and_fixed_size(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_add_remove_components(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    // layout_pctFilled is exercised in both orientations.
    layout.addComponent(wrapLayout(layout_pctFilled(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_pctFilled(
            new OrderedLayout(OrderedLayout.ORIENTATION_VERTICAL))));
    layout.addComponent(wrapLayout(layout_underFilled(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
    layout.addComponent(wrapLayout(layout_basic_test(
            new OrderedLayout(OrderedLayout.ORIENTATION_HORIZONTAL))));
}
/**
 * Wraps a test layout in a Panel, moving the layout's caption onto the
 * panel, and registers OrderedLayouts for orientation swapping.
 *
 * @param ol the layout to wrap
 * @return a new layout containing the captioned panel
 */
private Layout wrapLayout(Layout ol) {
    final Panel panel = new Panel(ol);
    panel.setCaption(ol.getCaption());
    ol.setCaption(null);
    final OrderedLayout wrapper = new OrderedLayout();
    wrapper.addComponent(panel);
    if (ol instanceof OrderedLayout) {
        layouts.add((OrderedLayout) ol);
    }
    return wrapper;
}
/* LAYOUTS */
/**
 * Horizontal layout with fixed 200px height and dynamic width: a 100px
 * high bottom-aligned field plus a 100%-high and a 200px-high select.
 *
 * @return the populated test layout
 */
private Layout layout1() {
    final OrderedLayout ol = new OrderedLayout(
            OrderedLayout.ORIENTATION_HORIZONTAL);
    ol.setHeight("200px");
    ol.setWidth("");
    ol.setCaption("Fixed height (200px) and dynamic width");

    final TextField field = new TextField(
            "100px high TextField, valign: bottom");
    field.setHeight("100px");
    field.setWidth("");
    ol.addComponent(field);
    ol.setComponentAlignment(field, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    Select select = new Select("100% high select");
    select.setMultiSelect(true);
    select.setHeight("100%");
    select.setWidth("");
    ol.addComponent(select);

    select = new Select("200 px high select");
    select.setMultiSelect(true);
    select.setHeight("200px");
    select.setWidth("");
    ol.addComponent(select);
    return ol;
}
/**
 * Horizontal layout with fixed 70px height and dynamic width; children
 * deliberately overflow or fill the short layout.
 *
 * @return the populated test layout
 */
private Layout layout2() {
    OrderedLayout ol = new OrderedLayout(
            OrderedLayout.ORIENTATION_HORIZONTAL);
    ol.setHeight("70px");
    ol.setWidth("");
    // Fixed stale caption: it previously claimed 50px although the
    // layout height is 70px.
    ol.setCaption("Fixed height (70px) and dynamic width");
    TextField tf = new TextField(
            "100px high TextField, valign: bottom, should be partly outside");
    tf.setHeight("100px");
    tf.setWidth("");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    tf = new TextField(
            "100% high, 50px wide TextField, valign: bottom, should fill full height");
    tf.setHeight("100%");
    tf.setWidth("50px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    Label l = new Label(
            "100% high, 50px wide Label, valign: bottom, does not fill full height, only needed space");
    // Fixed copy-paste bug: the size described by the label's caption was
    // previously applied to the preceding TextField (tf) instead of the
    // Label itself.
    l.setHeight("100%");
    l.setWidth("50px");
    ol.addComponent(l);
    ol.setComponentAlignment(l, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    Select s = new Select("100% high select, should fit into layout");
    s.setMultiSelect(true);
    s.setHeight("100%");
    s.setWidth("");
    for (int i = 0; i < 10; i++) {
        s.addItem(new Object());
    }
    ol.addComponent(s);
    s = new Select("200 px high select, should be partly outside");
    s.setMultiSelect(true);
    s.setHeight("200px");
    s.setWidth("");
    ol.addComponent(s);
    return ol;
}
private Layout layout3() {
    // Fixed-width (500px), dynamic-height horizontal layout exercising
    // pixel-high and 100%-high children with different valigns.
    OrderedLayout ol = new OrderedLayout(
            OrderedLayout.ORIENTATION_HORIZONTAL);
    ol.setHeight("");
    ol.setWidth("500px");
    ol.setCaption("Fixed width (500px) and dynamic height");

    TextField tf;
    tf = new TextField("100px high TextField, valign: bottom");
    tf.setHeight("100px");
    tf.setWidth("100%");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    tf = new TextField("100px high TextField, valign: top");
    tf.setHeight("100px");
    tf.setWidth("100%");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);

    tf = new TextField("100% high, 50px wide TextField, valign: bottom");
    tf.setHeight("100%");
    tf.setWidth("50px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    Label l = new Label(
            "100% high, 50px wide Label, valign: bottom, does not fill full height, only needed space");
    // BUG FIX: these two setters previously targeted the TextField (tf)
    // instead of the Label they describe.
    l.setHeight("100%");
    l.setWidth("50px");
    ol.addComponent(l);
    ol.setComponentAlignment(l, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    Select s = new Select("100% high select, should fit into layout");
    s.setMultiSelect(true);
    s.setHeight("100%");
    s.setWidth("100%");
    for (int i = 0; i < 10; i++) {
        s.addItem(new Object());
    }
    ol.addComponent(s);

    s = new Select("200 px high select, should make the layout 200px high");
    s.setMultiSelect(true);
    s.setHeight("200px");
    s.setWidth("100%");
    ol.addComponent(s);

    return ol;
}
private Layout layout3New() {
    // Dynamic-width, fixed-height (300px) variant of layout3.
    OrderedLayout ol = new OrderedLayout(
            OrderedLayout.ORIENTATION_HORIZONTAL);
    ol.setHeight("300px");
    ol.setWidth("");
    ol.setCaption("Dynamic width and fixed height(300px)");

    TextField tf;
    tf = new TextField("100px high TextField, valign: bottom");
    tf.setHeight("100px");
    tf.setWidth("100%");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    tf = new TextField("100px high TextField, valign: top");
    tf.setHeight("100px");
    tf.setWidth("100%");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);

    tf = new TextField("100% high, 50px wide TextField, valign: bottom");
    tf.setHeight("100%");
    tf.setWidth("50px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    Label l = new Label(
            "100% high, 50px wide Label, valign: bottom, does not fill full height, only needed space");
    // BUG FIX: the size was previously applied to the TextField (tf)
    // instead of the Label created above.
    l.setHeight("100%");
    l.setWidth("50px");
    ol.addComponent(l);
    ol.setComponentAlignment(l, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    Select s = new Select("100% high select, should fit into layout");
    s.setMultiSelect(true);
    s.setHeight("100%");
    s.setWidth("100%");
    for (int i = 0; i < 10; i++) {
        s.addItem(new Object());
    }
    ol.addComponent(s);

    s = new Select("200 px high select, should make the layout 200px high");
    s.setMultiSelect(true);
    s.setHeight("200px");
    s.setWidth("100%");
    ol.addComponent(s);

    return ol;
}
private Layout layout4(OrderedLayout ol) {
    // Dynamic width/height layout mixing 100%-high fields with fixed-size
    // square fields at varying vertical alignments.
    // ol.setHeight("300px");
    // ol.setWidth("500px");
    ol.setMargin(true);
    ol.setSpacing(true);
    // ol.setWidth("");
    ol.setCaption("Dynamic width and dynamic height");
    TextField tf;
    // 100%-high field whose caption is explicitly cleared.
    tf = new TextField("100% high TextField");
    tf.setCaption(null);
    tf.setRequired(true);
    tf.setValue("100% high Field");
    tf.setHeight("100%");
    tf.setWidth("100px");
    tf.setRows(2);
    ol.addComponent(tf);
    // Same field, but with a visible caption.
    tf = new TextField("100% high TextField");
    tf.setCaption("100% high TextField");
    tf.setRequired(true);
    tf.setValue("100% high Field");
    tf.setHeight("100%");
    tf.setWidth("100px");
    tf.setRows(2);
    ol.addComponent(tf);
    // Three square fields (100/200/300 px), cycling through the valign
    // options; valign/valignName are fields declared outside this view.
    for (int i = 1; i < 4; i++) {
        int w = i * 100;
        tf = new TextField("Field " + i);
        tf.setRows(2);
        tf.setValue(w + "px high, " + w + "px wide TextField, valign: "
                + valignName[i % 3]);
        tf.setWidth(w + "px");
        tf.setHeight(w + "px");
        ol.addComponent(tf);
        ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
                valign[i % 3]);
    }
    tf = new TextField("100% high TextField");
    tf.setValue("100% high 100px wide");
    tf.setRows(2);
    tf.setHeight("100%");
    tf.setWidth("100px");
    ol.addComponent(tf);
    return ol;
}
private Layout layout_field_100pct_button_field(OrderedLayout ol) {
    // 916x500 fixed layout: fixed field, expanding button, fixed field.
    ol.setHeight("500px");
    ol.setWidth("916px");
    ol.setMargin(false);
    ol.setSpacing(true);
    ol.setCaption("Fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight()
            + "px) / layout_field_100pct_button_field");

    TextField tf;
    tf = new TextField("300px x 300px Field");
    tf.setValue("300x300 field");
    tf.setRows(2);
    tf.setHeight("300px");
    tf.setWidth("300px");
    // FIX: add the component before aligning it; alignment applies only to
    // children already in the layout.
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);

    Button b;
    b = new Button("This is a 100%x50% valign middle button");
    b.setSizeFull();
    b.setHeight("50%");
    ol.addComponent(b);
    ol.setExpandRatio(b, 1.0f);
    ol.setComponentAlignment(b, AlignmentHandler.ALIGNMENT_RIGHT,
            AlignmentHandler.ALIGNMENT_VERTICAL_CENTER);

    tf = new TextField("300px x 300px Field");
    tf.setValue("300x300 field");
    tf.setRows(2);
    tf.setHeight("300px");
    tf.setWidth("300px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    return ol;
}
private Layout layout_basic_test(OrderedLayout ol) {
    // 900x700 fixed layout with two 300x300 fields: one anchored top-left,
    // one bottom-right.
    ol.setHeight("700px");
    ol.setWidth("900px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight()
            + "px) / layout_basic_test");

    TextField tf;
    tf = new TextField("300px x 300px Field");
    tf.setValue("300x300 field");
    tf.setRows(2);
    tf.setHeight("300px");
    tf.setWidth("300px");
    // FIX: components must be added before their alignment is set.
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);

    tf = new TextField("300px x 300px Field");
    tf.setValue("300x300 field");
    tf.setRows(2);
    tf.setHeight("300px");
    tf.setWidth("300px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    return ol;
}
private Layout layout_symmetric_fields(OrderedLayout ol) {
    // 900x900 fixed layout with three identical 300x300 fields placed
    // top-left, center and bottom-right.
    ol.setHeight("900px");
    ol.setWidth("900px");
    ol.setMargin(false);
    ol.setSpacing(false);
    ol.setCaption("Fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight()
            + "px) / layout_symmetric_fields");

    TextField tf;
    tf = new TextField("300px x 300px Field");
    tf.setValue("300x300 field");
    tf.setRows(2);
    tf.setHeight("300px");
    tf.setWidth("300px");
    // FIX: add before aligning — alignment targets layout children.
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);

    tf = new TextField("300px x 300px Field");
    tf.setValue("300x300 field");
    tf.setRows(2);
    tf.setHeight("300px");
    tf.setWidth("300px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_HORIZONTAL_CENTER,
            OrderedLayout.ALIGNMENT_VERTICAL_CENTER);

    tf = new TextField("300px x 300px Field");
    tf.setValue("300x300 field");
    tf.setRows(2);
    tf.setHeight("300px");
    tf.setWidth("300px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    return ol;
}
private Layout layout_leftAndRight(OrderedLayout ol) {
    // 700x700 fixed layout with one 300x300 field anchored top-left and
    // another anchored bottom-right.  (A large block of commented-out
    // variants was removed for readability.)
    ol.setHeight("700px");
    ol.setWidth("700px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight()
            + "px) / layout_leftAndRight");

    TextField tf;
    tf = new TextField("300px x 300px Field");
    tf.setValue("300x300 field");
    tf.setRows(2);
    tf.setHeight("300px");
    tf.setWidth("300px");
    // FIX: the component must be a child of the layout before its
    // alignment is configured.
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);

    tf = new TextField("300px x 300px Field");
    tf.setValue("300x300 field");
    tf.setRows(2);
    tf.setHeight("300px");
    tf.setWidth("300px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    return ol;
}
private Layout layout_overFilled(OrderedLayout ol) {
    // Deliberately over-filled 700x700 layout: two expanding fields plus
    // two fixed 200x200 fields.  (Dead commented-out variants removed.)
    ol.setHeight("700px");
    ol.setWidth("700px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Filled with fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight() + "px)");

    TextField tf;
    tf = new TextField("60%x100% Field");
    tf.setCaption("This one has a caption");
    tf.setValue("60% x 100% TextField");
    tf.setWidth("100%");
    tf.setHeight("100%");
    tf.setRequired(true);
    tf.setRows(2);
    // FIX: add first, then set the expand ratio — the ratio is bound to an
    // actual child of the layout.
    ol.addComponent(tf);
    ol.setExpandRatio(tf, 1f);

    tf = new TextField("60%x60% Field");
    tf.setCaption(null);
    tf.setValue("60% x 60% TextField");
    tf.setWidth("100%");
    tf.setHeight("60%");
    tf.setRequired(true);
    tf.setRows(2);
    ol.addComponent(tf);
    ol.setExpandRatio(tf, 1f);
    ol.setComponentAlignment(tf, AlignmentHandler.ALIGNMENT_LEFT,
            AlignmentHandler.ALIGNMENT_VERTICAL_CENTER);

    tf = new TextField("200px x 200px Field");
    tf.setValue("200x200 field");
    tf.setRows(2);
    tf.setHeight("200px");
    tf.setWidth("200px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);

    tf = new TextField("200px x 200px Field");
    tf.setValue("200x200 field");
    tf.setRows(2);
    tf.setHeight("200px");
    tf.setWidth("200px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    return ol;
}
// private Layout layout_add_components(OrderedLayout ol) {
// ol.setHeight("600px");
// ol.setWidth("600px");
// ol.setMargin(true);
// ol.setSpacing(true);
//
// // ol.setWidth("");
// ol.setCaption("Fixed width (" + ol.getWidth()
// + "px) and fixed height (" + ol.getHeight() + "px)");
//
// for (int i = 0; i < 3; i++) {
// Button b = createAddButton(ol);
// ol.addComponent(b);
// }
//
// return ol;
//
// }
private Layout layout_add_remove_components(OrderedLayout ol) {
    // 600x600 fixed layout seeded with two add/remove button blocks; the
    // buttons let the user grow and shrink the layout at runtime (see
    // createAddRemove / addBefore / remove).
    ol.setHeight("600px");
    ol.setWidth("600px");
    ol.setMargin(true);
    ol.setSpacing(true);
    // ol.setWidth("");
    ol.setCaption("Fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight()
            + "px) / layout_add_remove_components");
    for (int i = 0; i < 2; i++) {
        OrderedLayout inner = createAddRemove(ol, "", "");
        ol.addComponent(inner);
        ol.setComponentAlignment(inner, OrderedLayout.ALIGNMENT_RIGHT,
                OrderedLayout.ALIGNMENT_BOTTOM);
    }
    return ol;
}
private Layout layout_dynamic(OrderedLayout ol) {
    // Fully dynamic layout: three buttons, the last with a fixed pixel
    // height and the others 100% high.
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Dynamic width, dynamic height");
    for (int buttonIndex = 0; buttonIndex < 3; buttonIndex++) {
        Button button = new Button("Button " + buttonIndex);
        button.setHeight(buttonIndex == 2 ? "200px" : "100%");
        ol.addComponent(button);
    }
    return ol;
}
private Layout layout_captions(OrderedLayout ol) {
    // Caption rendering test in a dynamically sized layout.
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Caption test with dynamic width");
    // Field with a short caption.
    ol.addComponent(new TextField("Short caption"));
    // Field whose caption is wider than the field itself.
    ol.addComponent(new TextField(
            "A very long caption which is probably much longer than the field"));
    // Long caption combined with required and error indicators.
    TextField decorated = new TextField(
            "A very long caption which is probably much longer than the field and includes indicators");
    decorated.setRequired(true);
    decorated.setComponentError(new UserError("abc123"));
    ol.addComponent(decorated);
    return ol;
}
private Layout layout_captions_fixed_size(OrderedLayout ol) {
    // Caption test inside a fixed 700x250 layout: three undefined-width
    // fields with increasingly long captions and indicator decorations,
    // all aligned bottom-right.
    ol.setWidth("700px");
    ol.setHeight("250px");
    ol.setMargin(false);
    ol.setSpacing(false);
    // ol.setWidth("");
    ol.setCaption("Caption test with fixed size");
    TextField tf;
    tf = new TextField("Short caption");
    tf.setValue("Undefined width");
    tf.setComponentError(new UserError("123"));
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    // Caption wider than the field, plus a required marker.
    tf = new TextField(
            "A long caption which is probably much longer than the field");
    tf.setValue("Undefined width");
    tf.setRequired(true);
    tf.setComponentError(new UserError("123"));
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    // Long caption plus icon, required marker and error indicator.
    tf = new TextField(
            "A very long caption which is probably much longer than the field and includes indicators");
    tf.setValue("Undefined width");
    tf.setIcon(new ThemeResource("icons/16/document-add.png"));
    tf.setRequired(true);
    tf.setComponentError(new UserError("abc123"));
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    // for (int i = 0; i < 3; i++) {
    // Button b = new Button("Button " + i);
    // if (i == 2) {
    // b.setHeight("200px");
    // } else {
    // b.setHeight("100%");
    // }
    // ol.addComponent(b);
    // }
    return ol;
}
private Layout layout_captions_fixed_size_and_relative_size(OrderedLayout ol) {
    // Caption test in a fixed 700x250 layout where the three full-size
    // fields share the space via expand ratios 1:2:3.
    ol.setWidth("700px");
    ol.setHeight("250px");
    ol.setMargin(false);
    ol.setSpacing(false);
    // ol.setWidth("");
    ol.setCaption("Caption test with fixed width (700x250)");
    TextField tf;
    tf = new TextField("Short caption");
    tf.setSizeFull();
    tf.setValue("100% wide field, ratio 1");
    tf.setComponentError(new UserError("123"));
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.setExpandRatio(tf, 1);
    // Caption wider than the field, plus a required marker.
    tf = new TextField(
            "A long caption which is probably much longer than the field");
    tf.setValue("100% wide field, ratio 2");
    tf.setSizeFull();
    tf.setRequired(true);
    tf.setComponentError(new UserError("123"));
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.setExpandRatio(tf, 2);
    // Long caption plus icon, required marker and error indicator.
    tf = new TextField(
            "A very long caption which is probably much longer than the field and includes indicators");
    tf.setValue("100% wide field, ratio 3");
    tf.setSizeFull();
    tf.setIcon(new ThemeResource("icons/16/document-add.png"));
    tf.setRequired(true);
    tf.setComponentError(new UserError("abc123"));
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    ol.setExpandRatio(tf, 3);
    // for (int i = 0; i < 3; i++) {
    // Button b = new Button("Button " + i);
    // if (i == 2) {
    // b.setHeight("200px");
    // } else {
    // b.setHeight("100%");
    // }
    // ol.addComponent(b);
    // }
    return ol;
}
private Layout layout_captions_fixed_size_and_fixed_size(OrderedLayout ol) {
    // Caption test in a fixed 700x250 layout with pixel-wide fields
    // (250/250/200 px), all aligned bottom-right.
    ol.setWidth("700px");
    ol.setHeight("250px");
    ol.setMargin(false);
    ol.setSpacing(false);
    // ol.setWidth("");
    ol.setCaption("Caption test with fixed width");
    TextField tf;
    tf = new TextField("Short caption");
    tf.setValue("250px wide field");
    tf.setWidth("250px");
    tf.setComponentError(new UserError("123"));
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    // Caption wider than the field, plus a required marker.
    tf = new TextField(
            "A long caption which is probably much longer than the field");
    tf.setWidth("250px");
    tf.setValue("250px wide field");
    tf.setRequired(true);
    tf.setComponentError(new UserError("123"));
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    // Long caption plus icon, required marker and error indicator.
    tf = new TextField(
            "A very long caption which is probably much longer than the field and includes indicators");
    tf.setValue("200px wide field");
    tf.setWidth("200px");
    tf.setIcon(new ThemeResource("icons/16/document-add.png"));
    tf.setRequired(true);
    tf.setComponentError(new UserError("abc123"));
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
    // for (int i = 0; i < 3; i++) {
    // Button b = new Button("Button " + i);
    // if (i == 2) {
    // b.setHeight("200px");
    // } else {
    // b.setHeight("100%");
    // }
    // ol.addComponent(b);
    // }
    return ol;
}
private Layout layout_labels(OrderedLayout ol) {
    // Two plain labels in a dynamically sized layout: one long single-line
    // text and one short text.
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Caption test with fixed width");
    ol.addComponent(new Label(
            "This is a long text and should remain on one line as there is nothing forcing line breaks"));
    ol.addComponent(new Label("WTF OMG LOL"));
    return ol;
}
private OrderedLayout createAddRemove(OrderedLayout ol, String width,
        String buttonSuffix) {
    // Builds a vertical block with "add", "add 100%" and "remove" buttons
    // that operate on the given parent layout.
    Button b = createAddButton(ol);
    Button wb = createWideAddButton(ol);
    Button r = createRemoveButton(ol, buttonSuffix);
    OrderedLayout inner = new OrderedLayout(
            OrderedLayout.ORIENTATION_VERTICAL);
    inner.setCaption("Width: " + width);
    inner.setWidth(width);
    // Percentage widths only work together with an expand ratio.
    // NOTE(review): the ratio is set before `inner` is added to `ol` by the
    // caller — confirm the toolkit accepts ratios for not-yet-added children.
    if (width.contains("%")) {
        ol.setExpandRatio(inner, 1.0f);
    }
    inner.addComponent(b);
    inner.addComponent(wb);
    inner.addComponent(r);
    // inner.setHeight("132px");
    return inner;
}
private Button createAddButton(OrderedLayout ol) {
    // Clicking inserts a new add/remove block (undefined width) just
    // before the button's own container.
    Button addButton = new Button("Add before", new ClickListener() {
        public void buttonClick(ClickEvent event) {
            Button source = event.getButton();
            addBefore((OrderedLayout) source.getData(), source.getParent(), "");
        }
    });
    addButton.setData(ol);
    return addButton;
}
private Button createWideAddButton(OrderedLayout ol) {
    // Clicking inserts a new 100%-wide add/remove block just before the
    // button's own container.
    Button addButton = new Button("Add 100% before", new ClickListener() {
        public void buttonClick(ClickEvent event) {
            Button source = event.getButton();
            addBefore((OrderedLayout) source.getData(), source.getParent(), "100%");
        }
    });
    addButton.setData(ol);
    return addButton;
}
private Button createRemoveButton(OrderedLayout ol, String suffix) {
    // Clicking removes the button's own container from the target layout.
    Button removeButton = new Button("Remove this " + suffix, new ClickListener() {
        public void buttonClick(ClickEvent event) {
            Button source = event.getButton();
            remove((OrderedLayout) source.getData(), source.getParent());
        }
    });
    removeButton.setWidth("100%");
    removeButton.setData(ol);
    return removeButton;
}
protected void remove(OrderedLayout ol, Component c) {
    // Overridable hook used by the remove buttons: detaches the given
    // child component from the layout.
    ol.removeComponent(c);
}
protected void addBefore(OrderedLayout ol, Component c, String width) {
    // Locate the index of the reference component within the layout.
    int index = 0;
    Iterator iter = ol.getComponentIterator();
    while (iter.hasNext()) {
        if (iter.next() == c) {
            break;
        }
        index++;
    }
    // Insert a fresh add/remove block at that position.  `suffix` is a
    // counter field declared outside this view, used to label the new
    // remove button uniquely.
    OrderedLayout inner = createAddRemove(ol, width, String
            .valueOf(suffix++));
    ol.addComponent(inner, index);
    ol.setComponentAlignment(inner, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);
}
private Layout layout_pctFilled(OrderedLayout ol) {
    // 600x600 layout whose space is split 60/40 between two expanding
    // fields, with a fixed 100x100 field in between.
    ol.setHeight("600px");
    ol.setWidth("600px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("100 % filled with fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight() + "px)");

    TextField tf;
    tf = new TextField();
    tf.setCaption("This one has a caption");
    tf.setValue("60% expand TextField");
    tf.setWidth("100%");
    tf.setHeight("100%");
    tf.setRows(2);
    ol.addComponent(tf);
    ol.setExpandRatio(tf, 60);

    tf = new TextField();
    tf.setValue("100px 100px TextField");
    tf.setWidth("100px");
    tf.setHeight("100px");
    tf.setRows(2);
    ol.addComponent(tf);
    ol.setComponentAlignment(tf,
            AlignmentHandler.ALIGNMENT_HORIZONTAL_CENTER,
            AlignmentHandler.ALIGNMENT_VERTICAL_CENTER);

    tf = new TextField("40%x40% Field");
    tf.setValue("40% expand (40% height) TextField");
    tf.setWidth("100%");
    tf.setHeight("40%");
    tf.setRows(2);
    // FIX: add first, then configure expand ratio and alignment — both
    // apply to components that are already children of the layout.
    ol.addComponent(tf);
    ol.setExpandRatio(tf, 40);
    ol.setComponentAlignment(tf, AlignmentHandler.ALIGNMENT_RIGHT,
            AlignmentHandler.ALIGNMENT_BOTTOM);

    return ol;
}
private Layout layout_pctFilled2(OrderedLayout ol) {
    // 600x600 layout with two percentage-sized fields; the second is
    // anchored bottom-right.
    ol.setHeight("600px");
    ol.setWidth("600px");
    ol.setMargin(true);
    ol.setSpacing(false);
    ol.setCaption("100 % filled with fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight() + "px)");

    TextField tf;
    tf = new TextField();
    tf.setValue("80% x 20% TextField");
    tf.setWidth("80%");
    tf.setHeight("20%");
    tf.setRows(2);
    ol.addComponent(tf);

    tf = new TextField("20%x60% Field");
    tf.setCaption(null);
    tf.setValue("20% x 60% TextField");
    tf.setWidth("20%");
    tf.setHeight("60%");
    tf.setRows(2);
    // FIX: alignment must be applied after the field has been added.
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, AlignmentHandler.ALIGNMENT_RIGHT,
            AlignmentHandler.ALIGNMENT_BOTTOM);

    return ol;
}
private Layout layout_underFilled(OrderedLayout ol) {
    // Deliberately under-filled 700x700 layout: one 60%-wide field plus
    // two fixed 200x200 fields at opposite corners.
    ol.setHeight("700px");
    ol.setWidth("700px");
    ol.setMargin(true);
    ol.setSpacing(true);
    ol.setCaption("Underfilled with fixed width (" + ol.getWidth()
            + "px) and fixed height (" + ol.getHeight() + "px)");

    TextField tf;
    tf = new TextField("60%x100% Field");
    tf.setCaption("Short capt");
    tf.setValue("60% x 100% TextField");
    tf.setWidth("60%");
    tf.setHeight("100%");
    tf.setRequired(true);
    tf.setRows(2);
    // FIX: add before aligning — alignment targets layout children.
    ol.addComponent(tf);
    ol.setComponentAlignment(tf,
            AlignmentHandler.ALIGNMENT_HORIZONTAL_CENTER,
            AlignmentHandler.ALIGNMENT_VERTICAL_CENTER);

    tf = new TextField("200px x 200px Field");
    tf.setValue("200x200 field");
    tf.setRows(2);
    tf.setHeight("200px");
    tf.setWidth("200px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_LEFT,
            OrderedLayout.ALIGNMENT_TOP);

    tf = new TextField("200px x 200px Field");
    tf.setValue("200x200 field");
    tf.setRows(2);
    tf.setHeight("200px");
    tf.setWidth("200px");
    ol.addComponent(tf);
    ol.setComponentAlignment(tf, OrderedLayout.ALIGNMENT_RIGHT,
            OrderedLayout.ALIGNMENT_BOTTOM);

    return ol;
}
} | Fixed OrderedLayout test case to work with current layouts
svn changeset:6008/svn branch:trunk
| src/com/itmill/toolkit/tests/TestOrderedLayout.java | Fixed OrderedLayout test case to work with current layouts |
|
Java | apache-2.0 | d320592a14d866632c0f7bf375e149413e997d12 | 0 | codehaus/mvel,codehaus/mvel | /**
* MVEL (The MVFLEX Expression Language)
*
* Copyright (C) 2007 Christopher Brock, MVFLEX/Valhalla Project and the Codehaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.mvel.compiler;
import org.mvel.*;
import static org.mvel.Operator.*;
import org.mvel.ast.*;
import static org.mvel.util.ArrayTools.findFirst;
import org.mvel.util.ExecutionStack;
import static org.mvel.util.ParseTools.*;
import static org.mvel.util.PropertyTools.isDigit;
import static org.mvel.util.PropertyTools.isIdentifierPart;
import org.mvel.util.Stack;
import java.io.Serializable;
import static java.lang.Boolean.FALSE;
import static java.lang.Boolean.TRUE;
import static java.lang.Character.isWhitespace;
import static java.lang.Float.parseFloat;
import static java.lang.Runtime.getRuntime;
import static java.lang.System.getProperty;
import static java.lang.Thread.currentThread;
import static java.util.Collections.synchronizedMap;
import java.util.HashMap;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Christopher Brock
*/
public class AbstractParser implements Serializable {
// --- Parser state -------------------------------------------------------
protected char[] expr;       // expression source being parsed
protected int cursor;        // current position within expr
protected int length;        // total length of expr
protected int fields;        // bit flags propagated to produced AST nodes
protected boolean greedy = true;             // greedy token capture mode
protected boolean lastWasIdentifier = false; // previous token was an identifier
protected boolean lastWasLineLabel = false;  // previous node was a debug line label
protected boolean lastWasComment = false;    // previous token was a comment
protected boolean literalOnly = true;        // expression (so far) contains only literals
protected boolean debugSymbols = false;      // emit LineLabel nodes for debugging
private int line = 1;        // current source line (used when debugSymbols is on)
protected ASTNode lastNode;  // last AST node produced by nextToken()
// Weak cache of previously converted expression sources (see configureFactory()).
private static Map<String, char[]> EX_PRECACHE;
// Shared literal table: true/false/null plus JCL class references.
public static final Map<String, Object> LITERALS =
        new HashMap<String, Object>(35 * 2, 0.4f);
// Operator keyword -> Operator constant lookup.
public static final Map<String, Integer> OPERATORS =
        new HashMap<String, Integer>(25 * 2, 0.4f);
protected Stack stk;         // operand/operator stack used during parsing
// Nodes queued to be returned by subsequent nextToken() calls.
protected ExecutionStack splitAccumulator = new ExecutionStack();
// Per-thread parser context (scoping, imports, debug info).
protected static ThreadLocal<ParserContext> parserContext;
static {
    configureFactory();

    /**
     * Setup the basic literals
     */
    AbstractParser.LITERALS.put("true", TRUE);
    AbstractParser.LITERALS.put("false", FALSE);
    AbstractParser.LITERALS.put("null", null);
    AbstractParser.LITERALS.put("nil", null);
    AbstractParser.LITERALS.put("empty", BlankLiteral.INSTANCE);
    // AbstractParser.LITERALS.put("this", ThisLiteral.class);

    /**
     * Add System and all the class wrappers from the JCL.
     */
    LITERALS.put("System", System.class);
    LITERALS.put("String", String.class);
    LITERALS.put("Integer", Integer.class);
    LITERALS.put("int", Integer.class);
    LITERALS.put("Long", Long.class);
    LITERALS.put("long", Long.class);
    LITERALS.put("Boolean", Boolean.class);
    LITERALS.put("boolean", Boolean.class);
    LITERALS.put("Short", Short.class);
    LITERALS.put("short", Short.class);
    LITERALS.put("Character", Character.class);
    LITERALS.put("char", Character.class);
    LITERALS.put("Double", Double.class);
    // NOTE(review): "double"/"float" map to primitive classes while
    // "int"/"long" map to wrappers — confirm this asymmetry is intended.
    LITERALS.put("double", double.class);
    LITERALS.put("Float", Float.class);
    LITERALS.put("float", float.class);
    LITERALS.put("Math", Math.class);
    LITERALS.put("Void", Void.class);
    LITERALS.put("Object", Object.class);
    LITERALS.put("Class", Class.class);
    LITERALS.put("ClassLoader", ClassLoader.class);
    LITERALS.put("Runtime", Runtime.class);
    LITERALS.put("Thread", Thread.class);
    LITERALS.put("Compiler", Compiler.class);
    LITERALS.put("StringBuffer", StringBuffer.class);
    LITERALS.put("ThreadLocal", ThreadLocal.class);
    LITERALS.put("SecurityManager", SecurityManager.class);
    LITERALS.put("StrictMath", StrictMath.class);
    LITERALS.put("Array", java.lang.reflect.Array.class);

    // Register StringBuilder only on Java 5+.
    // FIX: read three characters of "java.version" ("1.5"), not two ("1."),
    // which parsed as 1.0 and meant the branch never executed.
    if (parseFloat(getProperty("java.version").substring(0, 3)) >= 1.5) {
        try {
            LITERALS.put("StringBuilder", currentThread().getContextClassLoader().loadClass("java.lang.StringBuilder"));
        }
        catch (Exception e) {
            throw new RuntimeException("cannot resolve a built-in literal", e);
        }
    }

    //LITERALS.putAll(Units.MEASUREMENTS_ALL);
    //loadLanguageFeaturesByLevel(5);
    setLanguageLevel(5);
}
public static void configureFactory() {
    // Expression pre-cache: weakly referenced so cached char[] forms can be
    // collected under memory pressure; wrapped in a synchronized view when
    // MVEL runs in thread-safe mode.
    Map<String, char[]> cache = new WeakHashMap<String, char[]>(10);
    EX_PRECACHE = MVEL.THREAD_SAFE ? synchronizedMap(cache) : cache;
}
protected ASTNode nextTokenSkipSymbols() {
    // Fetch the next token, transparently skipping a single symbol node
    // (a node whose fields value is -1).
    ASTNode node = nextToken();
    if (node != null && node.getFields() == -1) {
        node = nextToken();
    }
    return node;
}
/**
* Retrieve the next token in the expression.
*
* @return -
*/
protected ASTNode nextToken() {
try {
/**
* If the cursor is at the end of the expression, we have nothing more to do:
* return null.
*/
if (cursor >= length) {
return null;
}
else if (!splitAccumulator.isEmpty()) {
return lastNode = (ASTNode) splitAccumulator.pop();
}
int brace, start = cursor, idx;
/**
* Because of parser recursion for sub-expression parsing, we sometimes need to remain
* certain field states. We do not reset for assignments, boolean mode, list creation or
* a capture only mode.
*/
fields = fields & (ASTNode.INLINE_COLLECTION | ASTNode.COMPILE_IMMEDIATE);
boolean capture = false, union = false;
ParserContext pCtx = getParserContext();
if (debugSymbols) {
if (!lastWasLineLabel) {
if (pCtx.getSourceFile() == null) {
throw new CompileException("unable to produce debugging symbols: source name must be provided.");
}
line = pCtx.getLineCount();
skipWhitespaceWithLineAccounting();
if (!pCtx.isKnownLine(pCtx.getSourceFile(), pCtx.setLineCount(line)) && !pCtx.isBlockSymbols()) {
lastWasLineLabel = true;
pCtx.setLineAndOffset(line, cursor);
return lastNode = pCtx.setLastLineLabel(new LineLabel(pCtx.getSourceFile(), line));
}
}
else {
lastWasComment = lastWasLineLabel = false;
}
}
/**
* Skip any whitespace currently under the starting point.
*/
while (start != length && isWhitespace(expr[start])) start++;
/**
* From here to the end of the method is the core MVEL parsing code. Fiddling around here is asking for
* trouble unless you really know what you're doing.
*/
for (cursor = start; cursor != length;) {
if (isIdentifierPart(expr[cursor])) {
/**
* If the current character under the cursor is a valid
* part of an identifier, we keep capturing.
*/
capture = true;
cursor++;
}
else if (capture) {
String t;
if (OPERATORS.containsKey(t = new String(expr, start, cursor - start))) {
switch (OPERATORS.get(t)) {
case NEW:
start = cursor + 1;
captureToEOT();
return lastNode = new NewObjectNode(subArray(start, cursor), fields);
case ASSERT:
start = cursor + 1;
captureToEOS();
return lastNode = new AssertNode(subArray(start, cursor--), fields);
case RETURN:
start = cursor + 1;
captureToEOS();
return lastNode = new ReturnNode(subArray(start, cursor), fields);
case IF:
return captureCodeBlock(ASTNode.BLOCK_IF);
case FOREACH:
return captureCodeBlock(ASTNode.BLOCK_FOREACH);
case WHILE:
return captureCodeBlock(ASTNode.BLOCK_WHILE);
case WITH:
return captureCodeBlock(ASTNode.BLOCK_WITH);
case IMPORT:
start = cursor + 1;
captureToEOS();
ImportNode importNode = new ImportNode(subArray(start, cursor--), fields);
if (importNode.isPackageImport()) {
pCtx.addPackageImport(importNode.getPackageImport());
cursor++;
}
else {
pCtx.addImport(getSimpleClassName(importNode.getImportClass()), importNode.getImportClass());
}
return importNode;
case IMPORT_STATIC:
start = cursor + 1;
captureToEOS();
return lastNode = new StaticImportNode(subArray(start, cursor--));
case FUNCTION:
Function function = (Function) captureCodeBlock(FUNCTION);
capture = false;
start = cursor + 1;
return function;
case UNTYPED_VAR:
start = cursor + 1;
captureToEOT();
int end = cursor;
skipWhitespace();
if (expr[cursor] == '=') {
cursor = start;
continue;
}
else {
String name = new String(subArray(start, end));
if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedDeclTypedVarNode(idx, Object.class);
}
else {
return lastNode = new DeclTypedVarNode(name, Object.class, fields);
}
}
}
}
skipWhitespace();
/**
* If we *were* capturing a token, and we just hit a non-identifier
* character, we stop and figure out what to do.
*/
if (cursor != length && expr[cursor] == '(') {
cursor = balancedCapture(expr, cursor, '(') + 1;
}
/**
* If we encounter any of the following cases, we are still dealing with
* a contiguous token.
*/
String name;
if (cursor != length) {
switch (expr[cursor]) {
case '?':
if (lookToLast() == '.') {
capture = true;
cursor++;
continue;
}
case '+':
switch (lookAhead()) {
case '+':
if ((idx = pCtx.variableIndexOf(name = new String(subArray(start, cursor)))) != -1) {
lastNode = new IndexedPostFixIncNode(idx);
}
else {
lastNode = new PostFixIncNode(name);
}
cursor += 2;
return lastNode;
case '=':
name = new String(expr, start, trimLeft(cursor) - start);
start = cursor += 2;
captureToEOS();
if (union) {
return lastNode = new DeepAssignmentNode(subArray(start, cursor), fields, Operator.ADD, t);
}
else if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedAssignmentNode(subArray(start, cursor), fields, Operator.ADD, name, idx);
}
else {
return lastNode = new AssignmentNode(subArray(start, cursor), fields, Operator.ADD, name);
}
}
break;
case '-':
switch (lookAhead()) {
case '-':
if ((idx = pCtx.variableIndexOf(name = new String(subArray(start, cursor)))) != -1) {
lastNode = new IndexedPostFixDecNode(idx);
}
else {
lastNode = new PostFixDecNode(name);
}
cursor += 2;
return lastNode;
case '=':
name = new String(expr, start, trimLeft(cursor) - start);
start = cursor += 2;
captureToEOS();
if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedOperativeAssign(subArray(start, cursor), Operator.SUB, idx, fields);
}
else {
return lastNode = new OperativeAssign(name, subArray(start, cursor), Operator.SUB, fields);
}
}
break;
case '*':
if (isNext('=')) {
name = new String(expr, start, trimLeft(cursor) - start);
start = cursor += 2;
captureToEOS();
if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedOperativeAssign(subArray(start, cursor), Operator.MULT, idx, fields);
}
else {
return lastNode = new OperativeAssign(name, subArray(start, cursor), Operator.MULT, fields);
}
}
break;
case '/':
if (isNext('=')) {
name = new String(expr, start, trimLeft(cursor) - start);
start = cursor += 2;
captureToEOS();
if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedOperativeAssign(subArray(start, cursor), Operator.DIV, idx, fields);
}
else {
return lastNode = new OperativeAssign(name, subArray(start, cursor), Operator.DIV, fields);
}
}
break;
case ']':
case '[':
cursor = balancedCapture(expr, cursor, '[') + 1;
continue;
case '.':
union = true;
cursor++;
continue;
case '~':
if (isNext('=')) {
char[] stmt = subArray(start, trimLeft(cursor));
start = cursor += 2;
skipWhitespace();
return lastNode = new RegExMatch(stmt, fields, subArray(start, (cursor = balancedCapture(expr, cursor, expr[cursor]) + 1)));
}
break;
case '=':
if (isNext('+')) {
name = new String(expr, start, trimLeft(cursor) - start);
start = cursor += 2;
captureToEOS();
if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedOperativeAssign(subArray(start, cursor), Operator.ADD, idx, fields);
}
else {
return lastNode = new OperativeAssign(name, subArray(start, cursor), Operator.ADD, fields);
}
}
if (greedy && !isNext('=')) {
cursor++;
captureToEOS();
if (union) {
return lastNode = new DeepAssignmentNode(subArray(start, cursor), fields | ASTNode.ASSIGN);
}
else if (lastWasIdentifier) {
/**
* Check for typing information.
*/
if (lastNode.getLiteralValue() instanceof String) {
if (pCtx.hasImport((String) lastNode.getLiteralValue())) {
lastNode.setLiteralValue(pCtx.getImport((String) lastNode.getLiteralValue()));
lastNode.setAsLiteral();
lastNode.discard();
}
else if (stk != null && stk.peek() instanceof Class) {
lastNode.setLiteralValue(stk.pop());
lastNode.setAsLiteral();
lastNode.discard();
}
else {
try {
/**
* take a stab in the dark and try and load the class
*/
lastNode.setLiteralValue(createClass((String) lastNode.getLiteralValue()));
lastNode.setAsLiteral();
lastNode.discard();
}
catch (ClassNotFoundException e) {
/**
* Just fail through.
*/
}
}
}
if (lastNode.isLiteral() && lastNode.getLiteralValue() instanceof Class) {
lastNode.discard();
captureToEOS();
return new TypedVarNode(subArray(start, cursor), fields | ASTNode.ASSIGN, (Class)
lastNode.getLiteralValue());
}
throw new ParseException("unknown class: " + lastNode.getLiteralValue());
}
else
if (pCtx != null && ((idx = pCtx.variableIndexOf(t)) != -1 || (pCtx.isIndexAllocation()))) {
IndexedAssignmentNode ian = new IndexedAssignmentNode(subArray(start, cursor), ASTNode.ASSIGN, idx);
if (idx == -1) {
pCtx.addIndexedVariable(t = ian.getAssignmentVar());
ian.setRegister(idx = pCtx.variableIndexOf(t));
}
return lastNode = ian;
}
else {
return lastNode = new AssignmentNode(subArray(start, cursor), fields | ASTNode.ASSIGN);
}
}
}
}
/**
* Produce the token.
*/
trimWhitespace();
return createPropertyToken(start, cursor);
}
else {
String name;
switch (expr[cursor]) {
case '@': {
start++;
captureToEOT();
name = new String(expr, start, cursor - start);
if (pCtx.getInterceptors() == null || !pCtx.getInterceptors().
containsKey(name)) {
throw new CompileException("reference to undefined interceptor: " + name, expr, cursor);
}
return lastNode = new InterceptorWrapper(pCtx.getInterceptors().get(name), nextToken());
}
case '=':
return createToken(expr, start, (cursor += 2), fields);
case '-':
if (isNext('-')) {
start = cursor += 2;
captureToEOT();
if ((idx = pCtx.variableIndexOf(name = new String(subArray(start, cursor)))) != -1) {
return lastNode = new IndexedPreFixDecNode(idx);
}
else {
return lastNode = new PreFixDecNode(name);
}
}
else if ((cursor != 0 && !isWhitespace(lookBehind())) || !isDigit(lookAhead())) {
return createToken(expr, start, cursor++ + 1, fields);
}
else if ((cursor - 1) != 0 || (!isDigit(lookBehind())) && isDigit(lookAhead())) {
cursor++;
break;
}
case '+':
if (isNext('+')) {
start = cursor += 2;
captureToEOT();
if ((idx = pCtx.variableIndexOf(name = new String(subArray(start, cursor)))) != -1) {
return lastNode = new IndexedPreFixIncNode(idx);
}
else {
return lastNode = new PreFixIncNode(name);
}
}
return createToken(expr, start, cursor++ + 1, fields);
case '*':
if (isNext('*')) {
cursor++;
}
return createToken(expr, start, cursor++ + 1, fields);
case ';':
cursor++;
lastWasIdentifier = false;
return lastNode = new EndOfStatement();
case '#':
case '/':
if (isNext(expr[cursor])) {
/**
* Handle single line comments.
*/
// while (cursor != length && expr[cursor] != '\n') cursor++;
captureToEOL();
if (debugSymbols) {
line = pCtx.getLineCount();
skipWhitespaceWithLineAccounting();
if (lastNode instanceof LineLabel) {
pCtx.getLastLineLabel().setLineNumber(line);
pCtx.addKnownLine(line);
}
lastWasComment = true;
pCtx.setLineCount(line);
}
else if (cursor != length) {
skipWhitespace();
}
if ((start = cursor) >= length) return null;
continue;
}
else if (expr[cursor] == '/' && isNext('*')) {
/**
* Handle multi-line comments.
*/
int len = length - 1;
/**
* This probably seems highly redundant, but sub-compilations within the same
* source will spawn a new compiler, and we need to sync this with the
* parser context;
*/
if (debugSymbols) {
line = pCtx.getLineCount();
}
while (true) {
cursor++;
/**
* Since multi-line comments may cross lines, we must keep track of any line-break
* we encounter.
*/
if (debugSymbols) {
skipWhitespaceWithLineAccounting();
}
if (cursor == len) {
throw new CompileException("unterminated block comment", expr, cursor);
}
if (expr[cursor] == '*' && isNext('/')) {
if ((cursor += 2) >= length) return null;
skipWhitespaceWithLineAccounting();
start = cursor;
break;
}
}
if (debugSymbols) {
pCtx.setLineCount(line);
if (lastNode instanceof LineLabel) {
pCtx.getLastLineLabel().setLineNumber(line);
pCtx.addKnownLine(line);
}
lastWasComment = true;
}
continue;
}
case '?':
case ':':
case '^':
case '%': {
return createToken(expr, start, cursor++ + 1, fields);
}
case '(': {
cursor++;
boolean singleToken = true;
boolean lastWS = false;
skipWhitespace();
for (brace = 1; cursor != length && brace != 0; cursor++) {
switch (expr[cursor]) {
case '(':
brace++;
break;
case ')':
brace--;
break;
case '\'':
cursor = captureStringLiteral('\'', expr, cursor, length);
break;
case '"':
cursor = captureStringLiteral('"', expr, cursor, length);
break;
case 'i':
if (isNext('n') && isWhitespace(lookAhead(2)) && !isIdentifierPart(lookBehind())) {
fields |= ASTNode.FOLD;
for (int level = brace; cursor != length; cursor++) {
switch (expr[cursor]) {
case '(':
brace++;
break;
case ')':
if (--brace != level) {
if (lookAhead() == '.') {
lastNode = createToken(expr, trimRight(start + 1), (start = cursor++), ASTNode.FOLD);
captureToEOT();
return lastNode = new Union(expr, trimRight(start + 2), cursor, fields, lastNode);
}
else {
return createToken(expr, trimRight(start + 1), cursor++, ASTNode.FOLD);
}
}
break;
case '\'':
cursor = captureStringLiteral('\'', expr, cursor, length);
break;
case '"':
cursor = captureStringLiteral('\'', expr, cursor, length);
break;
}
}
}
break;
default:
/**
* Check to see if we should disqualify this current token as a potential
* type-cast candidate.
*/
if ((lastWS && expr[cursor] != '.') || !(isIdentifierPart(expr[cursor]) || expr[cursor] == '.')) {
singleToken = false;
}
else if (isWhitespace(expr[cursor])) {
lastWS = true;
skipWhitespace();
cursor--;
}
}
}
if (brace != 0) {
throw new CompileException("unbalanced braces in expression: (" + brace + "):", expr, cursor);
}
char[] _subset = null;
if (singleToken) {
int st;
String tokenStr = new String(_subset = subset(expr, st = trimRight(start + 1), trimLeft(cursor - 1) - st));
if (pCtx.hasImport(tokenStr)) {
start = cursor;
captureToEOS();
return lastNode = new TypeCast(subset(expr, start, cursor - start), pCtx.getImport(tokenStr), fields);
}
else {
try {
/**
*
* take a stab in the dark and try and load the class
*/
int _start = cursor;
captureToEOS();
return lastNode = new TypeCast(subset(expr, _start, cursor - _start), createClass(tokenStr), fields);
}
catch (ClassNotFoundException e) {
/**
* Just fail through.
*/
}
}
}
if (_subset != null) {
return handleUnion(handleSubstatement(new Substatement(_subset, fields)));
}
else {
return handleUnion(handleSubstatement(new Substatement(subset(expr, start = trimRight(start + 1), trimLeft(cursor - 1) - start), fields)));
}
}
case '}':
case ']':
case ')': {
throw new ParseException("unbalanced braces", expr, cursor);
}
case '>': {
if (expr[cursor + 1] == '>') {
if (expr[cursor += 2] == '>') cursor++;
return createToken(expr, start, cursor, fields);
}
else if (expr[cursor + 1] == '=') {
return createToken(expr, start, cursor += 2, fields);
}
else {
return createToken(expr, start, ++cursor, fields);
}
}
case '<': {
if (expr[++cursor] == '<') {
if (expr[++cursor] == '<') cursor++;
return createToken(expr, start, cursor, fields);
}
else if (expr[cursor] == '=') {
return createToken(expr, start, ++cursor, fields);
}
else {
return createToken(expr, start, cursor, fields);
}
}
case '\'':
case '"':
lastNode = new LiteralNode(
handleStringEscapes(
subset(expr, start + 1, (cursor = captureStringLiteral(expr[cursor], expr, cursor, length)) - start - 1))
, String.class);
cursor++;
if (tokenContinues()) {
return lastNode = handleUnion(lastNode);
}
return lastNode;
case '&': {
if (expr[cursor++ + 1] == '&') {
return createToken(expr, start, ++cursor, fields);
}
else {
return createToken(expr, start, cursor, fields);
}
}
case '|': {
if (expr[cursor++ + 1] == '|') {
return createToken(expr, start, ++cursor, fields);
}
else {
return createToken(expr, start, cursor, fields);
}
}
case '~':
if ((cursor++ - 1 != 0 || !isIdentifierPart(lookBehind()))
&& isDigit(expr[cursor])) {
start = cursor;
captureToEOT();
return lastNode = new Invert(subset(expr, start, cursor - start), fields);
}
else if (expr[cursor] == '(') {
start = cursor--;
captureToEOT();
return lastNode = new Invert(subset(expr, start, cursor - start), fields);
}
else {
if (expr[cursor] == '=') cursor++;
return createToken(expr, start, cursor, fields);
}
case '!': {
if (isIdentifierPart(expr[++cursor])) {
start = cursor;
captureToEOT();
return lastNode = new Negation(subset(expr, start, cursor - start), fields);
}
else if (expr[cursor] == '(') {
start = cursor--;
captureToEOT();
return lastNode = new Negation(subset(expr, start, cursor - start), fields);
}
else if (expr[cursor] != '=')
throw new CompileException("unexpected operator '!'", expr, cursor, null);
else {
return createToken(expr, start, ++cursor, fields);
}
}
case '[':
case '{':
cursor = balancedCapture(expr, cursor, expr[cursor]) + 1;
if (tokenContinues()) {
// if (lookAhead(1) == '.') {
lastNode = new InlineCollectionNode(expr, start, start = cursor, fields);
captureToEOT();
return lastNode = new Union(expr, start + 1, cursor, fields, lastNode);
}
else {
return lastNode = new InlineCollectionNode(expr, start, cursor, fields);
}
default:
cursor++;
}
}
}
if (start == cursor) return null;
return createPropertyToken(start, cursor);
}
catch (CompileException e) {
throw new CompileException(e.getMessage(), expr, cursor, e.getCursor() == 0, e);
}
}
/**
 * Reduce a parenthesized sub-statement to a LiteralNode when its compiled
 * statement is compile-time constant (literal-only); otherwise return the
 * substatement unchanged.
 *
 * @param stmt the captured substatement
 * @return a constant-folded LiteralNode, or {@code stmt} itself
 */
public ASTNode handleSubstatement(Substatement stmt) {
    // Not foldable: either no compiled statement or it references runtime state.
    if (stmt.getStatement() == null || !stmt.getStatement().isLiteralOnly()) {
        return stmt;
    }
    // Literal-only statements evaluate safely with no context or factory.
    return new LiteralNode(stmt.getStatement().getValue(null, null, null), fields);
}
/**
 * If the input continues with a property access ('.') or an indexed access
 * ('['), wrap the given node in a Union spanning the remainder of the token;
 * otherwise return the node as-is. Updates {@code lastNode} either way.
 *
 * @param node the node that may be extended
 * @return the (possibly wrapped) node
 */
protected ASTNode handleUnion(ASTNode node) {
    if (cursor == length) {
        return lastNode = node;
    }
    skipWhitespace();
    char next = expr[cursor];
    if (next == '.') {
        // Property chain: the union body starts just past the dot.
        int unionStart = cursor + 1;
        captureToEOT();
        return lastNode = new Union(expr, unionStart, cursor, fields, node);
    }
    if (next == '[') {
        // Indexed access: capture the token and union it onto the node.
        captureToEOT();
        return lastNode = new Union(expr, cursor, cursor, fields, node);
    }
    return lastNode = node;
}
/**
 * Construct a plain ASTNode over the range [start, end) and record it as the
 * parser's last node, noting whether it parsed as an identifier.
 *
 * @param expr   expression buffer
 * @param start  inclusive start offset
 * @param end    exclusive end offset
 * @param fields node fields/flags
 * @return the newly created node (also stored in {@code lastNode})
 */
private ASTNode createToken(final char[] expr, final int start, final int end, int fields) {
    ASTNode node = new ASTNode(expr, start, end, fields);
    lastWasIdentifier = node.isIdentifier();
    return lastNode = node;
}
/**
 * Copy the range [start, end) of the parser's expression buffer into a new
 * array. Returns an empty array for an empty or inverted range.
 *
 * @param start inclusive start offset into {@code expr}
 * @param end   exclusive end offset into {@code expr}
 * @return a fresh char[] holding the requested range
 */
private char[] subArray(final int start, final int end) {
    if (start >= end) return new char[0];
    char[] newA = new char[end - start];
    // System.arraycopy replaces the original element-by-element loop: same
    // result, idiomatic, and intrinsified by the JVM.
    System.arraycopy(expr, start, newA, 0, newA.length);
    return newA;
}
/**
 * Produce a property/identifier token for the range [start, end).
 * Resolution order: thread-context imports (class literals and deep
 * property access on imported classes), then METHOD-flagged plain nodes,
 * then known literals, then operators, and finally a plain ASTNode.
 * Sets {@code lastWasIdentifier} and {@code lastNode} as side effects.
 */
private ASTNode createPropertyToken(int start, int end) {
    lastWasIdentifier = true;
    String tmp;
    if (parserContext != null && parserContext.get() != null && parserContext.get().hasImports()) {
        // NOTE(review): this uses 'cursor' rather than the 'end' parameter;
        // both call sites currently pass end == cursor — confirm before
        // relying on 'end' being honored here.
        char[] _subset = subset(expr, start, cursor - start);
        int offset;
        if ((offset = findFirst('.', _subset)) != -1) {
            // Dotted name: if the head segment is an imported class, emit a
            // deep property access rooted at that class.
            String iStr = new String(_subset, 0, offset);
            if (getParserContext().hasImport(iStr)) {
                return lastNode = new LiteralDeepPropertyNode(subset(_subset, offset + 1, _subset.length - offset - 1), fields, getParserContext().getImport(iStr));
            }
        }
        else {
            // Simple name: an imported class resolves to a Class literal.
            if (getParserContext().hasImport(tmp = new String(_subset))) {
                Object i = getParserContext().getStaticOrClassImport(tmp);
                if (i instanceof Class) {
                    return lastNode = new LiteralNode(i, Class.class);
                }
            }
            lastWasIdentifier = true;
            return lastNode = new ASTNode(_subset, 0, _subset.length, fields);
        }
    }
    else if ((fields & ASTNode.METHOD) != 0) {
        return lastNode = new ASTNode(expr, start, end, fields);
    }
    else if (LITERALS.containsKey(tmp = new String(expr, start, end - start))) {
        // Known literal keyword (true, false, null, ...).
        return lastNode = new LiteralNode(LITERALS.get(tmp));
    }
    else if (OPERATORS.containsKey(tmp)) {
        // Word operator (and, or, instanceof, ...).
        return lastNode = new OperatorNode(OPERATORS.get(tmp));
    }
    // Fallback: ordinary property/identifier node.
    return lastNode = new ASTNode(expr, start, end, fields);
}
/**
 * Build the AST node for a block construct from the captured condition
 * range [condStart, condEnd) and body range [blockStart, blockEnd).
 * Advances the cursor past the block and injects a synthetic
 * EndOfStatement when the source did not terminate the block with ';'.
 *
 * @param type one of ASTNode.BLOCK_IF / BLOCK_FOREACH / BLOCK_WHILE;
 *             anything else yields a WithNode
 */
private ASTNode createBlockToken(final int condStart,
                                 final int condEnd, final int blockStart, final int blockEnd, int type) {
    lastWasIdentifier = false;
    cursor++;
    if (!isStatementManuallyTerminated()) {
        // Keep downstream statement splitting consistent for unterminated blocks.
        splitAccumulator.add(new EndOfStatement());
    }
    switch (type) {
        case ASTNode.BLOCK_IF:
            return new IfNode(subArray(condStart, condEnd), subArray(blockStart, blockEnd), fields);
        case ASTNode.BLOCK_FOREACH:
            return new ForEachNode(subArray(condStart, condEnd), subArray(blockStart, blockEnd), fields);
        case ASTNode.BLOCK_WHILE:
            return new WhileNode(subArray(condStart, condEnd), subArray(blockStart, blockEnd), fields);
        default:
            return new WithNode(subArray(condStart, condEnd), subArray(blockStart, blockEnd), fields);
    }
}
/**
 * Capture an entire code block starting at the current cursor position:
 * a full if / else-if / else chain for BLOCK_IF, otherwise a single
 * foreach/while/with/function block delegated to _captureBlock.
 *
 * @param type the ASTNode block type (or FUNCTION)
 * @return the root node of the captured block
 */
private ASTNode captureCodeBlock(int type) {
    boolean cond = true;
    ASTNode first = null;
    ASTNode tk = null;
    switch (type) {
        case ASTNode.BLOCK_IF: {
            // Walk the if / else-if / else chain, linking each clause onto
            // the first IfNode.
            do {
                if (tk != null) {
                    captureToNextTokenJunction();
                    skipWhitespace();
                    // 'cond' is true only for an 'else if' (a new condition
                    // follows); a bare 'else' or '{' carries no condition.
                    // Note the ++cursor side effects while matching "if".
                    cond = expr[cursor] != '{' && expr[cursor] == 'i' && expr[++cursor] == 'f'
                            && (isWhitespace(expr[++cursor]) || expr[cursor] == '(');
                }
                // A populated else-block terminates the chain.
                if (((IfNode) (tk = _captureBlock(tk, expr, cond, type))).getElseBlock() != null) {
                    cursor++;
                    return first;
                }
                if (first == null) first = tk;
                if (cursor != length && expr[cursor] != ';') {
                    cursor++;
                }
            }
            while (ifThenElseblockContinues());
            return first;
        }
        default: // BLOCK_WITH, BLOCK_FOREACH, BLOCK_WHILE or FUNCTION
            captureToNextTokenJunction();
            if (debugSymbols) {
                skipWhitespaceWithLineAccounting();
            }
            else {
                skipWhitespace();
            }
            return _captureBlock(null, expr, true, type);
    }
}
/**
 * Capture a single block clause. For FUNCTION: the function name, an
 * optional parenthesized parameter list, and a braced or single-statement
 * body. For conditional blocks: the parenthesized condition (only when
 * {@code cond} is true) and the body. For BLOCK_IF with a non-null
 * {@code node}, the clause is linked onto the existing if-chain as an
 * else-if or else block.
 */
private ASTNode _captureBlock(ASTNode node, final char[] expr, boolean cond, int type) {
    skipWhitespace();
    int startCond = 0;
    int endCond = 0;
    int blockStart;
    int blockEnd;
    /**
     * Functions are a special case we handle differently from the rest of block parsing
     */
    if (type == FUNCTION) {
        int start = cursor;
        captureToNextTokenJunction();
        if (cursor == length) {
            throw new CompileException("unexpected end of statement", expr, start);
        }
        /**
         * Grab the function name.
         */
        String functionName = new String(expr, start, (startCond = cursor) - start).trim();
        /**
         * Check to see if the name is legal.
         */
        if (isReservedWord(functionName) || !isValidNameorLabel(functionName))
            throw new CompileException("illegal function name or use of reserved word", expr, cursor);
        if (expr[cursor] == '(') {
            /**
             * If we discover an opening bracket after the function name, we check to see
             * if this function accepts parameters.
             */
            endCond = cursor = balancedCapture(expr, startCond = cursor, '(');
            startCond++;
            cursor++;
            skipWhitespace();
            if (cursor >= length) {
                throw new CompileException("unbalanced braces", expr, cursor);
            }
            else if (expr[cursor] == '{') {
                blockStart = cursor;
                blockEnd = cursor = balancedCapture(expr, cursor, '{');
            }
            else {
                // Unbracketed body: a single statement.
                blockStart = cursor;
                captureToEOS();
                blockEnd = cursor;
            }
        }
        else {
            /**
             * This function has no parameters.
             */
            if (expr[cursor] == '{') {
                /**
                 * This function is bracketed. We capture the entire range in the brackets.
                 */
                blockStart = cursor;
                blockEnd = cursor = balancedCapture(expr, cursor, '{');
            }
            else {
                /**
                 * This is a single statement function declaration. We only capture the statement.
                 */
                blockStart = cursor;
                captureToEOS();
                blockEnd = cursor;
            }
        }
        /**
         * Trim any whitespace from the captured block range.
         */
        blockStart = trimRight(blockStart + 1);
        blockEnd = trimLeft(blockEnd);
        cursor++;
        /**
         * Check if the function is manually terminated.
         */
        if (!isStatementManuallyTerminated()) {
            /**
             * Add an EndOfStatement to the split accumulator in the parser.
             */
            splitAccumulator.add(new EndOfStatement());
        }
        /**
         * Produce the function node.
         */
        return new Function(functionName, subArray(startCond, endCond), subArray(blockStart, blockEnd));
    }
    else if (cond) {
        /**
         * This block is an: IF, FOREACH or WHILE node, and carries a
         * parenthesized condition to capture.
         */
        if (debugSymbols) {
            // Line-accounting variant keeps the debug line counter in sync.
            int[] cap = balancedCaptureWithLineAccounting(expr, startCond = cursor, '(');
            endCond = cursor = cap[0];
            startCond++;
            cursor++;
            getParserContext().setLineCount(line = getParserContext().getLineCount() + cap[1]);
        }
        else {
            endCond = cursor = balancedCapture(expr, startCond = cursor, '(');
            startCond++;
            cursor++;
        }
    }
    skipWhitespace();
    if (cursor >= length) {
        throw new CompileException("unbalanced braces", expr, cursor);
    }
    else if (expr[cursor] == '{') {
        blockStart = cursor;
        if (debugSymbols) {
            int[] cap = balancedCaptureWithLineAccounting(expr, cursor, '{');
            blockEnd = cursor = cap[0];
            getParserContext().setLineCount((line = getParserContext().getLineCount() + cap[1]));
        }
        else {
            blockEnd = cursor = balancedCapture(expr, cursor, '{');
        }
    }
    else {
        // Unbracketed block: the rest of the statement/line forms the body.
        blockStart = cursor - 1;
        captureToEOSorEOL();
        blockEnd = cursor + 1;
    }
    if (type == ASTNode.BLOCK_IF) {
        IfNode ifNode = (IfNode) node;
        if (node != null) {
            if (!cond) {
                // Bare 'else': attach the body as the chain's else-block.
                return ifNode.setElseBlock(subArray(trimRight(blockStart + 1), trimLeft(blockEnd - 1)));
            }
            else {
                // 'else if': link a new conditional clause onto the chain.
                return ifNode.setElseIf((IfNode) createBlockToken(startCond, endCond, trimRight(blockStart + 1),
                        trimLeft(blockEnd), type));
            }
        }
        else {
            return createBlockToken(startCond, endCond, blockStart + 1, blockEnd, type);
        }
    }
    // DON'T REMOVE THIS COMMENT!
    // else if (isFlag(ASTNode.BLOCK_FOREACH) || isFlag(ASTNode.BLOCK_WITH)) {
    else {
        return createBlockToken(startCond, endCond, trimRight(blockStart + 1), trimLeft(blockEnd), type);
    }
}
/**
 * Checking from the current cursor position, determine whether the
 * if-then-else chain continues with an 'else' (or 'else if') clause.
 * Side effect: may back the cursor up one position (when not on ';')
 * and skips whitespace before the check.
 *
 * @return true if an 'else' keyword follows
 */
protected boolean ifThenElseblockContinues() {
    if ((cursor + 4) < length) {
        if (expr[cursor] != ';') cursor--;
        skipWhitespace();
        // Match the literal keyword 'else' followed by whitespace or '{'.
        return expr[cursor] == 'e' && expr[cursor + 1] == 'l' && expr[cursor + 2] == 's' && expr[cursor + 3] == 'e'
                && (isWhitespace(expr[cursor + 4]) || expr[cursor + 4] == '{');
    }
    return false;
}
/**
 * Checking from the current cursor position, determine whether we are still
 * inside a contiguous token: a '.' or '[' directly under the cursor, or one
 * reachable across whitespace. The cursor is restored only when no
 * continuation is found after the whitespace scan.
 *
 * @return true if the token continues
 */
protected boolean tokenContinues() {
    if (cursor >= length) return false;
    char c = expr[cursor];
    if (c == '.' || c == '[') return true;
    if (!isWhitespace(c)) return false;
    // Peek past whitespace for a continuation; roll back if none.
    int mark = cursor;
    skipWhitespace();
    if (cursor != length && (expr[cursor] == '.' || expr[cursor] == '[')) {
        return true;
    }
    cursor = mark;
    return false;
}
/**
 * Capture from the current cursor position to the end of the statement,
 * skipping over balanced (), [] and {} groups; stops on ';' or '}'.
 */
protected void captureToEOS() {
    while (cursor != length) {
        char c = expr[cursor];
        if (c == '(' || c == '[' || c == '{') {
            // Jump over the whole balanced group in one step.
            cursor = balancedCapture(expr, cursor, c);
        }
        else if (c == ';' || c == '}') {
            return;
        }
        cursor++;
    }
}
/**
 * From the current cursor position, capture to the end of the statement or
 * the end of the line, whichever comes first.
 */
protected void captureToEOSorEOL() {
    for (; cursor != length; cursor++) {
        char c = expr[cursor];
        if (c == '\n' || c == '\r' || c == ';') {
            return;
        }
    }
}
/**
 * From the current cursor position, capture to the end of the line
 * (stops on '\n'; a lone '\r' does not terminate the scan).
 */
protected void captureToEOL() {
    for (; cursor != length; cursor++) {
        if (expr[cursor] == '\n') {
            return;
        }
    }
}
/**
 * From the current cursor position, capture to the end of the current token.
 * Balanced (), [] and {} groups are skipped wholesale; '=', '&', '|' and ';'
 * terminate; '.' (including across whitespace) continues the token.
 */
protected void captureToEOT() {
    skipWhitespace();
    // Pre-increment: the character currently under the cursor is accepted
    // unconditionally and scanning starts at the next one.
    while (++cursor != length) {
        switch (expr[cursor]) {
            case '(':
            case '[':
            case '{':
                cursor = balancedCapture(expr, cursor, expr[cursor]);
                break;
            case '=':
            case '&':
            case '|':
            case ';':
                return;
            case '.':
                skipWhitespace();
                break;
            default:
                if (isWhitespace(expr[cursor])) {
                    skipWhitespace();
                    // assumes skipWhitespace leaves cursor < length here —
                    // TODO confirm; expr[cursor] would overrun on trailing
                    // whitespace at end-of-input.
                    if (expr[cursor] == '.') {
                        // "foo . bar" continues the token across the dot.
                        if (cursor != length) cursor++;
                        skipWhitespace();
                        break;
                    }
                    else {
                        // Whitespace ended the token: back up over it.
                        trimWhitespace();
                        return;
                    }
                }
        }
    }
}
/**
 * Walk backwards from the given position over any whitespace, returning the
 * position just after the last preceding non-whitespace character.
 *
 * @param pos starting position
 * @return adjusted position
 */
protected int trimLeft(int pos) {
    int p = pos;
    while (p != 0 && isWhitespace(expr[p - 1])) {
        p--;
    }
    return p;
}
/**
 * Walk forwards from the given position over any whitespace, returning the
 * position of the first following non-whitespace character.
 *
 * @param pos starting position
 * @return adjusted position
 */
protected int trimRight(int pos) {
    int p = pos;
    while (p != length && isWhitespace(expr[p])) {
        p++;
    }
    return p;
}
/**
 * If the cursor currently points at whitespace, advance it to the first
 * non-whitespace character (or to the end of input).
 */
protected void skipWhitespace() {
    while (cursor != length) {
        if (!isWhitespace(expr[cursor])) {
            return;
        }
        cursor++;
    }
}
/**
 * If the cursor is currently pointing to whitespace, move the cursor forward
 * to the first non-whitespace character while tracking line breaks in the
 * parser's {@code line} field (used for debug symbols).
 */
protected void skipWhitespaceWithLineAccounting() {
    while (cursor != length && isWhitespace(expr[cursor])) {
        switch (expr[cursor]) {
            case '\n':
                line++;
                // intentional fall-through: advance past the newline like '\r'
            case '\r':
                cursor++;
                continue;
        }
        cursor++;
    }
}
/**
 * Advance the cursor to the next token junction: a '{', a '(', or any
 * whitespace character, whichever comes first.
 */
protected void captureToNextTokenJunction() {
    while (cursor != length) {
        char c = expr[cursor];
        if (c == '{' || c == '(' || isWhitespace(c)) {
            return;
        }
        cursor++;
    }
}
/**
 * Move the cursor backwards over any whitespace immediately preceding it,
 * stopping at the first non-whitespace character (or position 0).
 */
protected void trimWhitespace() {
    while (cursor != 0) {
        if (!isWhitespace(expr[cursor - 1])) {
            return;
        }
        cursor--;
    }
}
/**
 * Check whether the specified string is a reserved word in the parser:
 * either a known literal keyword or an operator word.
 *
 * @param name candidate name
 * @return true if reserved
 */
public static boolean isReservedWord(String name) {
    if (LITERALS.containsKey(name)) {
        return true;
    }
    return OPERATORS.containsKey(name);
}
/**
 * Check whether the specified string is a valid name or label: every
 * character must be an identifier part and must not be '.'.
 * NOTE(review): an empty string vacuously passes — callers appear to
 * supply non-empty names; confirm before relying on this.
 *
 * @param name candidate name
 * @return true if valid
 */
public static boolean isValidNameorLabel(String name) {
    for (int i = 0; i < name.length(); i++) {
        char c = name.charAt(i);
        if (c == '.' || !isIdentifierPart(c)) {
            return false;
        }
    }
    return true;
}
/**
 * Install the expression to parse, consulting the shared precompiled-source
 * cache. On a cache miss the raw char array is installed with its logical
 * length trimmed of trailing whitespace, and a trimmed copy is cached.
 *
 * @param expression the expression source (null/empty is a no-op)
 */
protected void setExpression(String expression) {
    if (expression == null || "".equals(expression)) {
        return;
    }
    // Fetch once: the original containsKey()+get() pair on the synchronized
    // WeakHashMap was not atomic — the entry could be reclaimed between the
    // two calls, installing a null array. A single get() closes that race.
    char[] cached = EX_PRECACHE.get(expression);
    if (cached != null) {
        length = (expr = cached).length;
        return;
    }
    length = (this.expr = expression.toCharArray()).length;
    // Trim trailing whitespace from the logical length only; the backing
    // array keeps its full size.
    while (length != 0 && isWhitespace(this.expr[length - 1])) length--;
    char[] e = new char[length];
    System.arraycopy(this.expr, 0, e, 0, length);
    EX_PRECACHE.put(expression, e);
}
/**
 * Install a pre-tokenized expression buffer, trimming trailing whitespace
 * from the logical length (the array itself is kept as-is).
 *
 * @param expression expression buffer to install
 */
protected void setExpression(char[] expression) {
    this.expr = expression;
    int len = expression.length;
    while (len != 0 && isWhitespace(expression[len - 1])) {
        len--;
    }
    this.length = len;
}
/**
 * Return the previous non-whitespace character before the cursor, or the
 * character at position 0 if only whitespace precedes it. Returns 0 when
 * the cursor is at the start of input.
 *
 * @return the preceding significant character
 */
protected char lookToLast() {
    if (cursor == 0) return 0;
    int pos = cursor;
    while (pos != 0) {
        pos--;
        if (!isWhitespace(expr[pos])) {
            break;
        }
    }
    return expr[pos];
}
/**
 * Return the character immediately behind the cursor (delta -1), or 0 when
 * the cursor is at the start of input.
 *
 * @return the previous character, or 0
 */
protected char lookBehind() {
    return cursor == 0 ? 0 : expr[cursor - 1];
}
/**
 * Return the character immediately ahead of the cursor (delta +1), or 0 when
 * no such character exists.
 *
 * @return the next character, or 0
 */
protected char lookAhead() {
    int tmp = cursor + 1;
    // Guard with < rather than !=: when the cursor already sits at (or past)
    // the logical end, cursor + 1 overshoots 'length' and the old != test
    // indexed beyond it — reading stale trailing whitespace kept in the
    // backing array, or throwing ArrayIndexOutOfBoundsException.
    if (tmp < length) return expr[tmp];
    return 0;
}
/**
 * Return the character at the given forward delta from the current cursor
 * position, or 0 when the delta lands at or past the end of input.
 *
 * @param range forward delta
 * @return the character at cursor + range, or 0
 */
protected char lookAhead(int range) {
    int idx = cursor + range;
    return idx >= length ? 0 : expr[idx];
}
/**
 * NOTE: This method assumes the cursor is at the end of a logical statement
 * to begin with.
 * <p/>
 * Determines whether the logical statement is manually terminated with a
 * statement separator (';'), looking past any intervening whitespace.
 * The cursor itself is not moved.
 *
 * @return true if a ';' terminator follows (or the cursor is past the end)
 */
protected boolean isStatementManuallyTerminated() {
    if (cursor >= length) return true;
    for (int c = cursor; c < length; c++) {
        if (!isWhitespace(expr[c])) {
            return expr[c] == ';';
        }
    }
    // Only whitespace remained: no explicit terminator.
    return false;
}
/**
 * Returns true if the character at delta +1 of the cursor matches the
 * specified character.
 *
 * @param c character to compare against
 * @return true on match
 */
protected boolean isNext(char c) {
    return lookAhead() == c;
}
/**
 * Return the ParserContext bound to the current thread, lazily creating
 * and installing a fresh one when none exists yet.
 *
 * @return the thread's ParserContext (never null)
 */
protected ParserContext getParserContext() {
    ParserContext ctx = (parserContext == null) ? null : parserContext.get();
    if (ctx == null) {
        newContext();
        ctx = parserContext.get();
    }
    return ctx;
}
/**
 * Return the ParserContext bound to the current thread, creating one if absent.
 *
 * @return the thread's ParserContext
 */
public static ParserContext getCurrentThreadParserContext() {
    return contextControl(GET_OR_CREATE, null, null);
}
/**
 * Bind the given ParserContext to the current thread.
 *
 * @param pCtx context to install
 */
public static void setCurrentThreadParserContext(ParserContext pCtx) {
    contextControl(SET, pCtx, null);
}
/**
 * Create a new ParserContext in the current thread, rooted at this parser.
 */
protected void newContext() {
    contextControl(SET, new ParserContext(), this);
}
/**
 * Install the specified ParserContext in the current thread, rooted at
 * this parser.
 *
 * @param pCtx context to install
 */
protected void newContext(ParserContext pCtx) {
    contextControl(SET, pCtx, this);
}
/**
 * Remove the current ParserContext from the thread.
 */
protected void removeContext() {
    contextControl(REMOVE, null, this);
}
/**
 * Central accessor/mutator for the thread-local ParserContext.
 *
 * @param operation one of SET, REMOVE, GET, GET_OR_CREATE
 * @param pCtx      the context to install (SET only)
 * @param parser    the parser registered as root (SET / GET_OR_CREATE)
 * @return the current context for GET / GET_OR_CREATE; null otherwise
 */
protected static ParserContext contextControl(int operation, ParserContext pCtx, AbstractParser parser) {
    // NOTE(review): synchronizes on the global Runtime instance to guard the
    // lazy ThreadLocal initialization — a private lock object would be safer;
    // confirm nothing else relies on this monitor before changing it.
    synchronized (getRuntime()) {
        if (parserContext == null) parserContext = new ThreadLocal<ParserContext>();
        switch (operation) {
            case SET:
                pCtx.setRootParser(parser);
                parserContext.set(pCtx);
                return null;
            case REMOVE:
                parserContext.set(null);
                return null;
            case GET_OR_CREATE:
                if (parserContext.get() == null) {
                    parserContext.set(new ParserContext(parser));
                }
                // intentional fall-through to GET
            case GET:
                return parserContext.get();
        }
    }
    return null;
}
// Operation codes accepted by contextControl().
protected static final int SET = 0;
protected static final int REMOVE = 1;
protected static final int GET = 2;
protected static final int GET_OR_CREATE = 3;
/**
 * @return true when debug symbols (line labels) are being emitted
 */
public boolean isDebugSymbols() {
    return debugSymbols;
}
/**
 * Enable or disable emission of debug symbols.
 *
 * @param debugSymbols new setting
 */
public void setDebugSymbols(boolean debugSymbols) {
    this.debugSymbols = debugSymbols;
}
/**
 * @return the source file registered on the thread's ParserContext, or null
 *         when no context is bound
 */
protected static String getCurrentSourceFileName() {
    if (parserContext != null && parserContext.get() != null) {
        return parserContext.get().getSourceFile();
    }
    return null;
}
/**
 * Record a fatal parse error at the current cursor position, deriving the
 * column from the context's line offset.
 *
 * @param message error text
 */
protected void addFatalError(String message) {
    getParserContext().addError(new ErrorDetail(getParserContext().getLineCount(), cursor - getParserContext().getLineOffset(), true, message));
}
/**
 * Record a fatal parse error at an explicit row/column.
 *
 * @param message error text
 * @param row     line number
 * @param cols    column number
 */
protected void addFatalError(String message, int row, int cols) {
    getParserContext().addError(new ErrorDetail(row, cols, true, message));
}
/**
 * Record a non-fatal warning on the parser context.
 *
 * @param message warning text
 */
protected void addWarning(String message) {
    getParserContext().addError(new ErrorDetail(message, false));
}
// Language-level tiers accepted by setLanguageLevel(); each level includes
// every feature of the levels below it (see loadLanguageFeaturesByLevel).
public static final int LEVEL_5_CONTROL_FLOW = 5;
public static final int LEVEL_4_ASSIGNMENT = 4;
public static final int LEVEL_3_ITERATION = 3;
public static final int LEVEL_2_MULTI_STATEMENT = 2;
public static final int LEVEL_1_BASIC_LANG = 1;
public static final int LEVEL_0_PROPERTY_ONLY = 0;
/**
 * Replace the global operator table with the features permitted at the
 * given language level.
 * NOTE(review): clear() + putAll() is not atomic — a concurrently running
 * parser could briefly observe an empty table; confirm this is only called
 * during setup.
 *
 * @param level one of the LEVEL_* constants
 */
public static void setLanguageLevel(int level) {
    OPERATORS.clear();
    OPERATORS.putAll(loadLanguageFeaturesByLevel(level));
}
/**
 * Build the operator table for the given language level. The switch
 * intentionally falls through so each level also includes every feature
 * of the lower levels.
 *
 * @param languageLevel level 0-5 (see the LEVEL_* constants)
 * @return a freshly populated operator table
 */
public static Map<String, Integer> loadLanguageFeaturesByLevel(int languageLevel) {
    Map<String, Integer> operatorsTable = new HashMap<String, Integer>();
    switch (languageLevel) {
        case 5: // control flow operations
            operatorsTable.put("if", IF);
            operatorsTable.put("else", ELSE);
            operatorsTable.put("?", TERNARY);
            operatorsTable.put("switch", SWITCH);
            operatorsTable.put("function", FUNCTION);
            operatorsTable.put("def", FUNCTION);
            // fall through
        case 4: // assignment
            operatorsTable.put("=", ASSIGN);
            operatorsTable.put("var", UNTYPED_VAR);
            operatorsTable.put("+=", ASSIGN_ADD);
            operatorsTable.put("-=", ASSIGN_SUB);
            // fall through
        case 3: // iteration
            operatorsTable.put("foreach", FOREACH);
            operatorsTable.put("while", WHILE);
            operatorsTable.put("for", FOR);
            operatorsTable.put("do", DO);
            // fall through
        case 2: // multi-statement
            operatorsTable.put("return", RETURN);
            operatorsTable.put(";", END_OF_STMT);
            // fall through
        case 1: // boolean, math ops, projection, assertion, objection creation, block setters, imports
            operatorsTable.put("+", ADD);
            operatorsTable.put("-", SUB);
            operatorsTable.put("*", MULT);
            operatorsTable.put("**", POWER);
            operatorsTable.put("/", DIV);
            operatorsTable.put("%", MOD);
            operatorsTable.put("==", EQUAL);
            operatorsTable.put("!=", NEQUAL);
            operatorsTable.put(">", GTHAN);
            operatorsTable.put(">=", GETHAN);
            operatorsTable.put("<", LTHAN);
            operatorsTable.put("<=", LETHAN);
            operatorsTable.put("&&", AND);
            operatorsTable.put("and", AND);
            operatorsTable.put("||", OR);
            // NOTE(review): "or" maps to CHOR while "||" maps to OR — this
            // asymmetry looks deliberate but should be confirmed.
            operatorsTable.put("or", CHOR);
            operatorsTable.put("~=", REGEX);
            operatorsTable.put("instanceof", INSTANCEOF);
            operatorsTable.put("is", INSTANCEOF);
            operatorsTable.put("contains", CONTAINS);
            operatorsTable.put("soundslike", SOUNDEX);
            operatorsTable.put("strsim", SIMILARITY);
            operatorsTable.put("convertable_to", CONVERTABLE_TO);
            operatorsTable.put("#", STR_APPEND);
            operatorsTable.put("&", BW_AND);
            operatorsTable.put("|", BW_OR);
            operatorsTable.put("^", BW_XOR);
            operatorsTable.put("<<", BW_SHIFT_LEFT);
            operatorsTable.put("<<<", BW_USHIFT_LEFT);
            operatorsTable.put(">>", BW_SHIFT_RIGHT);
            operatorsTable.put(">>>", BW_USHIFT_RIGHT);
            operatorsTable.put("new", Operator.NEW);
            operatorsTable.put("in", PROJECTION);
            operatorsTable.put("with", WITH);
            operatorsTable.put("assert", ASSERT);
            operatorsTable.put("import", IMPORT);
            operatorsTable.put("import_static", IMPORT_STATIC);
            operatorsTable.put("++", INC);
            operatorsTable.put("--", DEC);
            // fall through
        case 0: // Property access and inline collections
            operatorsTable.put(":", TERNARY_ELSE);
    }
    return operatorsTable;
}
/**
 * Remove the current parser context from the thread (thread-local is
 * cleared, not the ThreadLocal itself).
 */
public static void resetParserContext() {
    contextControl(REMOVE, null, null);
}
}
| src/main/java/org/mvel/compiler/AbstractParser.java | /**
* MVEL (The MVFLEX Expression Language)
*
* Copyright (C) 2007 Christopher Brock, MVFLEX/Valhalla Project and the Codehaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.mvel.compiler;
import org.mvel.*;
import static org.mvel.Operator.*;
import org.mvel.ast.*;
import static org.mvel.util.ArrayTools.findFirst;
import org.mvel.util.ExecutionStack;
import static org.mvel.util.ParseTools.*;
import static org.mvel.util.PropertyTools.isDigit;
import static org.mvel.util.PropertyTools.isIdentifierPart;
import org.mvel.util.Stack;
import java.io.Serializable;
import static java.lang.Boolean.FALSE;
import static java.lang.Boolean.TRUE;
import static java.lang.Character.isWhitespace;
import static java.lang.Float.parseFloat;
import static java.lang.Runtime.getRuntime;
import static java.lang.System.getProperty;
import static java.lang.Thread.currentThread;
import static java.util.Collections.synchronizedMap;
import java.util.HashMap;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Christopher Brock
*/
public class AbstractParser implements Serializable {
// The raw expression being parsed.
protected char[] expr;
// Current read position within {@link #expr}.
protected int cursor;
// Logical length of the expression (trailing whitespace is trimmed off).
protected int length;
// Bit flags (ASTNode.*) carried into the nodes produced by this parser.
protected int fields;
// When true, '=' is consumed greedily to the end of the statement.
protected boolean greedy = true;
// True when the most recently produced node was an identifier.
protected boolean lastWasIdentifier = false;
// True when the last node emitted was a debug LineLabel.
protected boolean lastWasLineLabel = false;
// True when the parser just skipped over a comment.
protected boolean lastWasComment = false;
// Remains true only while every node seen so far is a literal.
protected boolean literalOnly = true;
// When true, LineLabel nodes are emitted for debugger support.
protected boolean debugSymbols = false;
// Current 1-based line number (only maintained when debugSymbols is on).
private int line = 1;
// The most recently produced AST node.
protected ASTNode lastNode;
// Weak cache of expression-string -> trimmed char[] (see setExpression).
private static Map<String, char[]> EX_PRECACHE;
// Built-in literal names (true, false, JCL class wrappers, ...).
public static final Map<String, Object> LITERALS =
new HashMap<String, Object>(35 * 2, 0.4f);
// Operator / keyword name -> Operator op-code.
public static final Map<String, Integer> OPERATORS =
new HashMap<String, Integer>(25 * 2, 0.4f);
// Scratch stack used during compilation.
protected Stack stk;
// Holds synthetic nodes (e.g. EndOfStatement) to be returned before the
// parser resumes scanning the character stream.
protected ExecutionStack splitAccumulator = new ExecutionStack();
// Per-thread parser context; managed exclusively through contextControl().
protected static ThreadLocal<ParserContext> parserContext;
static {
configureFactory();
/**
 * Setup the basic literals
 */
AbstractParser.LITERALS.put("true", TRUE);
AbstractParser.LITERALS.put("false", FALSE);
AbstractParser.LITERALS.put("null", null);
AbstractParser.LITERALS.put("nil", null);
AbstractParser.LITERALS.put("empty", BlankLiteral.INSTANCE);
//        AbstractParser.LITERALS.put("this", ThisLiteral.class);
/**
 * Add System and all the class wrappers from the JCL.
 */
LITERALS.put("System", System.class);
LITERALS.put("String", String.class);
LITERALS.put("Integer", Integer.class);
LITERALS.put("int", Integer.class);
LITERALS.put("Long", Long.class);
LITERALS.put("long", Long.class);
LITERALS.put("Boolean", Boolean.class);
LITERALS.put("boolean", Boolean.class);
LITERALS.put("Short", Short.class);
LITERALS.put("short", Short.class);
LITERALS.put("Character", Character.class);
LITERALS.put("char", Character.class);
LITERALS.put("Double", Double.class);
LITERALS.put("double", double.class);
LITERALS.put("Float", Float.class);
LITERALS.put("float", float.class);
LITERALS.put("Math", Math.class);
LITERALS.put("Void", Void.class);
LITERALS.put("Object", Object.class);
LITERALS.put("Class", Class.class);
LITERALS.put("ClassLoader", ClassLoader.class);
LITERALS.put("Runtime", Runtime.class);
LITERALS.put("Thread", Thread.class);
LITERALS.put("Compiler", Compiler.class);
LITERALS.put("StringBuffer", StringBuffer.class);
LITERALS.put("ThreadLocal", ThreadLocal.class);
LITERALS.put("SecurityManager", SecurityManager.class);
LITERALS.put("StrictMath", StrictMath.class);
LITERALS.put("Array", java.lang.reflect.Array.class);
// NOTE(review): substring(0, 2) of "1.8" yields "1." which parses as 1.0,
// so this >= 1.5 test looks like it never passes on 1.x JVMs, while "9",
// "11" etc. parse fine. Verify the intended version-detection behavior.
if (parseFloat(getProperty("java.version").substring(0, 2)) >= 1.5) {
try {
// Loaded reflectively so the class still compiles on pre-1.5 JDKs.
LITERALS.put("StringBuilder", currentThread().getContextClassLoader().loadClass("java.lang.StringBuilder"));
}
catch (Exception e) {
throw new RuntimeException("cannot resolve a built-in literal", e);
}
}
//LITERALS.putAll(Units.MEASUREMENTS_ALL);
//loadLanguageFeaturesByLevel(5);
setLanguageLevel(5);
}
/**
 * (Re)initializes the expression pre-cache. In thread-safe mode the weak
 * cache is wrapped in a synchronized view; otherwise a plain WeakHashMap
 * is used directly.
 */
public static void configureFactory() {
    Map<String, char[]> cache = new WeakHashMap<String, char[]>(10);
    EX_PRECACHE = MVEL.THREAD_SAFE ? synchronizedMap(cache) : cache;
}
/**
 * Fetches the next token, transparently skipping a single symbol-only
 * node (one whose fields value is -1).
 *
 * @return the next significant AST node, or null at end of input
 */
protected ASTNode nextTokenSkipSymbols() {
    ASTNode node = nextToken();
    boolean symbolOnly = node != null && node.getFields() == -1;
    return symbolOnly ? nextToken() : node;
}
/**
 * Retrieves the next token in the expression and returns it as an AST
 * node, advancing the cursor past the consumed characters. Synthetic
 * nodes queued in {@code splitAccumulator} are drained first. This is
 * the core of the MVEL tokenizer/parser.
 *
 * @return the next AST node, or {@code null} at end of input
 */
protected ASTNode nextToken() {
try {
/**
 * If the cursor is at the end of the expression, we have nothing more to do:
 * return null.
 */
if (cursor >= length) {
return null;
}
else if (!splitAccumulator.isEmpty()) {
return lastNode = (ASTNode) splitAccumulator.pop();
}
int brace, start = cursor, idx;
/**
 * Because of parser recursion for sub-expression parsing, we sometimes need to remain
 * certain field states. We do not reset for assignments, boolean mode, list creation or
 * a capture only mode.
 */
fields = fields & (ASTNode.INLINE_COLLECTION | ASTNode.COMPILE_IMMEDIATE);
boolean capture = false, union = false;
ParserContext pCtx = getParserContext();
if (debugSymbols) {
if (!lastWasLineLabel) {
if (pCtx.getSourceFile() == null) {
throw new CompileException("unable to produce debugging symbols: source name must be provided.");
}
line = pCtx.getLineCount();
skipWhitespaceWithLineAccounting();
if (!pCtx.isKnownLine(pCtx.getSourceFile(), pCtx.setLineCount(line)) && !pCtx.isBlockSymbols()) {
lastWasLineLabel = true;
pCtx.setLineAndOffset(line, cursor);
return lastNode = pCtx.setLastLineLabel(new LineLabel(pCtx.getSourceFile(), line));
}
}
else {
lastWasComment = lastWasLineLabel = false;
}
}
/**
 * Skip any whitespace currently under the starting point.
 */
while (start != length && isWhitespace(expr[start])) start++;
/**
 * From here to the end of the method is the core MVEL parsing code. Fiddling around here is asking for
 * trouble unless you really know what you're doing.
 */
for (cursor = start; cursor != length;) {
if (isIdentifierPart(expr[cursor])) {
/**
 * If the current character under the cursor is a valid
 * part of an identifier, we keep capturing.
 */
capture = true;
cursor++;
}
else if (capture) {
String t;
// The captured word may be a keyword/operator: dispatch on its op-code.
if (OPERATORS.containsKey(t = new String(expr, start, cursor - start))) {
switch (OPERATORS.get(t)) {
case NEW:
start = cursor + 1;
captureToEOT();
return lastNode = new NewObjectNode(subArray(start, cursor), fields);
case ASSERT:
start = cursor + 1;
captureToEOS();
return lastNode = new AssertNode(subArray(start, cursor--), fields);
case RETURN:
start = cursor + 1;
captureToEOS();
return lastNode = new ReturnNode(subArray(start, cursor), fields);
case IF:
return captureCodeBlock(ASTNode.BLOCK_IF);
case FOREACH:
return captureCodeBlock(ASTNode.BLOCK_FOREACH);
case WHILE:
return captureCodeBlock(ASTNode.BLOCK_WHILE);
case WITH:
return captureCodeBlock(ASTNode.BLOCK_WITH);
case IMPORT:
start = cursor + 1;
captureToEOS();
ImportNode importNode = new ImportNode(subArray(start, cursor--), fields);
if (importNode.isPackageImport()) {
pCtx.addPackageImport(importNode.getPackageImport());
cursor++;
}
else {
pCtx.addImport(getSimpleClassName(importNode.getImportClass()), importNode.getImportClass());
}
return importNode;
case IMPORT_STATIC:
start = cursor + 1;
captureToEOS();
return lastNode = new StaticImportNode(subArray(start, cursor--));
case FUNCTION:
Function function = (Function) captureCodeBlock(FUNCTION);
capture = false;
start = cursor + 1;
return function;
case UNTYPED_VAR:
start = cursor + 1;
captureToEOT();
int end = cursor;
skipWhitespace();
if (expr[cursor] == '=') {
// "var x = ..." — rewind and let the assignment logic handle it.
cursor = start;
continue;
}
else {
String name = new String(subArray(start, end));
if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedDeclTypedVarNode(idx, Object.class);
}
else {
return lastNode = new DeclTypedVarNode(name, Object.class, fields);
}
}
}
}
skipWhitespace();
/**
 * If we *were* capturing a token, and we just hit a non-identifier
 * character, we stop and figure out what to do.
 */
if (cursor != length && expr[cursor] == '(') {
cursor = balancedCapture(expr, cursor, '(') + 1;
}
/**
 * If we encounter any of the following cases, we are still dealing with
 * a contiguous token.
 */
String name;
if (cursor != length) {
switch (expr[cursor]) {
case '?':
// Null-safe navigation ("?.") keeps the token going.
// NOTE(review): no break — a '?' not preceded by '.' intentionally(?)
// falls through into the '+' handling below; confirm.
if (lookToLast() == '.') {
capture = true;
cursor++;
continue;
}
case '+':
switch (lookAhead()) {
case '+':
if ((idx = pCtx.variableIndexOf(name = new String(subArray(start, cursor)))) != -1) {
lastNode = new IndexedPostFixIncNode(idx);
}
else {
lastNode = new PostFixIncNode(name);
}
cursor += 2;
return lastNode;
case '=':
name = new String(expr, start, trimLeft(cursor) - start);
start = cursor += 2;
captureToEOS();
if (union) {
return lastNode = new DeepAssignmentNode(subArray(start, cursor), fields, Operator.ADD, t);
}
else if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedAssignmentNode(subArray(start, cursor), fields, Operator.ADD, name, idx);
}
else {
return lastNode = new AssignmentNode(subArray(start, cursor), fields, Operator.ADD, name);
}
}
break;
case '-':
switch (lookAhead()) {
case '-':
if ((idx = pCtx.variableIndexOf(name = new String(subArray(start, cursor)))) != -1) {
lastNode = new IndexedPostFixDecNode(idx);
}
else {
lastNode = new PostFixDecNode(name);
}
cursor += 2;
return lastNode;
case '=':
name = new String(expr, start, trimLeft(cursor) - start);
start = cursor += 2;
captureToEOS();
if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedOperativeAssign(subArray(start, cursor), Operator.SUB, idx, fields);
}
else {
return lastNode = new OperativeAssign(name, subArray(start, cursor), Operator.SUB, fields);
}
}
break;
case '*':
if (isNext('=')) {
name = new String(expr, start, trimLeft(cursor) - start);
start = cursor += 2;
captureToEOS();
if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedOperativeAssign(subArray(start, cursor), Operator.MULT, idx, fields);
}
else {
return lastNode = new OperativeAssign(name, subArray(start, cursor), Operator.MULT, fields);
}
}
break;
case '/':
if (isNext('=')) {
name = new String(expr, start, trimLeft(cursor) - start);
start = cursor += 2;
captureToEOS();
if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedOperativeAssign(subArray(start, cursor), Operator.DIV, idx, fields);
}
else {
return lastNode = new OperativeAssign(name, subArray(start, cursor), Operator.DIV, fields);
}
}
break;
case ']':
case '[':
cursor = balancedCapture(expr, cursor, '[') + 1;
continue;
case '.':
union = true;
cursor++;
continue;
case '~':
if (isNext('=')) {
char[] stmt = subArray(start, trimLeft(cursor));
start = cursor += 2;
skipWhitespace();
return lastNode = new RegExMatch(stmt, fields, subArray(start, (cursor = balancedCapture(expr, cursor, expr[cursor]) + 1)));
}
break;
case '=':
if (isNext('+')) {
name = new String(expr, start, trimLeft(cursor) - start);
start = cursor += 2;
captureToEOS();
if ((idx = pCtx.variableIndexOf(name)) != -1) {
return lastNode = new IndexedOperativeAssign(subArray(start, cursor), Operator.ADD, idx, fields);
}
else {
return lastNode = new OperativeAssign(name, subArray(start, cursor), Operator.ADD, fields);
}
}
if (greedy && !isNext('=')) {
cursor++;
captureToEOS();
if (union) {
return lastNode = new DeepAssignmentNode(subArray(start, cursor), fields | ASTNode.ASSIGN);
}
else if (lastWasIdentifier) {
/**
 * Check for typing information.
 */
if (lastNode.getLiteralValue() instanceof String) {
if (pCtx.hasImport((String) lastNode.getLiteralValue())) {
lastNode.setLiteralValue(pCtx.getImport((String) lastNode.getLiteralValue()));
lastNode.setAsLiteral();
lastNode.discard();
}
else if (stk != null && stk.peek() instanceof Class) {
lastNode.setLiteralValue(stk.pop());
lastNode.setAsLiteral();
lastNode.discard();
}
else {
try {
/**
 * take a stab in the dark and try and load the class
 */
lastNode.setLiteralValue(createClass((String) lastNode.getLiteralValue()));
lastNode.setAsLiteral();
lastNode.discard();
}
catch (ClassNotFoundException e) {
/**
 * Just fail through.
 */
}
}
}
if (lastNode.isLiteral() && lastNode.getLiteralValue() instanceof Class) {
lastNode.discard();
captureToEOS();
return new TypedVarNode(subArray(start, cursor), fields | ASTNode.ASSIGN, (Class)
lastNode.getLiteralValue());
}
throw new ParseException("unknown class: " + lastNode.getLiteralValue());
}
else
if (pCtx != null && ((idx = pCtx.variableIndexOf(t)) != -1 || (pCtx.isIndexAllocation()))) {
IndexedAssignmentNode ian = new IndexedAssignmentNode(subArray(start, cursor), ASTNode.ASSIGN, idx);
if (idx == -1) {
pCtx.addIndexedVariable(t = ian.getAssignmentVar());
ian.setRegister(idx = pCtx.variableIndexOf(t));
}
return lastNode = ian;
}
else {
return lastNode = new AssignmentNode(subArray(start, cursor), fields | ASTNode.ASSIGN);
}
}
}
}
/**
 * Produce the token.
 */
trimWhitespace();
return createPropertyToken(start, cursor);
}
else {
// Not inside an identifier capture: dispatch on the symbol itself.
String name;
switch (expr[cursor]) {
case '@': {
start++;
captureToEOT();
name = new String(expr, start, cursor - start);
if (pCtx.getInterceptors() == null || !pCtx.getInterceptors().
containsKey(name)) {
throw new CompileException("reference to undefined interceptor: " + name, expr, cursor);
}
return lastNode = new InterceptorWrapper(pCtx.getInterceptors().get(name), nextToken());
}
case '=':
return createToken(expr, start, (cursor += 2), fields);
case '-':
if (isNext('-')) {
start = cursor += 2;
captureToEOT();
if ((idx = pCtx.variableIndexOf(name = new String(subArray(start, cursor)))) != -1) {
return lastNode = new IndexedPreFixDecNode(idx);
}
else {
return lastNode = new PreFixDecNode(name);
}
}
else if ((cursor != 0 && !isWhitespace(lookBehind())) || !isDigit(lookAhead())) {
return createToken(expr, start, cursor++ + 1, fields);
}
// NOTE(review): when neither branch above matches, control falls
// through into the '+' case below — confirm that is intentional.
else if ((cursor - 1) != 0 || (!isDigit(lookBehind())) && isDigit(lookAhead())) {
cursor++;
break;
}
case '+':
if (isNext('+')) {
start = cursor += 2;
captureToEOT();
if ((idx = pCtx.variableIndexOf(name = new String(subArray(start, cursor)))) != -1) {
return lastNode = new IndexedPreFixIncNode(idx);
}
else {
return lastNode = new PreFixIncNode(name);
}
}
return createToken(expr, start, cursor++ + 1, fields);
case '*':
if (isNext('*')) {
cursor++;
}
return createToken(expr, start, cursor++ + 1, fields);
case ';':
cursor++;
lastWasIdentifier = false;
return lastNode = new EndOfStatement();
case '#':
case '/':
if (isNext(expr[cursor])) {
/**
 * Handle single line comments.
 */
while (cursor != length && expr[cursor] != '\n') cursor++;
if (debugSymbols) {
line = pCtx.getLineCount();
skipWhitespaceWithLineAccounting();
if (lastNode instanceof LineLabel) {
pCtx.getLastLineLabel().setLineNumber(line);
pCtx.addKnownLine(line);
}
lastWasComment = true;
pCtx.setLineCount(line);
}
else if (cursor != length) {
skipWhitespace();
}
// Comment consumed the rest of the input: nothing more to return.
if ((start = cursor) >= length) return null;
continue;
}
else if (expr[cursor] == '/' && isNext('*')) {
/**
 * Handle multi-line comments.
 */
int len = length - 1;
/**
 * This probably seems highly redundant, but sub-compilations within the same
 * source will spawn a new compiler, and we need to sync this with the
 * parser context;
 */
if (debugSymbols) {
line = pCtx.getLineCount();
}
while (true) {
cursor++;
/**
 * Since multi-line comments may cross lines, we must keep track of any line-break
 * we encounter.
 */
if (debugSymbols) {
skipWhitespaceWithLineAccounting();
}
if (cursor == len) {
throw new CompileException("unterminated block comment", expr, cursor);
}
if (expr[cursor] == '*' && isNext('/')) {
if ((cursor += 2) >= length) return null;
skipWhitespaceWithLineAccounting();
start = cursor;
break;
}
}
if (debugSymbols) {
pCtx.setLineCount(line);
if (lastNode instanceof LineLabel) {
pCtx.getLastLineLabel().setLineNumber(line);
pCtx.addKnownLine(line);
}
lastWasComment = true;
}
continue;
}
case '?':
case ':':
case '^':
case '%': {
return createToken(expr, start, cursor++ + 1, fields);
}
case '(': {
cursor++;
boolean singleToken = true;
boolean lastWS = false;
skipWhitespace();
for (brace = 1; cursor != length && brace != 0; cursor++) {
switch (expr[cursor]) {
case '(':
brace++;
break;
case ')':
brace--;
break;
case '\'':
cursor = captureStringLiteral('\'', expr, cursor, length);
break;
case '"':
cursor = captureStringLiteral('"', expr, cursor, length);
break;
case 'i':
// Possible projection: "(x in collection)".
if (isNext('n') && isWhitespace(lookAhead(2)) && !isIdentifierPart(lookBehind())) {
fields |= ASTNode.FOLD;
for (int level = brace; cursor != length; cursor++) {
switch (expr[cursor]) {
case '(':
brace++;
break;
case ')':
if (--brace != level) {
if (lookAhead() == '.') {
lastNode = createToken(expr, trimRight(start + 1), (start = cursor++), ASTNode.FOLD);
captureToEOT();
return lastNode = new Union(expr, trimRight(start + 2), cursor, fields, lastNode);
}
else {
return createToken(expr, trimRight(start + 1), cursor++, ASTNode.FOLD);
}
}
break;
case '\'':
cursor = captureStringLiteral('\'', expr, cursor, length);
break;
case '"':
// NOTE(review): scans for '\'' although this case matches '"' —
// looks like a copy/paste slip from the case above; confirm.
cursor = captureStringLiteral('\'', expr, cursor, length);
break;
}
}
}
break;
default:
/**
 * Check to see if we should disqualify this current token as a potential
 * type-cast candidate.
 */
if ((lastWS && expr[cursor] != '.') || !(isIdentifierPart(expr[cursor]) || expr[cursor] == '.')) {
singleToken = false;
}
else if (isWhitespace(expr[cursor])) {
lastWS = true;
skipWhitespace();
cursor--;
}
}
}
if (brace != 0) {
throw new CompileException("unbalanced braces in expression: (" + brace + "):", expr, cursor);
}
char[] _subset = null;
if (singleToken) {
int st;
String tokenStr = new String(_subset = subset(expr, st = trimRight(start + 1), trimLeft(cursor - 1) - st));
if (pCtx.hasImport(tokenStr)) {
start = cursor;
captureToEOS();
return lastNode = new TypeCast(subset(expr, start, cursor - start), pCtx.getImport(tokenStr), fields);
}
else {
try {
/**
 *
 * take a stab in the dark and try and load the class
 */
int _start = cursor;
captureToEOS();
return lastNode = new TypeCast(subset(expr, _start, cursor - _start), createClass(tokenStr), fields);
}
catch (ClassNotFoundException e) {
/**
 * Just fail through.
 */
}
}
}
if (_subset != null) {
return handleUnion(handleSubstatement(new Substatement(_subset, fields)));
}
else {
return handleUnion(handleSubstatement(new Substatement(subset(expr, start = trimRight(start + 1), trimLeft(cursor - 1) - start), fields)));
}
}
case '}':
case ']':
case ')': {
throw new ParseException("unbalanced braces", expr, cursor);
}
case '>': {
if (expr[cursor + 1] == '>') {
if (expr[cursor += 2] == '>') cursor++;
return createToken(expr, start, cursor, fields);
}
else if (expr[cursor + 1] == '=') {
return createToken(expr, start, cursor += 2, fields);
}
else {
return createToken(expr, start, ++cursor, fields);
}
}
case '<': {
if (expr[++cursor] == '<') {
if (expr[++cursor] == '<') cursor++;
return createToken(expr, start, cursor, fields);
}
else if (expr[cursor] == '=') {
return createToken(expr, start, ++cursor, fields);
}
else {
return createToken(expr, start, cursor, fields);
}
}
case '\'':
case '"':
lastNode = new LiteralNode(
handleStringEscapes(
subset(expr, start + 1, (cursor = captureStringLiteral(expr[cursor], expr, cursor, length)) - start - 1))
, String.class);
cursor++;
if (tokenContinues()) {
return lastNode = handleUnion(lastNode);
}
return lastNode;
case '&': {
if (expr[cursor++ + 1] == '&') {
return createToken(expr, start, ++cursor, fields);
}
else {
return createToken(expr, start, cursor, fields);
}
}
case '|': {
if (expr[cursor++ + 1] == '|') {
return createToken(expr, start, ++cursor, fields);
}
else {
return createToken(expr, start, cursor, fields);
}
}
case '~':
if ((cursor++ - 1 != 0 || !isIdentifierPart(lookBehind()))
&& isDigit(expr[cursor])) {
start = cursor;
captureToEOT();
return lastNode = new Invert(subset(expr, start, cursor - start), fields);
}
else if (expr[cursor] == '(') {
start = cursor--;
captureToEOT();
return lastNode = new Invert(subset(expr, start, cursor - start), fields);
}
else {
if (expr[cursor] == '=') cursor++;
return createToken(expr, start, cursor, fields);
}
case '!': {
if (isIdentifierPart(expr[++cursor])) {
start = cursor;
captureToEOT();
return lastNode = new Negation(subset(expr, start, cursor - start), fields);
}
else if (expr[cursor] == '(') {
start = cursor--;
captureToEOT();
return lastNode = new Negation(subset(expr, start, cursor - start), fields);
}
else if (expr[cursor] != '=')
throw new CompileException("unexpected operator '!'", expr, cursor, null);
else {
return createToken(expr, start, ++cursor, fields);
}
}
case '[':
case '{':
cursor = balancedCapture(expr, cursor, expr[cursor]) + 1;
if (tokenContinues()) {
//   if (lookAhead(1) == '.') {
lastNode = new InlineCollectionNode(expr, start, start = cursor, fields);
captureToEOT();
return lastNode = new Union(expr, start + 1, cursor, fields, lastNode);
}
else {
return lastNode = new InlineCollectionNode(expr, start, cursor, fields);
}
default:
cursor++;
}
}
}
if (start == cursor) return null;
return createPropertyToken(start, cursor);
}
catch (CompileException e) {
// Re-wrap to attach the full expression and current cursor position.
throw new CompileException(e.getMessage(), expr, cursor, e.getCursor() == 0, e);
}
}
/**
 * Constant-folds a parenthesized sub-statement: when the inner statement
 * exists and is composed entirely of literals, it is evaluated once and
 * replaced with a LiteralNode; otherwise the substatement is returned
 * unchanged.
 */
public ASTNode handleSubstatement(Substatement stmt) {
    if (stmt.getStatement() == null || !stmt.getStatement().isLiteralOnly()) {
        return stmt;
    }
    return new LiteralNode(stmt.getStatement().getValue(null, null, null), fields);
}
/**
 * Checks whether the node just produced is immediately followed by a
 * property access ('.') or an index access ('['); if so, the trailing
 * accessor chain is captured and fused with the node into a Union.
 * Otherwise the node is returned as-is.
 */
protected ASTNode handleUnion(ASTNode node) {
    if (cursor != length) {
        skipWhitespace();
        char next = expr[cursor];
        if (next == '.') {
            int unionStart = cursor + 1;
            captureToEOT();
            return lastNode = new Union(expr, unionStart, cursor, fields, node);
        }
        if (next == '[') {
            captureToEOT();
            return lastNode = new Union(expr, cursor, cursor, fields, node);
        }
    }
    return lastNode = node;
}
/**
 * Creates a generic ASTNode over the given character range, records it as
 * the most recent node, and updates the identifier-tracking flag.
 *
 * @param expr   the source character buffer
 * @param start  inclusive start offset of the token
 * @param end    exclusive end offset of the token
 * @param fields node field flags
 * @return the newly created node
 */
private ASTNode createToken(final char[] expr, final int start, final int end, int fields) {
    ASTNode node = new ASTNode(expr, start, end, fields);
    lastNode = node;
    lastWasIdentifier = node.isIdentifier();
    return node;
}
/**
 * Returns a copy of the region [start, end) of the current expression
 * buffer. An empty array is returned when the range is empty or inverted.
 *
 * @param start inclusive start offset into {@code expr}
 * @param end   exclusive end offset into {@code expr}
 * @return a freshly allocated copy of the requested slice
 */
private char[] subArray(final int start, final int end) {
    if (start >= end) {
        return new char[0];
    }
    char[] slice = new char[end - start];
    // Bulk intrinsic copy instead of the previous element-by-element loop.
    System.arraycopy(expr, start, slice, 0, slice.length);
    return slice;
}
/**
 * Produces a node for an identifier-like token: resolves context imports
 * (plain and deep "Foo.bar" forms), built-in literals and operator names,
 * falling back to a plain ASTNode property reference.
 */
private ASTNode createPropertyToken(int start, int end) {
lastWasIdentifier = true;
String tmp;
if (parserContext != null && parserContext.get() != null && parserContext.get().hasImports()) {
// NOTE(review): uses 'cursor' rather than the 'end' parameter here —
// verify the two are always equal on this path.
char[] _subset = subset(expr, start, cursor - start);
int offset;
if ((offset = findFirst('.', _subset)) != -1) {
// "Alias.property" — the head may be an imported class.
String iStr = new String(_subset, 0, offset);
if (getParserContext().hasImport(iStr)) {
return lastNode = new LiteralDeepPropertyNode(subset(_subset, offset + 1, _subset.length - offset - 1), fields, getParserContext().getImport(iStr));
}
}
else {
if (getParserContext().hasImport(tmp = new String(_subset))) {
Object i = getParserContext().getStaticOrClassImport(tmp);
if (i instanceof Class) {
return lastNode = new LiteralNode(i, Class.class);
}
}
lastWasIdentifier = true;
return lastNode = new ASTNode(_subset, 0, _subset.length, fields);
}
}
else if ((fields & ASTNode.METHOD) != 0) {
return lastNode = new ASTNode(expr, start, end, fields);
}
else if (LITERALS.containsKey(tmp = new String(expr, start, end - start))) {
return lastNode = new LiteralNode(LITERALS.get(tmp));
}
else if (OPERATORS.containsKey(tmp)) {
return lastNode = new OperatorNode(OPERATORS.get(tmp));
}
return lastNode = new ASTNode(expr, start, end, fields);
}
/**
 * Builds the block node (if/foreach/while/with) for a captured condition
 * range and body range. If the statement is not explicitly terminated
 * with ';', a synthetic EndOfStatement is queued so the caller still sees
 * a statement boundary.
 */
private ASTNode createBlockToken(final int condStart,
final int condEnd, final int blockStart, final int blockEnd, int type) {
lastWasIdentifier = false;
cursor++;
if (!isStatementManuallyTerminated()) {
splitAccumulator.add(new EndOfStatement());
}
switch (type) {
case ASTNode.BLOCK_IF:
return new IfNode(subArray(condStart, condEnd), subArray(blockStart, blockEnd), fields);
case ASTNode.BLOCK_FOREACH:
return new ForEachNode(subArray(condStart, condEnd), subArray(blockStart, blockEnd), fields);
case ASTNode.BLOCK_WHILE:
return new WhileNode(subArray(condStart, condEnd), subArray(blockStart, blockEnd), fields);
default:
// Anything else is treated as a 'with' block.
return new WithNode(subArray(condStart, condEnd), subArray(blockStart, blockEnd), fields);
}
}
/**
 * Captures a complete code block of the given type. For BLOCK_IF the loop
 * keeps consuming chained "else if"/"else" sections until the chain ends,
 * returning the first IfNode (which links to its else branches); other
 * block types are captured in a single pass.
 */
private ASTNode captureCodeBlock(int type) {
boolean cond = true;
ASTNode first = null;
ASTNode tk = null;
switch (type) {
case ASTNode.BLOCK_IF: {
do {
if (tk != null) {
captureToNextTokenJunction();
skipWhitespace();
// cond is true only for "else if (...)"; a bare "else { ... }"
// (or '{' directly) means an unconditional else block.
cond = expr[cursor] != '{' && expr[cursor] == 'i' && expr[++cursor] == 'f'
&& (isWhitespace(expr[++cursor]) || expr[cursor] == '(');
}
if (((IfNode) (tk = _captureBlock(tk, expr, cond, type))).getElseBlock() != null) {
cursor++;
return first;
}
if (first == null) first = tk;
if (cursor != length && expr[cursor] != ';') {
cursor++;
}
}
while (blockContinues());
return first;
}
default: // either BLOCK_WITH or BLOCK_FOREACH
captureToNextTokenJunction();
if (debugSymbols) {
skipWhitespaceWithLineAccounting();
}
else {
skipWhitespace();
}
return _captureBlock(null, expr, true, type);
}
}
/**
 * Captures one block section: the optional parenthesized condition and the
 * braced (or single-statement) body, then delegates to createBlockToken.
 * For BLOCK_IF with an existing node, the capture is attached as an
 * else / else-if branch. FUNCTION declarations are handled separately.
 *
 * @param node existing IfNode to extend, or null for a fresh block
 * @param expr the expression buffer
 * @param cond whether a parenthesized condition must be captured
 * @param type block type constant (ASTNode.BLOCK_* or FUNCTION)
 */
private ASTNode _captureBlock(ASTNode node, final char[] expr, boolean cond, int type) {
skipWhitespace();
int startCond = 0;
int endCond = 0;
int blockStart;
int blockEnd;
/**
 * Functions are a special case we handle differently from the rest of block parsing
 */
if (type == FUNCTION) {
int start = cursor;
captureToNextTokenJunction();
if (cursor == length) {
throw new CompileException("expected '('", expr, start);
}
String functionName = new String(expr, start, (startCond = cursor) - start).trim();
if (isReservedWord(functionName))
throw new CompileException("illegal function name: use of reserved word", expr, cursor);
if (expr[cursor] == '(') {
// Parameter list present: capture "( ... )" as the condition range.
endCond = cursor = balancedCapture(expr, startCond = cursor, '(');
startCond++;
cursor++;
skipWhitespace();
if (cursor >= length) {
throw new CompileException("unbalanced braces", expr, cursor);
}
else if (expr[cursor] == '{') {
blockStart = cursor;
blockEnd = cursor = balancedCapture(expr, cursor, '{');
}
else {
blockStart = cursor;
captureToEOS();
blockEnd = cursor;
}
}
else {
if (expr[cursor] == '{') {
blockStart = cursor;
blockEnd = cursor = balancedCapture(expr, cursor, '{');
}
else {
blockStart = cursor;
captureToEOS();
blockEnd = cursor;
}
}
blockStart = trimRight(blockStart + 1);
blockEnd = trimLeft(blockEnd);
cursor++;
if (!isStatementManuallyTerminated()) {
splitAccumulator.add(new EndOfStatement());
}
return new Function(functionName, subArray(startCond, endCond), subArray(blockStart, blockEnd));
}
else if (cond) {
if (debugSymbols) {
int[] cap = balancedCaptureWithLineAccounting(expr, startCond = cursor, '(');
endCond = cursor = cap[0];
startCond++;
cursor++;
getParserContext().setLineCount(line = getParserContext().getLineCount() + cap[1]);
}
else {
endCond = cursor = balancedCapture(expr, startCond = cursor, '(');
startCond++;
cursor++;
}
}
skipWhitespace();
if (cursor >= length) {
throw new CompileException("unbalanced braces", expr, cursor);
}
else if (expr[cursor] == '{') {
blockStart = cursor;
if (debugSymbols) {
int[] cap = balancedCaptureWithLineAccounting(expr, cursor, '{');
blockEnd = cursor = cap[0];
getParserContext().setLineCount((line = getParserContext().getLineCount() + cap[1]));
}
else {
blockEnd = cursor = balancedCapture(expr, cursor, '{');
}
}
else {
// Unbraced single-statement body: capture to end of line or ';'.
blockStart = cursor - 1;
captureToEOLorOF();
blockEnd = cursor + 1;
}
if (type == ASTNode.BLOCK_IF) {
IfNode ifNode = (IfNode) node;
if (node != null) {
if (!cond) {
return ifNode.setElseBlock(subArray(trimRight(blockStart + 1), trimLeft(blockEnd - 1)));
}
else {
return ifNode.setElseIf((IfNode) createBlockToken(startCond, endCond, trimRight(blockStart + 1),
trimLeft(blockEnd), type));
}
}
else {
return createBlockToken(startCond, endCond, blockStart + 1, blockEnd, type);
}
}
// DON'T REMOVE THIS COMMENT!
// else if (isFlag(ASTNode.BLOCK_FOREACH) || isFlag(ASTNode.BLOCK_WITH)) {
else {
return createBlockToken(startCond, endCond, trimRight(blockStart + 1), trimLeft(blockEnd), type);
}
}
/**
 * Determines whether an if-chain continues with an "else" keyword.
 * Side effect: may back the cursor up one position and skip whitespace
 * while probing; callers rely on this adjusted cursor position.
 */
protected boolean blockContinues() {
if ((cursor + 4) < length) {
if (expr[cursor] != ';') cursor--;
skipWhitespace();
// Match the literal word "else" followed by whitespace or '{'.
return expr[cursor] == 'e' && expr[cursor + 1] == 'l' && expr[cursor + 2] == 's' && expr[cursor + 3] == 'e'
&& (isWhitespace(expr[cursor + 4]) || expr[cursor + 4] == '{');
}
return false;
}
/**
 * Determines whether the token under the cursor continues via a '.' or '['
 * accessor, optionally looking across intervening whitespace. The cursor
 * is left advanced past the whitespace only when a continuation is found;
 * otherwise it is restored.
 */
protected boolean tokenContinues() {
    if (cursor >= length) {
        return false;
    }
    char c = expr[cursor];
    if (c == '.' || c == '[') {
        return true;
    }
    if (isWhitespace(c)) {
        int rewindPoint = cursor;
        skipWhitespace();
        if (cursor != length && (expr[cursor] == '.' || expr[cursor] == '[')) {
            return true;
        }
        cursor = rewindPoint;
    }
    return false;
}
/**
 * Advances the cursor to the end of the current statement: a ';', a '}',
 * or end of input. Balanced (), [] and {} groups are skipped atomically so
 * terminators inside them are ignored.
 */
protected void captureToEOS() {
while (cursor != length) {
switch (expr[cursor]) {
case '(':
case '[':
case '{':
cursor = balancedCapture(expr, cursor, expr[cursor]);
break;
case ';':
case '}':
// Statement terminator: leave the cursor ON the terminator.
return;
}
cursor++;
}
}
/**
 * Advances the cursor to the next line break ('\n' or '\r'), statement
 * terminator (';'), or end of the expression, whichever comes first.
 */
protected void captureToEOLorOF() {
    while (cursor != length) {
        char c = expr[cursor];
        if (c == '\n' || c == '\r' || c == ';') {
            break;
        }
        cursor++;
    }
}
/**
 * Advances the cursor to the end of the current token. Balanced groups are
 * skipped atomically; '=', '&amp;', '|' and ';' terminate the token, and a
 * '.' (possibly surrounded by whitespace) continues it. On plain
 * whitespace the cursor is trimmed back to the token's last character.
 */
protected void captureToEOT() {
skipWhitespace();
while (++cursor != length) {
switch (expr[cursor]) {
case '(':
case '[':
case '{':
cursor = balancedCapture(expr, cursor, expr[cursor]);
break;
case '=':
case '&':
case '|':
case ';':
return;
case '.':
skipWhitespace();
break;
default:
if (isWhitespace(expr[cursor])) {
skipWhitespace();
if (expr[cursor] == '.') {
// Whitespace before '.': the property chain continues.
if (cursor != length) cursor++;
skipWhitespace();
break;
}
else {
// Plain whitespace ends the token; rewind past it.
trimWhitespace();
return;
}
}
}
}
}
/**
 * Moves the given offset backwards past any whitespace immediately to its
 * left and returns the adjusted offset (stopping at offset 0).
 */
protected int trimLeft(int pos) {
    int p = pos;
    while (p != 0 && isWhitespace(expr[p - 1])) {
        p--;
    }
    return p;
}
/**
 * Moves the given offset forwards past any whitespace under it and returns
 * the adjusted offset (stopping at the end of the expression).
 */
protected int trimRight(int pos) {
    int p = pos;
    while (p != length && isWhitespace(expr[p])) {
        p++;
    }
    return p;
}
/**
 * Advances the cursor past any run of whitespace, stopping at the first
 * non-whitespace character or the end of the expression.
 */
protected void skipWhitespace() {
    for (; cursor != length; cursor++) {
        if (!isWhitespace(expr[cursor])) {
            break;
        }
    }
}
/**
 * Advances the cursor past whitespace like {@link #skipWhitespace()}, but
 * also increments the line counter for every '\n' encountered (used when
 * debug symbols are enabled).
 */
protected void skipWhitespaceWithLineAccounting() {
while (cursor != length && isWhitespace(expr[cursor])) {
switch (expr[cursor]) {
case '\n':
line++;
// intentional fall-through: '\n' and '\r' both just advance the cursor
case '\r':
cursor++;
continue;
}
cursor++;
}
}
/**
 * Advances the cursor to the next structural junction: an opening brace,
 * an opening parenthesis, or any whitespace character (cursor is left on
 * the junction character).
 */
protected void captureToNextTokenJunction() {
    while (cursor != length) {
        char c = expr[cursor];
        if (c == '{' || c == '(' || isWhitespace(c)) {
            return;
        }
        cursor++;
    }
}
/**
 * Backs the cursor up over any whitespace immediately preceding it.
 */
protected void trimWhitespace() {
    for (; cursor != 0 && isWhitespace(expr[cursor - 1]); cursor--) {
        // all work happens in the loop header
    }
}
/**
 * Initializes the parser with the given expression string. Conversions are
 * memoized in {@code EX_PRECACHE}: the string is converted once to a
 * right-trimmed char[] and reused on subsequent parses of the same
 * expression. Null or empty expressions are ignored.
 *
 * @param expression the expression source; may be null or empty
 */
protected void setExpression(String expression) {
    if (expression == null || "".equals(expression)) {
        return;
    }
    // Single get() instead of containsKey()+get(): entries of the weak cache
    // can be reclaimed between the two calls (even through the synchronized
    // wrapper the check-then-act is not atomic), which could yield a null
    // array and an NPE on .length below.
    char[] cached = EX_PRECACHE.get(expression);
    if (cached != null) {
        length = (expr = cached).length;
        return;
    }
    length = (this.expr = expression.toCharArray()).length;
    // Trim trailing whitespace: the logical length shrinks, the buffer does not.
    while (length != 0 && isWhitespace(this.expr[length - 1])) length--;
    char[] trimmed = new char[length];
    // Bulk copy instead of the previous element-by-element loop.
    System.arraycopy(this.expr, 0, trimmed, 0, length);
    EX_PRECACHE.put(expression, trimmed);
}
/**
 * Initializes the parser directly from a character buffer, adjusting the
 * logical length to exclude trailing whitespace (the buffer itself is
 * used as-is, not copied).
 */
protected void setExpression(char[] expression) {
    this.expr = expression;
    int len = expression.length;
    while (len != 0 && isWhitespace(expression[len - 1])) {
        len--;
    }
    length = len;
}
/**
 * Reports whether the given name is reserved: either a built-in literal
 * or an operator/keyword known to the parser.
 */
public static boolean isReservedWord(String name) {
    if (LITERALS.containsKey(name)) {
        return true;
    }
    return OPERATORS.containsKey(name);
}
/**
 * Returns the last non-whitespace character before the cursor, or 0 when
 * the cursor is at the start of the expression. The cursor is not moved.
 */
protected char lookToLast() {
    if (cursor == 0) {
        return 0;
    }
    int probe = cursor;
    while (probe != 0) {
        probe--;
        if (!isWhitespace(expr[probe])) {
            break;
        }
    }
    return expr[probe];
}
/**
 * Returns the character immediately behind the cursor, or 0 when the
 * cursor is at offset 0.
 */
protected char lookBehind() {
    return cursor == 0 ? 0 : expr[cursor - 1];
}
/**
 * Returns the character immediately after the cursor, or 0 when the
 * cursor sits on the final character of the expression.
 */
protected char lookAhead() {
    int next = cursor + 1;
    return next != length ? expr[next] : 0;
}
/**
 * Returns the character {@code range} positions ahead of the cursor, or 0
 * when that position lies at or beyond the end of the expression.
 */
protected char lookAhead(int range) {
    int pos = cursor + range;
    return pos < length ? expr[pos] : 0;
}
/**
 * Looks ahead (without moving the cursor) to determine whether the current
 * statement is explicitly terminated by a ';' after optional whitespace.
 * End of input also counts as terminated.
 */
protected boolean isStatementManuallyTerminated() {
    if (cursor >= length) {
        return true;
    }
    int probe = cursor;
    while (probe != length && isWhitespace(expr[probe])) {
        probe++;
    }
    return probe != length && expr[probe] == ';';
}
protected boolean isNext(char c) {
return lookAhead() == c;
}
protected ParserContext getParserContext() {
if (parserContext == null || parserContext.get() == null) {
newContext();
}
return parserContext.get();
}
public static ParserContext getCurrentThreadParserContext() {
return contextControl(GET_OR_CREATE, null, null);
}
public static void setCurrentThreadParserContext(ParserContext pCtx) {
contextControl(SET, pCtx, null);
}
protected void newContext() {
contextControl(SET, new ParserContext(), this);
}
protected void newContext(ParserContext pCtx) {
contextControl(SET, pCtx, this);
}
protected void removeContext() {
contextControl(REMOVE, null, this);
}
protected static ParserContext contextControl(int operation, ParserContext pCtx, AbstractParser parser) {
synchronized (getRuntime()) {
if (parserContext == null) parserContext = new ThreadLocal<ParserContext>();
switch (operation) {
case SET:
pCtx.setRootParser(parser);
parserContext.set(pCtx);
return null;
case REMOVE:
parserContext.set(null);
return null;
case GET_OR_CREATE:
if (parserContext.get() == null) {
parserContext.set(new ParserContext(parser));
}
case GET:
return parserContext.get();
}
}
return null;
}
protected static final int SET = 0;
protected static final int REMOVE = 1;
protected static final int GET = 2;
protected static final int GET_OR_CREATE = 3;
public boolean isDebugSymbols() {
return debugSymbols;
}
public void setDebugSymbols(boolean debugSymbols) {
this.debugSymbols = debugSymbols;
}
protected static String getCurrentSourceFileName() {
if (parserContext != null && parserContext.get() != null) {
return parserContext.get().getSourceFile();
}
return null;
}
protected void addFatalError(String message) {
getParserContext().addError(new ErrorDetail(getParserContext().getLineCount(), cursor - getParserContext().getLineOffset(), true, message));
}
protected void addFatalError(String message, int row, int cols) {
getParserContext().addError(new ErrorDetail(row, cols, true, message));
}
protected void addWarning(String message) {
getParserContext().addError(new ErrorDetail(message, false));
}
public static final int LEVEL_5_CONTROL_FLOW = 5;
public static final int LEVEL_4_ASSIGNMENT = 4;
public static final int LEVEL_3_ITERATION = 3;
public static final int LEVEL_2_MULTI_STATEMENT = 2;
public static final int LEVEL_1_BASIC_LANG = 1;
public static final int LEVEL_0_PROPERTY_ONLY = 0;
public static void setLanguageLevel(int level) {
OPERATORS.clear();
OPERATORS.putAll(loadLanguageFeaturesByLevel(level));
}
public static Map<String, Integer> loadLanguageFeaturesByLevel(int languageLevel) {
Map<String, Integer> operatorsTable = new HashMap<String, Integer>();
switch (languageLevel) {
case 5: // control flow operations
operatorsTable.put("if", IF);
operatorsTable.put("else", ELSE);
operatorsTable.put("?", TERNARY);
operatorsTable.put("switch", SWITCH);
operatorsTable.put("function", FUNCTION);
operatorsTable.put("def", FUNCTION);
case 4: // assignment
operatorsTable.put("=", ASSIGN);
operatorsTable.put("var", UNTYPED_VAR);
operatorsTable.put("+=", ASSIGN_ADD);
operatorsTable.put("-=", ASSIGN_SUB);
case 3: // iteration
operatorsTable.put("foreach", FOREACH);
operatorsTable.put("while", WHILE);
operatorsTable.put("for", FOR);
operatorsTable.put("do", DO);
case 2: // multi-statement
operatorsTable.put("return", RETURN);
operatorsTable.put(";", END_OF_STMT);
case 1: // boolean, math ops, projection, assertion, objection creation, block setters, imports
operatorsTable.put("+", ADD);
operatorsTable.put("-", SUB);
operatorsTable.put("*", MULT);
operatorsTable.put("**", POWER);
operatorsTable.put("/", DIV);
operatorsTable.put("%", MOD);
operatorsTable.put("==", EQUAL);
operatorsTable.put("!=", NEQUAL);
operatorsTable.put(">", GTHAN);
operatorsTable.put(">=", GETHAN);
operatorsTable.put("<", LTHAN);
operatorsTable.put("<=", LETHAN);
operatorsTable.put("&&", AND);
operatorsTable.put("and", AND);
operatorsTable.put("||", OR);
operatorsTable.put("or", CHOR);
operatorsTable.put("~=", REGEX);
operatorsTable.put("instanceof", INSTANCEOF);
operatorsTable.put("is", INSTANCEOF);
operatorsTable.put("contains", CONTAINS);
operatorsTable.put("soundslike", SOUNDEX);
operatorsTable.put("strsim", SIMILARITY);
operatorsTable.put("convertable_to", CONVERTABLE_TO);
operatorsTable.put("#", STR_APPEND);
operatorsTable.put("&", BW_AND);
operatorsTable.put("|", BW_OR);
operatorsTable.put("^", BW_XOR);
operatorsTable.put("<<", BW_SHIFT_LEFT);
operatorsTable.put("<<<", BW_USHIFT_LEFT);
operatorsTable.put(">>", BW_SHIFT_RIGHT);
operatorsTable.put(">>>", BW_USHIFT_RIGHT);
operatorsTable.put("new", Operator.NEW);
operatorsTable.put("in", PROJECTION);
operatorsTable.put("with", WITH);
operatorsTable.put("assert", ASSERT);
operatorsTable.put("import", IMPORT);
operatorsTable.put("import_static", IMPORT_STATIC);
operatorsTable.put("++", INC);
operatorsTable.put("--", DEC);
case 0: // Property access and inline collections
operatorsTable.put(":", TERNARY_ELSE);
}
return operatorsTable;
}
public static void resetParserContext() {
contextControl(REMOVE, null, null);
}
}
| code comments added. | src/main/java/org/mvel/compiler/AbstractParser.java | code comments added. |
|
Java | apache-2.0 | 991821968ce2c9950254fa86c56b9a024596d3a4 | 0 | eFaps/eFaps-Kernel-Install,eFaps/eFaps-Kernel-Install | /*
* Copyright 2003 - 2013 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Revision: $Rev$
* Last Changed: $Date$
* Last Changed By: $Author$
*/
package org.efaps.esjp.common.jasperreport;
import java.awt.Color;
import java.io.File;
import java.io.InputStream;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Map;
import net.sf.dynamicreports.jasper.builder.JasperReportBuilder;
import net.sf.dynamicreports.jasper.builder.export.Exporters;
import net.sf.dynamicreports.jasper.builder.export.JasperPdfExporterBuilder;
import net.sf.dynamicreports.jasper.builder.export.JasperXhtmlExporterBuilder;
import net.sf.dynamicreports.jasper.builder.export.JasperXlsExporterBuilder;
import net.sf.dynamicreports.jasper.constant.SizeUnit;
import net.sf.dynamicreports.report.builder.DynamicReports;
import net.sf.dynamicreports.report.builder.component.SubreportBuilder;
import net.sf.dynamicreports.report.builder.style.StyleBuilder;
import net.sf.dynamicreports.report.constant.HorizontalAlignment;
import net.sf.dynamicreports.report.constant.VerticalAlignment;
import net.sf.dynamicreports.report.exception.DRException;
import net.sf.jasperreports.engine.JRDataSource;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.event.Parameter.ParameterValues;
import org.efaps.admin.program.esjp.EFapsRevision;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.ci.CIAdminProgram;
import org.efaps.db.Checkout;
import org.efaps.db.Context;
import org.efaps.db.InstanceQuery;
import org.efaps.db.QueryBuilder;
import org.efaps.esjp.common.file.FileUtil;
import org.efaps.util.EFapsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TODO comment!
*
* @author The eFaps Team
* @version $Id$
*/
@EFapsUUID("599905c7-373a-4c9c-928f-2cf2714d30b3")
@EFapsRevision("$Rev$")
public abstract class AbstractDynamicReport_Base
{
/**
* Logging instance used to give logging information of this class.
*/
protected static final Logger LOG = LoggerFactory.getLogger(AbstractDynamicReport_Base.class);
/**
* Reportbuilder this class is based on.
*/
private final JasperReportBuilder report = DynamicReports.report();
private String fileName;
/**
* Get the style for the columns in case of a html document.
*
* @param _parameter Parameter as passed by the eFaps API
* @throws EFapsException on error
*/
protected void configure4Html(final Parameter _parameter)
throws EFapsException
{
getReport().setColumnTitleStyle(getColumnTitleStyle4Html(_parameter))
.setColumnStyle(getColumnStyle4Html(_parameter))
.setGroupStyle(getGroupStyle4Html(_parameter))
.setGroupTitleStyle(getGroupStyle4Html(_parameter))
.setIgnorePageWidth(true).setIgnorePagination(true).setHighlightDetailEvenRows(true);
}
/**
* Get the style for the columns in case of a excel document.
*
* @param _parameter Parameter as passed by the eFaps API
* @throws EFapsException on error
*/
protected void configure4Excel(final Parameter _parameter)
throws EFapsException
{
getReport().setColumnTitleStyle(getColumnTitleStyle4Excel(_parameter))
.setColumnStyle(getColumnStyle4Excel(_parameter)).ignorePageWidth().ignorePagination();
}
/**
* Get the style for the columns in case of a pdf document.
*
* @param _parameter Parameter as passed by the eFaps API
* @throws EFapsException on error
*/
protected void configure4Pdf(final Parameter _parameter,
final JasperReportBuilder _builder)
throws EFapsException
{
_builder.setColumnTitleStyle(getColumnTitleStyle4Pdf(_parameter))
.setColumnStyle(getColumnStyle4Pdf(_parameter))
.setIgnorePageWidth(false).setIgnorePagination(false).setHighlightDetailEvenRows(true);
}
/**
* Get the style for the columns in case of a html document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnStyle4Html(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style().setPadding(DynamicReports.stl.padding(2));
}
/**
* Get the style for the columns in case of a excel document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnStyle4Excel(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style();
}
/**
* Get the style for the columns in case of a pdf document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnStyle4Pdf(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style().setPadding(DynamicReports.stl.padding(2))
.setLeftBorder(DynamicReports.stl.pen1Point())
.setRightBorder(DynamicReports.stl.pen1Point());
}
/**
* Get the style for the title columns in case of a html document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnTitleStyle4Html(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style()
.setBorder(DynamicReports.stl.pen1Point())
.setHorizontalAlignment(HorizontalAlignment.CENTER)
.setBackgroundColor(Color.LIGHT_GRAY)
.bold().setVerticalAlignment(VerticalAlignment.MIDDLE)
.setPadding(DynamicReports.stl.padding(2));
}
/**
* Get the style for the title columns in case of a excel document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnTitleStyle4Excel(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style().setHorizontalAlignment(HorizontalAlignment.CENTER)
.setVerticalAlignment(VerticalAlignment.MIDDLE).setPadding(2)
.setBackgroundColor(Color.LIGHT_GRAY)
.bold();
}
/**
* Get the style for the title columns in case of a pdf document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnTitleStyle4Pdf(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style()
.setBorder(DynamicReports.stl.pen1Point())
.setHorizontalAlignment(HorizontalAlignment.CENTER)
.setBackgroundColor(Color.LIGHT_GRAY)
.bold().setVerticalAlignment(VerticalAlignment.MIDDLE)
.setPadding(DynamicReports.stl.padding(2));
}
/**
* Get the style for the group columns in case of a html document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return style as StyleBuilder
* @throws EFapsException on error
*/
private StyleBuilder getGroupStyle4Html(final Parameter _parameter)
{
return DynamicReports.stl.style().bold()
.setHorizontalAlignment(HorizontalAlignment.LEFT)
.setPadding(DynamicReports.stl.padding(2));
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @return a JRDataSource
* @throws EFapsException on error
*/
protected abstract JRDataSource createDataSource(final Parameter _parameter)
throws EFapsException;
/**
* @param _parameter Parameter as passed by the eFaps API
* @param _builder Report builder
* @throws EFapsException on error
*/
protected abstract void addColumnDefintion(final Parameter _parameter,
final JasperReportBuilder _builder)
throws EFapsException;
/**
* Getter method for the instance variable {@link #report}.
*
* @return value of instance variable {@link #report}
*/
public JasperReportBuilder getReport()
{
return this.report;
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
public String getHtml(final Parameter _parameter) throws EFapsException
{
return getHtml(_parameter, false);
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @param _strip strip the body tags, so that onlty the table remains
* @return html document as String
* @throws EFapsException on error
*/
public File getPDF(final Parameter _parameter)
throws EFapsException
{
File file = null;
try {
final Map<?, ?> properties = (Map<?, ?>) _parameter.get(ParameterValues.PROPERTIES);
final String template = String.valueOf(properties.get("Template"));
final JasperReportBuilder subreport = DynamicReports.report();
configure4Pdf(_parameter, subreport);
addColumnDefintion(_parameter, subreport);
final SubreportBuilder sub = DynamicReports.cmp.subreport(subreport);
sub.setDataSource(createDataSource(_parameter));
getReport().detail(sub);
file = new FileUtil().getFile(getFileName() == null ? "PDF" : getFileName(), "pdf");
final JasperPdfExporterBuilder exporter = Exporters.pdfExporter(file);
final InputStream in = getTemplate(_parameter, template);
getReport().setTemplateDesign(in);
final EFapsDataSource ds = new EFapsDataSource();
ds.init(getReport().toJasperReport(), _parameter, null, null);
getReport().setLocale(Context.getThreadContext().getLocale()).setDataSource(ds).toPdf(exporter);
} catch (final DRException e) {
AbstractDynamicReport_Base.LOG.error("catched DRException", e);
}
return file;
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @param _strip strip the body tags, so that onlty the table remains
* @return html document as String
* @throws EFapsException on error
*/
public File getExcel(final Parameter _parameter)
throws EFapsException
{
File file = null;
try {
file = new FileUtil().getFile(getFileName() == null ? "XLS" : getFileName(), "xls");
addColumnDefintion(_parameter, getReport());
final JasperXlsExporterBuilder exporter = Exporters.xlsExporter(file);
exporter.setIgnorePageMargins(true)
.setDetectCellType(true)
.setIgnoreCellBackground(true)
.setWhitePageBackground(false)
.setRemoveEmptySpaceBetweenColumns(true);
configure4Excel(_parameter);
getReport().setLocale(Context.getThreadContext().getLocale())
.setDataSource(createDataSource(_parameter)).toXls(exporter);
} catch (final DRException e) {
AbstractDynamicReport_Base.LOG.error("catched DRException", e);
}
return file;
}
/**
* @return filename
*/
public String getFileName()
{
return this.fileName;
}
/**
* Setter method for instance variable {@link #fileName}.
*
* @param fileName value for instance variable {@link #fileName}
*/
public void setFileName(final String _fileName)
{
this.fileName = _fileName;
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @param _strip strip the body tags, so that onlty the table remains
* @return html document as String
* @throws EFapsException on error
*/
public String getHtml(final Parameter _parameter,
final boolean _strip)
throws EFapsException
{
final Writer writer = new StringWriter();
try {
addColumnDefintion(_parameter, getReport());
final JasperXhtmlExporterBuilder exporter = Exporters.xhtmlExporter(writer);
if (_strip) {
exporter.setHtmlHeader("").setHtmlFooter("");
}
exporter.setIgnorePageMargins(true).setSizeUnit(SizeUnit.PIXEL);
configure4Html(_parameter);
getReport().setLocale(Context.getThreadContext().getLocale())
.setDataSource(createDataSource(_parameter)).toXhtml(exporter);
} catch (final DRException e) {
AbstractDynamicReport_Base.LOG.error("catched DRException", e);
}
return writer.toString();
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @param _strip strip the body tags, so that onlty the table remains
* @return html document as String
* @throws EFapsException on error
*/
public String getHtmlSnipplet(final Parameter _parameter)
throws EFapsException
{
return getHtml(_parameter, true);
}
public InputStream getTemplate(final Parameter _parameter,
final String _jasperFileName)
{
AbstractDynamicReport_Base.LOG.debug("getting Template '{}'", _jasperFileName);
InputStream ret = null;
try {
final QueryBuilder queryBldr = new QueryBuilder(CIAdminProgram.JasperReport);
;
queryBldr.addWhereAttrEqValue(CIAdminProgram.JasperReport.Name, _jasperFileName);
final InstanceQuery query = queryBldr.getQuery();
query.execute();
if (query.next()) {
final Checkout checkout = new Checkout(query.getCurrentValue());
ret = checkout.execute();
}
} catch (final EFapsException e) {
AbstractDynamicReport_Base.LOG.error("error in getTemplate", e);
}
return ret;
}
}
| src/main/efaps/ESJP/org/efaps/esjp/common/jasperreport/AbstractDynamicReport_Base.java | /*
* Copyright 2003 - 2013 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Revision: $Rev$
* Last Changed: $Date$
* Last Changed By: $Author$
*/
package org.efaps.esjp.common.jasperreport;
import java.awt.Color;
import java.io.File;
import java.io.InputStream;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Map;
import net.sf.dynamicreports.jasper.builder.JasperReportBuilder;
import net.sf.dynamicreports.jasper.builder.export.Exporters;
import net.sf.dynamicreports.jasper.builder.export.JasperPdfExporterBuilder;
import net.sf.dynamicreports.jasper.builder.export.JasperXhtmlExporterBuilder;
import net.sf.dynamicreports.jasper.builder.export.JasperXlsExporterBuilder;
import net.sf.dynamicreports.jasper.constant.SizeUnit;
import net.sf.dynamicreports.report.builder.DynamicReports;
import net.sf.dynamicreports.report.builder.component.SubreportBuilder;
import net.sf.dynamicreports.report.builder.style.StyleBuilder;
import net.sf.dynamicreports.report.constant.HorizontalAlignment;
import net.sf.dynamicreports.report.constant.VerticalAlignment;
import net.sf.dynamicreports.report.exception.DRException;
import net.sf.jasperreports.engine.JRDataSource;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.event.Parameter.ParameterValues;
import org.efaps.admin.program.esjp.EFapsRevision;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.ci.CIAdminProgram;
import org.efaps.db.Checkout;
import org.efaps.db.Context;
import org.efaps.db.InstanceQuery;
import org.efaps.db.QueryBuilder;
import org.efaps.esjp.common.file.FileUtil;
import org.efaps.util.EFapsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TODO comment!
*
* @author The eFaps Team
* @version $Id$
*/
@EFapsUUID("599905c7-373a-4c9c-928f-2cf2714d30b3")
@EFapsRevision("$Rev$")
public abstract class AbstractDynamicReport_Base
{
/**
* Logging instance used to give logging information of this class.
*/
protected static final Logger LOG = LoggerFactory.getLogger(AbstractDynamicReport_Base.class);
/**
* Reportbuilder this class is based on.
*/
private final JasperReportBuilder report = DynamicReports.report();
private String fileName;
/**
* Get the style for the columns in case of a html document.
*
* @param _parameter Parameter as passed by the eFaps API
* @throws EFapsException on error
*/
protected void configure4Html(final Parameter _parameter)
throws EFapsException
{
getReport().setColumnTitleStyle(getColumnTitleStyle4Html(_parameter))
.setColumnStyle(getColumnStyle4Html(_parameter))
.setIgnorePageWidth(true).setIgnorePagination(true).setHighlightDetailEvenRows(true);
}
/**
* Get the style for the columns in case of a excel document.
*
* @param _parameter Parameter as passed by the eFaps API
* @throws EFapsException on error
*/
protected void configure4Excel(final Parameter _parameter)
throws EFapsException
{
getReport().setColumnTitleStyle(getColumnTitleStyle4Excel(_parameter))
.setColumnStyle(getColumnStyle4Excel(_parameter)).ignorePageWidth().ignorePagination();
}
/**
* Get the style for the columns in case of a pdf document.
*
* @param _parameter Parameter as passed by the eFaps API
* @throws EFapsException on error
*/
protected void configure4Pdf(final Parameter _parameter,
final JasperReportBuilder _builder)
throws EFapsException
{
_builder.setColumnTitleStyle(getColumnTitleStyle4Pdf(_parameter))
.setColumnStyle(getColumnStyle4Pdf(_parameter))
.setIgnorePageWidth(false).setIgnorePagination(false).setHighlightDetailEvenRows(true);
}
/**
* Get the style for the columns in case of a html document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnStyle4Html(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style().setPadding(DynamicReports.stl.padding(2));
}
/**
* Get the style for the columns in case of a excel document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnStyle4Excel(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style();
}
/**
* Get the style for the columns in case of a pdf document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnStyle4Pdf(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style().setPadding(DynamicReports.stl.padding(2))
.setLeftBorder(DynamicReports.stl.pen1Point())
.setRightBorder(DynamicReports.stl.pen1Point());
}
/**
* Get the style for the title columns in case of a html document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnTitleStyle4Html(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style()
.setBorder(DynamicReports.stl.pen1Point())
.setHorizontalAlignment(HorizontalAlignment.CENTER)
.setBackgroundColor(Color.LIGHT_GRAY)
.bold().setVerticalAlignment(VerticalAlignment.MIDDLE)
.setPadding(DynamicReports.stl.padding(2));
}
/**
* Get the style for the title columns in case of a excel document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnTitleStyle4Excel(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style().setHorizontalAlignment(HorizontalAlignment.CENTER)
.setVerticalAlignment(VerticalAlignment.MIDDLE).setPadding(2)
.setBackgroundColor(Color.LIGHT_GRAY)
.bold();
}
/**
* Get the style for the title columns in case of a pdf document.
*
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
protected StyleBuilder getColumnTitleStyle4Pdf(final Parameter _parameter)
throws EFapsException
{
return DynamicReports.stl.style()
.setBorder(DynamicReports.stl.pen1Point())
.setHorizontalAlignment(HorizontalAlignment.CENTER)
.setBackgroundColor(Color.LIGHT_GRAY)
.bold().setVerticalAlignment(VerticalAlignment.MIDDLE)
.setPadding(DynamicReports.stl.padding(2));
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @return a JRDataSource
* @throws EFapsException on error
*/
protected abstract JRDataSource createDataSource(final Parameter _parameter)
throws EFapsException;
/**
* @param _parameter Parameter as passed by the eFaps API
* @param _builder Report builder
* @throws EFapsException on error
*/
protected abstract void addColumnDefintion(final Parameter _parameter,
final JasperReportBuilder _builder)
throws EFapsException;
/**
* Getter method for the instance variable {@link #report}.
*
* @return value of instance variable {@link #report}
*/
public JasperReportBuilder getReport()
{
return this.report;
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @return html document as String
* @throws EFapsException on error
*/
public String getHtml(final Parameter _parameter) throws EFapsException
{
return getHtml(_parameter, false);
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @param _strip strip the body tags, so that onlty the table remains
* @return html document as String
* @throws EFapsException on error
*/
public File getPDF(final Parameter _parameter)
throws EFapsException
{
File file = null;
try {
final Map<?, ?> properties = (Map<?, ?>) _parameter.get(ParameterValues.PROPERTIES);
final String template = String.valueOf(properties.get("Template"));
final JasperReportBuilder subreport = DynamicReports.report();
configure4Pdf(_parameter, subreport);
addColumnDefintion(_parameter, subreport);
final SubreportBuilder sub = DynamicReports.cmp.subreport(subreport);
sub.setDataSource(createDataSource(_parameter));
getReport().detail(sub);
file = new FileUtil().getFile(getFileName() == null ? "PDF" : getFileName(), "pdf");
final JasperPdfExporterBuilder exporter = Exporters.pdfExporter(file);
final InputStream in = getTemplate(_parameter, template);
getReport().setTemplateDesign(in);
final EFapsDataSource ds = new EFapsDataSource();
ds.init(getReport().toJasperReport(), _parameter, null, null);
getReport().setLocale(Context.getThreadContext().getLocale()).setDataSource(ds).toPdf(exporter);
} catch (final DRException e) {
AbstractDynamicReport_Base.LOG.error("catched DRException", e);
}
return file;
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @param _strip strip the body tags, so that onlty the table remains
* @return html document as String
* @throws EFapsException on error
*/
public File getExcel(final Parameter _parameter)
throws EFapsException
{
File file = null;
try {
file = new FileUtil().getFile(getFileName() == null ? "XLS" : getFileName(), "xls");
addColumnDefintion(_parameter, getReport());
final JasperXlsExporterBuilder exporter = Exporters.xlsExporter(file);
exporter.setIgnorePageMargins(true)
.setDetectCellType(true)
.setIgnoreCellBackground(true)
.setWhitePageBackground(false)
.setRemoveEmptySpaceBetweenColumns(true);
configure4Excel(_parameter);
getReport().setLocale(Context.getThreadContext().getLocale())
.setDataSource(createDataSource(_parameter)).toXls(exporter);
} catch (final DRException e) {
AbstractDynamicReport_Base.LOG.error("catched DRException", e);
}
return file;
}
/**
* @return filename
*/
public String getFileName()
{
return this.fileName;
}
/**
* Setter method for instance variable {@link #fileName}.
*
* @param fileName value for instance variable {@link #fileName}
*/
public void setFileName(final String _fileName)
{
this.fileName = _fileName;
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @param _strip strip the body tags, so that onlty the table remains
* @return html document as String
* @throws EFapsException on error
*/
public String getHtml(final Parameter _parameter,
final boolean _strip)
throws EFapsException
{
final Writer writer = new StringWriter();
try {
addColumnDefintion(_parameter, getReport());
final JasperXhtmlExporterBuilder exporter = Exporters.xhtmlExporter(writer);
if (_strip) {
exporter.setHtmlHeader("").setHtmlFooter("");
}
exporter.setIgnorePageMargins(true).setSizeUnit(SizeUnit.PIXEL);
configure4Html(_parameter);
getReport().setLocale(Context.getThreadContext().getLocale())
.setDataSource(createDataSource(_parameter)).toXhtml(exporter);
} catch (final DRException e) {
AbstractDynamicReport_Base.LOG.error("catched DRException", e);
}
return writer.toString();
}
/**
* @param _parameter Parameter as passed by the eFaps API
* @param _strip strip the body tags, so that onlty the table remains
* @return html document as String
* @throws EFapsException on error
*/
public String getHtmlSnipplet(final Parameter _parameter)
throws EFapsException
{
return getHtml(_parameter, true);
}
public InputStream getTemplate(final Parameter _parameter,
final String _jasperFileName)
{
AbstractDynamicReport_Base.LOG.debug("getting Template '{}'", _jasperFileName);
InputStream ret = null;
try {
final QueryBuilder queryBldr = new QueryBuilder(CIAdminProgram.JasperReport);
;
queryBldr.addWhereAttrEqValue(CIAdminProgram.JasperReport.Name, _jasperFileName);
final InstanceQuery query = queryBldr.getQuery();
query.execute();
if (query.next()) {
final Checkout checkout = new Checkout(query.getCurrentValue());
ret = checkout.execute();
}
} catch (final EFapsException e) {
AbstractDynamicReport_Base.LOG.error("error in getTemplate", e);
}
return ret;
}
}
| - kernel-install: add method to set style for groups.
git-svn-id: 4afd028e37a0ecb7b60cc6a38eb25d9930f4ee19@9011 fee104cc-1dfa-8c0f-632d-d3b7e6b59fb0
| src/main/efaps/ESJP/org/efaps/esjp/common/jasperreport/AbstractDynamicReport_Base.java | - kernel-install: add method to set style for groups. |
|
Java | apache-2.0 | d912243b397a0f643c19f6573a6606190e846619 | 0 | sourcepit/maven-dependency-model | /**
* Copyright (c) 2013 Sourcepit.org contributors and others. All rights reserved. This program and the accompanying
* materials are made available under the terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.sourcepit.maven.dependency.model.impl;
import org.sourcepit.common.maven.model.ArtifactKey;
import org.sourcepit.common.maven.model.MavenArtifact;
import org.sourcepit.maven.dependency.model.DependencyModel;
import org.sourcepit.maven.dependency.model.DependencyTree;
public final class DependencyModelOperations
{
private DependencyModelOperations()
{
super();
}
public static MavenArtifact getArtifact(DependencyModel model, ArtifactKey artifactKey)
{
for (MavenArtifact artifact : model.getArtifacts())
{
if (artifactKey.equals(artifact.getArtifactKey()))
{
return artifact;
}
}
return null;
}
public static DependencyTree getDependencyTree(DependencyModel model, ArtifactKey artifactKey)
{
for (DependencyTree tree : model.getDependencyTrees())
{
final MavenArtifact artifact = tree.getArtifact();
if (artifact != null && artifactKey.equals(artifact.getArtifactKey()))
{
return tree;
}
}
return null;
}
}
| src/main/java/org/sourcepit/maven/dependency/model/impl/DependencyModelOperations.java | /**
* Copyright (c) 2013 Sourcepit.org contributors and others. All rights reserved. This program and the accompanying
* materials are made available under the terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.sourcepit.maven.dependency.model.impl;
import org.sourcepit.common.maven.model.ArtifactKey;
import org.sourcepit.common.maven.model.MavenArtifact;
import org.sourcepit.maven.dependency.model.DependencyModel;
import org.sourcepit.maven.dependency.model.DependencyTree;
public final class DependencyModelOperations
{
private DependencyModelOperations()
{
super();
}
public static MavenArtifact getArtifact(DependencyModel model, ArtifactKey artifactKey)
{
for (MavenArtifact artifact : model.getArtifacts())
{
if (artifactKey.equals(artifact.getArtifactKey()))
{
return artifact;
}
}
return null;
}
public static DependencyTree getDependencyTree(DependencyModel model, ArtifactKey artifactKey)
{
for (DependencyTree tree : model.getDependencyTrees())
{
if (artifactKey.equals(tree.getArtifact().getArtifactKey()))
{
return tree;
}
}
return null;
}
}
| [fixed] NPE in DependencyModelOperations#getDependencyTree(..) | src/main/java/org/sourcepit/maven/dependency/model/impl/DependencyModelOperations.java | [fixed] NPE in DependencyModelOperations#getDependencyTree(..) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.